Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix #6638: default log parser #6646

Merged
merged 56 commits into from
Feb 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
56 commits
Select commit Hold shift + click to select a range
ecbe7d8
Fix #6638: ckp
Jan 10, 2024
44d196f
Fix #6638: ckp
Jan 10, 2024
787a8aa
Fix #6638: WIP
Jan 10, 2024
55e73f7
Fix #6638: ckp
Jan 10, 2024
0e3e0dc
Fix #6638: WIP
Jan 10, 2024
4955635
Fix #6638: ckp
Jan 10, 2024
397fe2e
Fix #6638: ckp
Jan 10, 2024
e5a361f
Fix #6638: WIP
Jan 11, 2024
196b65f
Fix #6638: cleanup
Jan 11, 2024
99a849b
Fix #6638: ckp
Jan 11, 2024
48d5766
Fix #6638: ckp
Jan 11, 2024
cb4057c
Fix #6638: CKP
Jan 11, 2024
e8016c5
Fix #6638: ckp
Jan 11, 2024
9f81b94
FIx #6638: ckp
Jan 11, 2024
6952ef1
Fix #6638: ckp
Jan 11, 2024
82814d4
Fix #6638: ckp
Jan 11, 2024
9f7d7af
Fix #6638: ckp
Jan 11, 2024
3f573d1
Fix #6638: CKP
Jan 11, 2024
922289b
Fix #6638: cloudmc
Jan 11, 2024
1700fb7
Fix #6638: ckp
Jan 12, 2024
4444872
Fix #6638: note
Jan 12, 2024
0df37e7
Fix #6638: class ckp
Jan 12, 2024
75da300
Fix #6638: ckp
Jan 12, 2024
0089951
Fix #6638: ckp
Jan 12, 2024
7a78480
Fix #6638: ckp
Jan 12, 2024
3c018b4
Fix #6638: ckp
Jan 12, 2024
1be4338
Fix #6638: ckp
Jan 12, 2024
e22ff2b
Fix #6638: ckp
Jan 12, 2024
97813a7
Fix #6638: ckp
Jan 12, 2024
581feae
Fix #6638: ckp
Jan 12, 2024
2037a0f
Fix #6638: ckp
Jan 12, 2024
854e3bb
Fix #6638: ckp
Jan 22, 2024
2580a34
Fix #6638 ckp
Jan 22, 2024
88c716f
Fix #6638: ckp
Jan 22, 2024
d87fd61
Fix #6638: cleanup
Jan 22, 2024
ce49eb5
Merge branch 'master' into 6638-default-log-parser
Jan 22, 2024
727e0b4
Fix #6638: ckp
Jan 23, 2024
37811cd
Fix #6638:c ckp
Jan 23, 2024
30c380d
Fix #6638: ckp
Jan 23, 2024
ecc443d
Fix #6638: log_filename default
Jan 25, 2024
a39af57
for #6638 added test data
moellep Jan 26, 2024
35e5d81
for #6638 expanded opal log parser regex
moellep Jan 26, 2024
e4a2abb
Fix #6638: ckp
Jan 31, 2024
c329a25
Merge branch 'master' into 6638-default-log-parser
Jan 31, 2024
16c72fa
Fix #6638: ckp
Jan 31, 2024
4cfad43
Fix #6638: note
Jan 31, 2024
c128a4c
Fix #6638: ckp
Jan 31, 2024
5c75144
FIx #6638: ckp
Jan 31, 2024
42dc1bf
Fix #6638: tests ckp
Jan 31, 2024
aa6e92f
Fix #6638: ckp
Jan 31, 2024
c199515
Fix #6638: ckp
Feb 1, 2024
f5bd70f
Fix #6638: ckp
Feb 1, 2024
eec5c8f
Fix #6638: ckp
Feb 1, 2024
0c6d6ec
Fix #6638: tests
Feb 1, 2024
ec49be6
Fix #6638: ckp
Feb 1, 2024
8149f95
Merge branch 'master' into 6638-default-log-parser
Feb 1, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 11 additions & 7 deletions sirepo/template/activait.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import sirepo.sim_data
import sirepo.util

_LOG_FILE = "run.log"

_SEGMENT_ROWS = 3

Expand Down Expand Up @@ -102,17 +101,22 @@ def background_percent_complete(report, run_dir, is_running):
max_frame = data.models.neuralNet.epochs
res.frameCount = int(m.group(1)) + 1
res.percentComplete = float(res.frameCount) * 100 / max_frame
error = _parse_activait_log_file(run_dir)
error = _range_error(_parse_activate_log(run_dir))
if error:
res.error = error
return res


def _parse_activait_log_file(run_dir):
    # Scan the run log line-by-line for a model-training assertion failure;
    # when found, delegate to _range_error to build the user-facing message.
    for l in pkio.read_text(run_dir.join(_LOG_FILE)).split("\n"):
        if re.search("AssertionError: Model training failed due to:", l):
            return _range_error(l)
    # No recognized error in the log
    return ""
def _parse_activate_log(run_dir, log_filename="run.log"):
    """Scan the activait run log for known failure messages.

    Args:
        run_dir (py.path): directory containing the log file
        log_filename (str): name of the log file ["run.log"]
    Returns:
        str: matched error text, or "" when nothing matched
    """
    p = template_common.LogParser(
        run_dir,
        log_filename=log_filename,
        default_msg="",
        error_patterns=(
            r"AssertionError: (Model training failed due to .*)",
            r"TypeError: (.*)",
        ),
    )
    return p.parse_for_errors()


def _range_error(error):
Expand Down
25 changes: 9 additions & 16 deletions sirepo/template/cloudmc.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def post_execution_processing(
for f in ply_files:
_SIM_DATA.put_sim_file(sim_id, f, f.basename)
return None
return _parse_run_log(run_dir)
return _parse_cloudmc_log(run_dir)


def python_source_for_model(data, model, qcall, **kwargs):
Expand Down Expand Up @@ -690,21 +690,14 @@ def _is_sbatch_run_mode(data):
return data.models.openmcAnimation.jobRunMode == "sbatch"


def _parse_run_log(run_dir):
    """Return the first "Error:" message from the CloudMC run log.

    Returns "" when the log file is missing, and a generic fallback
    message when the log exists but no error line was found.
    """
    res = ""
    p = run_dir.join(template_common.RUN_LOG)
    if not p.exists():
        return res
    with pkio.open_text(p) as f:
        for line in f:
            # ERROR: Cannot tally flux for an individual nuclide.
            m = re.match(r"^\s*Error:\s*(.*)$", line, re.IGNORECASE)
            if m:
                res = m.group(1)
                break
    if res:
        return res
    return "An unknown error occurred, check CloudMC log for details"
def _parse_cloudmc_log(run_dir, log_filename="run.log"):
    """Extract "Error:" lines from the CloudMC run log.

    Args:
        run_dir (py.path): directory containing the log file
        log_filename (str): name of the log file ["run.log"]
    Returns:
        str: matched error text, or a generic fallback message
    """
    # ERROR: Cannot tally flux for an individual nuclide.
    err_re = re.compile(r"^\s*Error:\s*(.*)$", re.IGNORECASE)
    return template_common.LogParser(
        run_dir,
        log_filename=log_filename,
        error_patterns=(err_re,),
        default_msg="An unknown error occurred, check CloudMC log for details",
    ).parse_for_errors()


def _source_filename(data):
Expand Down
21 changes: 9 additions & 12 deletions sirepo/template/epicsllrf.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,18 +150,6 @@ def _generate_parameters_file(data):
)


def _parse_epics_log(run_dir):
    """Return the EpicsDisconnectError message from the run log, or "".

    NOTE(review): unlike the other log parsers in this PR, this does not
    check that the log file exists before opening it — presumably callers
    guarantee the file is present; confirm.
    """
    res = ""
    with pkio.open_text(run_dir.join(template_common.RUN_LOG)) as f:
        for line in f:
            m = re.match(
                r"sirepo.template.epicsllrf.EpicsDisconnectError:\s+(.+)", line
            )
            if m:
                return m.group(1)
    return res


def _read_epics_data(run_dir, computed_values):
s = run_dir.join(_STATUS_FILE)
if s.exists():
Expand All @@ -182,6 +170,15 @@ def _read_epics_data(run_dir, computed_values):
return PKDict()


def _parse_epics_log(run_dir, log_filename="run.log"):
    """Look for an EPICS disconnect error in the run log.

    Args:
        run_dir (py.path): directory containing the log file
        log_filename (str): name of the log file ["run.log"]
    Returns:
        str: the disconnect message, or "" when none was found
    """
    parser = template_common.LogParser(
        run_dir,
        log_filename=log_filename,
        default_msg="",
        error_patterns=(
            r"sirepo.template.epicsllrf.EpicsDisconnectError:\s+(.+)",
        ),
    )
    return parser.parse_for_errors()


def _set_zcu_signal(server_address, model):
# 'model': {'amp': 32000, 'duration': 1023, 'start': 0},
# 'serverAddress': 'localhost'
Expand Down
27 changes: 10 additions & 17 deletions sirepo/template/madx.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,15 @@ def write_files(self, data, source_path, dest_dir):
return PKDict()


class _MadxLogParser(template_common.LogParser):
    """Log parser for MAD-X output: reports error, warning and fatal lines."""

    def _parse_log_line(self, line):
        """Return the cleaned message for a "+... error/warning/fatal" line.

        Returns None when the line is not a recognized diagnostic.
        """
        # (detect pattern, prefix to strip) pairs; first match wins
        for detect, strip in (
            (r"^\++ (error|warning):", r"^\++ "),
            (r"^\+.*? fatal:", r"^.*? "),
        ):
            if re.search(detect, line, re.IGNORECASE):
                return re.sub(strip, "", line) + "\n"
        return None


class MadxOutputFileIterator(lattice.ModelIterator):
def __init__(self):
self.result = PKDict(
Expand Down Expand Up @@ -357,7 +366,7 @@ def get_data_file(run_dir, model, frame, options):
def post_execution_processing(success_exit, run_dir, **kwargs):
if success_exit:
return None
return _parse_madx_log(run_dir)
return _MadxLogParser(run_dir, log_filename=MADX_LOG_FILE).parse_for_errors()


def prepare_for_client(data, qcall, **kwargs):
Expand Down Expand Up @@ -977,22 +986,6 @@ def _output_info(run_dir):
return res


def _parse_madx_log(run_dir):
    """Collect error, warning and fatal lines from the MAD-X log.

    Returns "" when the log file does not exist or nothing matched.
    """
    path = run_dir.join(MADX_LOG_FILE)
    if not path.exists():
        return ""
    res = ""
    with pkio.open_text(str(path)) as f:
        for line in f:
            # lines like "++++ error: ..." or "++++ warning: ..."
            if re.search(r"^\++ (error|warning):", line, re.IGNORECASE):
                line = re.sub(r"^\++ ", "", line)
                res += line + "\n"
            # lines like "+ ... fatal: ..."
            elif re.search(r"^\+.*? fatal:", line, re.IGNORECASE):
                line = re.sub(r"^.*? ", "", line)
                res += line + "\n"
    return res


def _parse_match_summary(run_dir, filename):
path = run_dir.join(filename)
node_names = ""
Expand Down
29 changes: 12 additions & 17 deletions sirepo/template/opal.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,17 @@ def is_ignore_field(self, field):
return field == "name" or field == self.IS_DISABLED_FIELD


class _OpalLogParser(template_common.LogParser):
    """Log parser for OPAL output: reports "Error...>" lines."""

    def _parse_log_line(self, line):
        """Return the message text from an OPAL error line, else None."""
        if not re.search(r"^Error.*?>\s*[\w\"]", line):
            return None
        msg = re.sub(r"Error.*?>\s*", "", line.rstrip()).rstrip()
        # 1DPROFILE1-DEFAULT messages are noise, not user errors
        if not msg or "1DPROFILE1-DEFAULT" in msg:
            return None
        return msg + "\n"


class OpalOutputFileIterator(lattice.ModelIterator):
def __init__(self, preserve_output_filenames=False):
self.result = PKDict(
Expand Down Expand Up @@ -513,23 +524,7 @@ def _walk_file(h5file, key, step, res):


def parse_opal_log(run_dir):
res = ""
p = run_dir.join((OPAL_OUTPUT_FILE))
if not p.exists():
return res
with pkio.open_text(p) as f:
visited = set()
for line in f:
if re.search(r"^Error.*?>\s*\w", line):
line = re.sub(r"Error.*?>\s*", "", line.rstrip()).rstrip()
if re.search(r"1DPROFILE1-DEFAULT", line):
continue
if line and line not in visited:
res += line + "\n"
visited.add(line)
if res:
return res
return "An unknown error occurred"
return _OpalLogParser(run_dir, log_filename=OPAL_OUTPUT_FILE).parse_for_errors()


def post_execution_processing(success_exit, is_parallel, run_dir, **kwargs):
Expand Down
23 changes: 13 additions & 10 deletions sirepo/template/shadow.py
Original file line number Diff line number Diff line change
Expand Up @@ -938,16 +938,19 @@ def _item_field(item, fields):
return _fields("oe", item, fields)


def _parse_shadow_log(run_dir):
    """Map known Shadow failures in the run log to user-facing messages.

    NOTE(review): implicitly returns None when the log file is missing
    (no explicit return outside the if) — confirm callers accept None.
    """
    if run_dir.join(template_common.RUN_LOG).exists():
        text = pkio.read_text(run_dir.join(template_common.RUN_LOG))
        for line in text.split("\n"):
            # bad reflectivity material raises this deep in shadow3
            if re.search(r"invalid chemical formula", line):
                return "A mirror contains an invalid reflectivity material"
            m = re.search("ValueError: (.*)?", line)
            if m:
                return m.group(1)
        return "an unknown error occurred"
def _parse_shadow_log(run_dir, log_filename="run.log"):
    """Map known Shadow failures in the run log to user-facing messages.

    Args:
        run_dir (py.path): directory containing the log file
        log_filename (str): name of the log file ["run.log"]
    Returns:
        str: a friendly message for a bad reflectivity material, the
            ValueError text, or the LogParser fallback message
    """

    def _errors(patterns, **kwargs):
        # shared LogParser invocation; kwargs may override default_msg
        return template_common.LogParser(
            run_dir,
            log_filename=log_filename,
            error_patterns=patterns,
            **kwargs,
        ).parse_for_errors()

    if _errors((r".*(invalid chemical formula)",), default_msg=""):
        return "A mirror contains an invalid reflectivity material"
    return _errors((r"ValueError: (.*)?",))


def _photon_energy(models):
Expand Down
30 changes: 11 additions & 19 deletions sirepo/template/silas.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,16 @@
_L_SCALE_EQUATION = "numpy.sqrt(numpy.pi) * numpy.sqrt(2) * pulse.sigx_waist"


class _SilasLogParser(template_common.LogParser):
    """Log parser for SILAS runs.

    Reports the text after "Error:" on each matching line; a mesh
    interpolation failure is replaced with a friendlier hint.
    """

    def _parse_log_line(self, line):
        """Return the error message for line, or None when it is not an error."""
        m = re.search(r"^.*Error:\s+(.*)$", line)
        if not m:
            return None
        err = m.group(1)
        if "Unable to evaluate function at point" in err:
            # fix: user-facing message had a typo ("evaulated")
            return "Point evaluated outside of mesh boundary. Consider increasing Mesh Density or Boundary Tolerance."
        return err + "\n"


def background_percent_complete(report, run_dir, is_running):
data = simulation_db.read_json(run_dir.join(template_common.INPUT_BASE_NAME))
res = PKDict(
Expand Down Expand Up @@ -66,7 +76,7 @@ def get_data_file(run_dir, model, frame, options):
def post_execution_processing(success_exit, run_dir, **kwargs):
if success_exit:
return None
return _parse_silas_log(run_dir)
return _SilasLogParser(run_dir).parse_for_errors()


def python_source_for_model(data, model, qcall, **kwargs):
Expand Down Expand Up @@ -577,24 +587,6 @@ def gen(self):
raise AssertionError("Report is unavailable")


def _parse_silas_log(run_dir):
    """Accumulate "Error:" messages from the SILAS run log.

    A mesh-evaluation failure short-circuits with a friendlier hint.
    Returns "" when the log is missing and a generic fallback message
    when the log exists but nothing matched.
    """
    res = ""
    path = run_dir.join(template_common.RUN_LOG)
    if not path.exists():
        return res
    with pkio.open_text(str(path)) as f:
        for line in f:
            m = re.search(r"^.*Error:\s+(.*)$", line)
            if m:
                err = m.group(1)
                if re.search("Unable to evaluate function at point", err):
                    # NOTE(review): "evaulated" is a typo in this user-facing message
                    return "Point evaulated outside of mesh boundary. Consider increasing Mesh Density or Boundary Tolerance."
                res += err + "\n"
    if res:
        return res
    return "An unknown error occurred"


def _convert_laser_pulse_units(laserPulse):
laserPulse.tau_0 = laserPulse.tau_0 / 1e12
laserPulse.tau_fwhm = laserPulse.tau_fwhm / 1e12
Expand Down
17 changes: 1 addition & 16 deletions sirepo/template/srw.py
Original file line number Diff line number Diff line change
Expand Up @@ -596,7 +596,7 @@ def post_execution_processing(
f = _SIM_DATA.ML_OUTPUT
_SIM_DATA.put_sim_file(sim_id, run_dir.join(f), f)
return None
return _parse_srw_log(run_dir)
return template_common.LogParser(run_dir).parse_for_errors()


def prepare_for_client(data, qcall, **kwargs):
Expand Down Expand Up @@ -2167,21 +2167,6 @@ def _machine_learning_percent_complete(run_dir, res):
return res


def _parse_srw_log(run_dir):
    """Concatenate all "Error: ..." messages from the SRW run log.

    Returns "" when the log is missing and a generic fallback message
    when the log exists but nothing matched.
    """
    res = ""
    p = run_dir.join(template_common.RUN_LOG)
    if not p.exists():
        return res
    with pkio.open_text(p) as f:
        for line in f:
            m = re.search(r"Error: (.*)", line)
            if m:
                res += m.group(1) + "\n"
    if res:
        return res
    return "An unknown error occurred"


def _process_rsopt_elements(els):
x = [e for e in els if e.enabled and e.enabled != "0"]
names = []
Expand Down
61 changes: 50 additions & 11 deletions sirepo/template/template_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,40 @@ def __init__(self, **kwargs):
)


class LogParser(PKDict):
    """Scans a run log for error messages.

    Configurable via kwargs (with defaults applied in __init__):
        default_msg (str): returned when the log exists but nothing matched
        error_patterns (tuple): regexes whose group(1) is the error text
        log_filename (str): name of the log file within run_dir
    """

    def __init__(self, run_dir, **kwargs):
        super().__init__(run_dir=run_dir, **kwargs)
        self.pksetdefault(
            default_msg="An unknown error occurred",
            error_patterns=(r"Error: (.*)",),
            log_filename=RUN_LOG,
        )

    def parse_for_errors(self):
        """Collect unique error messages from the log.

        Returns:
            str: concatenated messages; "" when the log is missing;
                default_msg when the log exists but nothing matched
        """
        log = self.run_dir.join(self.log_filename)
        if not log.exists():
            return ""
        seen = set()
        found = []
        with pkio.open_text(log) as f:
            for line in f:
                msg = self._parse_log_line(line)
                # report each distinct message only once
                if msg and msg not in seen:
                    seen.add(msg)
                    found.append(msg)
        return "".join(found) if found else self.default_msg

    def _parse_log_line(self, line):
        """Return newline-terminated group(1) text for every matching pattern."""
        return "".join(
            m.group(1) + "\n"
            for p in self.error_patterns
            if (m := re.search(p, line))
        )


class ModelUnits:
"""Convert model fields from native to sirepo format, or from sirepo to native format.

Expand Down Expand Up @@ -165,6 +199,21 @@ def __scale_model(self, name, model, is_native):
return model


class _MPILogParser(LogParser):
    """Log parser for MPI logs: extracts the final error of a traceback.

    NOTE: returns None (not "" or default_msg) when the log is missing
    or no traceback error is found, matching the historical behavior of
    parse_mpi_log.
    """

    def parse_for_errors(self):
        """Return the "...Error: msg" text of a traceback, or None."""
        path = self.run_dir.join(self.log_filename)
        if not path.exists():
            return None
        m = re.search(
            r"^Traceback .*?^\w*Error: (.*?)\n",
            pkio.read_text(path),
            re.MULTILINE | re.DOTALL,
        )
        return m.group(1) if m else None


class NamelistParser:
def parse_text(self, text):
import f90nml
Expand Down Expand Up @@ -579,17 +628,7 @@ def parse_enums(enum_schema):


def parse_mpi_log(run_dir):
e = None
f = run_dir.join(sirepo.const.MPI_LOG)
if f.exists():
m = re.search(
r"^Traceback .*?^\w*Error: (.*?)\n",
pkio.read_text(f),
re.MULTILINE | re.DOTALL,
)
if m:
e = m.group(1)
return e
return _MPILogParser(run_dir, log_filename=sirepo.const.MPI_LOG).parse_for_errors()


def read_dict_from_h5(file_path, h5_path=None):
Expand Down
Loading
Loading