Skip to content

Commit

Permalink
WIP: moved try/except retry logic into _from_file_attempt
Browse files Browse the repository at this point in the history
  • Loading branch information
mpvanderschelling committed Feb 7, 2025
1 parent f61fa43 commit 3deba43
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 50 deletions.
4 changes: 2 additions & 2 deletions src/f3dasm/_src/_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,8 +464,8 @@ def copy_object(object_path: Path,
if new_location.exists():

stem, suffix = object_path.stem, object_path.suffix
while (new_project_dir / EXPERIMENTDATA_SUBFOLDER /
object_path.parent / f"{stem}{suffix}").exists():
while (new_project_dir / EXPERIMENTDATA_SUBFOLDER
/ object_path.parent / f"{stem}{suffix}").exists():
try:
stem = str(int(stem) + 1) # Increment stem as integer
except ValueError:
Expand Down
40 changes: 20 additions & 20 deletions src/f3dasm/_src/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@
from pathos.helpers import mp

# Local
from ._io import MAX_TRIES
from .errors import DecodeError, EmptyFileError, ReachMaximumTriesError
from .logger import logger

# Authorship & Credits
Expand Down Expand Up @@ -384,24 +382,26 @@ def _evaluate_cluster(
NoOpenJobsError
Raised when there are no open jobs left
"""
# Retrieve the updated experimentdata object from disc
tries = 0
while tries <= MAX_TRIES:
try:
data = type(data).from_file(data.project_dir)
break
except FileNotFoundError: # If not found, store current
data.store()
break
except (EmptyFileError, DecodeError):
tries += 1
logger.debug((
f"Error reading a file, retrying"
f" {tries+1}/{MAX_TRIES}"
))
if tries >= MAX_TRIES:
raise ReachMaximumTriesError(file_path=data.project_dir,
max_tries=MAX_TRIES)
# # Retrieve the updated experimentdata object from disc
# tries = 0
# while tries <= MAX_TRIES:
# try:
# data = type(data).from_file(data.project_dir)
# break
# except FileNotFoundError: # If not found, store current
# data.store()
# break
# except (EmptyFileError, DecodeError):
# tries += 1
# logger.debug((
# f"Error reading a file, retrying"
# f" {tries+1}/{MAX_TRIES}"
# ))
# if tries >= MAX_TRIES:
# raise ReachMaximumTriesError(file_path=data.project_dir,
# max_tries=MAX_TRIES)

data = type(data).from_file(data.project_dir)

get_open_job = data.access_file(type(data).get_open_job)
store_experiment_sample = data.access_file(
Expand Down
70 changes: 42 additions & 28 deletions src/f3dasm/_src/experimentdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,24 +299,28 @@ def wrapper_func(project_dir: Path, *args, **kwargs) -> None:

# If the lock has been acquired:
with lock:
tries = 0
while tries <= MAX_TRIES:
try:
# Load a fresh instance of ExperimentData from file
loaded_self = ExperimentData.from_file(
self.project_dir)
break
# Catch racing conditions
except (EmptyFileError, DecodeError):
tries += 1
logger.debug((
f"Error reading a file, retrying"
f" {tries+1}/{MAX_TRIES}"))
sleep(random.uniform(0.5, 2.5))

if tries >= MAX_TRIES:
raise ReachMaximumTriesError(file_path=self.project_dir,
max_tries=tries)
# tries = 0
# while tries <= MAX_TRIES:
# try:
# # Load a fresh instance of ExperimentData from file
# loaded_self = ExperimentData.from_file(
# self.project_dir)
# break
# # Catch racing conditions
# except (EmptyFileError, DecodeError):
# tries += 1
# logger.debug((
# f"Error reading a file, retrying"
# f" {tries+1}/{MAX_TRIES}"))
# sleep(random.uniform(0.5, 2.5))

# if tries >= MAX_TRIES:
# raise ReachMaximumTriesError(file_path=self.project_dir,
# max_tries=tries)

# Load a fresh instance of ExperimentData from file
loaded_self = ExperimentData.from_file(
self.project_dir)

args = (loaded_self,) + args[1:]
value = operation(*args, **kwargs)
Expand Down Expand Up @@ -1534,7 +1538,8 @@ def x0_factory(experiment_data: ExperimentData,
return x0.reset_index()


def _from_file_attempt(project_dir: Path, max_tries: int = MAX_TRIES
                       ) -> ExperimentData:
    """Attempt to create an ExperimentData object
    from .csv and .pkl files, retrying on transient read errors.

    Parameters
    ----------
    project_dir : Path
        Project directory containing the experiment-data subfolder.
    max_tries : int, optional
        Maximum number of read attempts before giving up,
        by default MAX_TRIES.

    Returns
    -------
    ExperimentData
        ExperimentData object reconstructed from the domain, input,
        output and jobs files on disk.

    Raises
    ------
    ReachMaximumTriesError
        If the files could not be read after ``max_tries`` attempts
        (e.g. because another process was writing them concurrently).
    """
    subdirectory = project_dir / EXPERIMENTDATA_SUBFOLDER

    # Retrieve the updated experimentdata object from disk. Other
    # processes may be writing the same files at the same time, so a
    # partially written file can surface as EmptyFileError/DecodeError;
    # back off for a random interval and retry up to max_tries times.
    tries = 0
    while tries < max_tries:
        try:
            return ExperimentData(
                domain=subdirectory / DOMAIN_FILENAME,
                input_data=subdirectory / INPUT_DATA_FILENAME,
                output_data=subdirectory / OUTPUT_DATA_FILENAME,
                jobs=subdirectory / JOBS_FILENAME,
                project_dir=project_dir)
        except (EmptyFileError, DecodeError):
            tries += 1
            # tries is already incremented here, so log it as-is and
            # against the *parameter* (not the module constant), keeping
            # the reported "attempt/limit" count accurate for callers
            # that pass a custom max_tries.
            logger.debug(
                f"Error reading a file, retrying {tries}/{max_tries}")
            # Randomized back-off de-synchronizes competing processes.
            sleep(random.uniform(0.5, 2.5))

    raise ReachMaximumTriesError(file_path=subdirectory,
                                 max_tries=max_tries)


def convert_numpy_to_dataframe_with_domain(
Expand Down

0 comments on commit 3deba43

Please sign in to comment.