Skip to content

Commit

Permalink
Remove multiprocessing
Browse files · Browse the repository at this point in the history
  • Loading branch information
Reillyhewitson committed Jan 10, 2023
1 parent a0349ba commit a4fa2e7
Showing 1 changed file with 24 additions and 19 deletions.
43 changes: 24 additions & 19 deletions util/icat_db_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
dest="years",
help="Provide number of years to generate",
type=int,
default=10,
default=5,
)
args = parser.parse_args()
SEED = args.seed
Expand Down Expand Up @@ -308,7 +308,7 @@ def generate_groupings(self, i):

class InvestigationGenerator(Generator):
tier = 2
amount = 3 * FacilityCycleGenerator.amount # 3 Investigations per cycle (120)
amount = 3 * FacilityCycleGenerator.amount # 3 Investigations per cycle (60)

def generate(self):
for i in range(1, self.amount):
Expand Down Expand Up @@ -470,9 +470,11 @@ class KeywordGenerator(Generator):
keywords = []

def generate(self):
timer = datetime.datetime.now()
with multiprocessing.get_context("spawn").Pool() as pool:
pool.map(KeywordGenerator.generate_keyword, range(1, self.amount))
# timer = datetime.datetime.now()
# with multiprocessing.get_context("spawn").Pool() as pool:
# pool.map(KeywordGenerator.generate_keyword, range(1, self.amount))
for i in range(1, self.amount):
KeywordGenerator.generate_keyword(i)
self.client.createMany(self.keywords)

@classmethod
Expand Down Expand Up @@ -596,7 +598,7 @@ def generate_study_investigation(self, i):

class DatasetGenerator(Generator):
tier = 4
amount = InvestigationGenerator.amount * 2 # Two Datasets per investigation (240)
amount = InvestigationGenerator.amount * 2 # Two Datasets per investigation (120)

def generate(self):
for i in range(1, self.amount):
Expand Down Expand Up @@ -652,14 +654,15 @@ def generate_dataset_parameter(self, i):

class DatafileGenerator(Generator):
tier = 5
amount = DatasetGenerator.amount * 15 # 15 files per Dataset (3600)
amount = DatasetGenerator.amount * 15 # 15 files per Dataset (1800)
datafiles = []

def generate(self):
self.client.refresh()
timer = datetime.datetime.now()
with multiprocessing.get_context("spawn").Pool() as pool:
pool.map(DatafileGenerator.generate_datafile, range(1, self.amount))
for i in range(1, self.amount):
DatafileGenerator.generate_datafile(i)
# timer = datetime.datetime.now()
# with multiprocessing.get_context("spawn").Pool() as pool:
# pool.map(DatafileGenerator.generate_datafile, range(1, self.amount))

@classmethod
def generate_datafile(cls, i):
Expand Down Expand Up @@ -738,16 +741,18 @@ def generate_sample_parameter(self, i):

class DatafileParameterGenerator(Generator):
tier = 6
amount = DatafileGenerator.amount # 3600
amount = DatafileGenerator.amount # 1800

def generate(self):
self.client.refresh()
timer = datetime.datetime.now()
with multiprocessing.get_context("spawn").Pool() as pool:
pool.map(
DatafileParameterGenerator.generate_datafile_parameter,
range(1, self.amount),
)
for i in range(1, self.amount):
DatafileParameterGenerator.generate_datafile_parameter(i)

# timer = datetime.datetime.now()
# with multiprocessing.get_context("spawn").Pool() as pool:
# pool.map(
# DatafileParameterGenerator.generate_datafile_parameter,
# range(1, self.amount),
# )

@classmethod
def generate_datafile_parameter(cls, i):
Expand Down

0 comments on commit a4fa2e7

Please sign in to comment.