# io_operations.py
# External Imports
import os
import sys
import pandas as pd

# Project Level Imports
import config
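
# NOTE: config.cfg is assumed, from its use below, to be a dict-like object
# exposing a "dataset_save_loc" key, e.g.:
#   cfg = {"dataset_save_loc": "datasets"}
# The project's actual config module is not shown here.
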
# Save a dataset from a Pandas DataFrame to the specified location (as CSV)
def saveDataset(dataset, filename, origin):
    print("Saving File ("+filename+")...")
    # Join the paths
    saveLoc = os.path.join(config.cfg["dataset_save_loc"], origin)
    filePath = os.path.join(saveLoc, filename)
    # Check existence of the dataset directory (e.g. RAVDESS or SAVEE)
    if not os.path.exists(saveLoc):
        print("IO Log - Save Directory was missing, now created.")
        os.makedirs(saveLoc)
    # Append an incrementing counter so an existing file is never overwritten
    # (e.g. demo.csv, demo-1.csv, demo-2.csv, ...)
    basePath = filePath
    i = 1
    while os.path.exists(filePath+".csv"):
        filePath = basePath+"-"+str(i)
        i += 1
    dataset.to_csv(filePath+".csv")
    print("IO Log -", filePath+".csv", "Saved to disk.")

# Load a dataset from disk into a Pandas DataFrame
def loadDataset(filename, origin):
    print("Loading File ("+filename+")...")
    # Join the paths
    saveLoc = os.path.join(config.cfg["dataset_save_loc"], origin)
    filePath = os.path.join(saveLoc, filename)
    # Check existence of the dataset directory (e.g. RAVDESS or SAVEE)
    if not os.path.exists(saveLoc):
        print("IO ERROR - Save Directory ("+saveLoc+") was missing.")
        sys.exit(-1)
    # Try to read the file
    try:
        return pd.read_csv(filePath+".csv", header=[0], index_col=[0])
    except FileNotFoundError:
        print("IO ERROR - File ("+filePath+".csv) was not found.")
        sys.exit(-1)

# Check whether a file exists under the dataset save location
# Note: no extension is appended here (unlike saveDataset/loadDataset),
# so the caller must include it in filename
def checkIfFileExists(filename, origin):
    # Join the paths
    saveLoc = os.path.join(config.cfg["dataset_save_loc"], origin)
    filePath = os.path.join(saveLoc, filename)
    return os.path.exists(filePath)

# Save a dataset from a Pandas DataFrame to the specified location (as pickle)
def savePickle(dataset, filename, origin):
    print("Saving File ("+filename+")...")
    # Join the paths
    saveLoc = os.path.join(config.cfg["dataset_save_loc"], origin)
    filePath = os.path.join(saveLoc, filename)
    # Check existence of the dataset directory (e.g. RAVDESS or SAVEE)
    if not os.path.exists(saveLoc):
        print("IO Log - Save Directory was missing, now created.")
        os.makedirs(saveLoc)
    # Append an incrementing counter so an existing file is never overwritten
    # (e.g. demo.pickle, demo-1.pickle, demo-2.pickle, ...)
    basePath = filePath
    i = 1
    while os.path.exists(filePath+".pickle"):
        filePath = basePath+"-"+str(i)
        i += 1
    dataset.to_pickle(filePath+".pickle")
    print("IO Log -", filePath+".pickle", "Saved to disk.")

# Load a pickle file into a Pandas object
def loadPickle(filename, origin):
    print("Loading File (" + filename + ")...")
    # Join the paths
    saveLoc = os.path.join(config.cfg["dataset_save_loc"], origin)
    filePath = os.path.join(saveLoc, filename)
    # Check existence of the dataset directory (e.g. RAVDESS or SAVEE)
    if not os.path.exists(saveLoc):
        print("IO ERROR - Save Directory ("+saveLoc+") was missing.")
        sys.exit(-1)
    # Try to read the file
    try:
        return pd.read_pickle(filePath+".pickle")
    except FileNotFoundError:
        print("IO ERROR - File ("+filePath+".pickle) was not found.")
        sys.exit(-1)
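
# Minimal usage sketch (illustrative only, not part of the original module):
# assumes config.cfg["dataset_save_loc"] points at a writable directory and
# uses a hypothetical "EXAMPLE" origin sub-directory.
if __name__ == "__main__":
    demo = pd.DataFrame({"feature": [0.1, 0.2], "label": ["happy", "sad"]})
    saveDataset(demo, "demo", "EXAMPLE")     # writes demo.csv (or demo-1.csv, ...)
    loaded = loadDataset("demo", "EXAMPLE")  # reads demo.csv back
    print(loaded)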