-
Notifications
You must be signed in to change notification settings - Fork 5
/
pickle_dataset_multiprocessing.py
executable file
·40 lines (31 loc) · 1.26 KB
/
pickle_dataset_multiprocessing.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
#!/usr/bin/python3
from pickle_dataset import loadDataSet, saveDataSet, getFiles, checkPath
from multiprocessing import Process
import multiprocessing
import os
import platform
# For this problem the validation and test data provided by the concerned authority did not have labels,
# so the training data was split into train, test and validation sets
# Absolute path of the directory containing this script, resolved against the
# current working directory (handles being launched from elsewhere).
# NOTE(review): __location__ is not referenced anywhere in this file — presumably
# used by an earlier revision or another module; confirm before removing.
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
def mpStart(gender, arg):
    """Worker entry point: load one gender group's dataset and persist it.

    Parameters
    ----------
    gender : str
        Group label ("female" or "male") forwarded to saveDataSet.
    arg : object
        File listing for the group, as produced by getFiles(), passed
        straight through to loadDataSet.
    """
    images, genders, ages = loadDataSet(arg)
    saveDataSet(gender, images, genders, ages)
if __name__ == "__main__":
    # Create the directories the processed images will be saved into.
    checkPath()
    print("Loading data set...")
    female, male = getFiles()

    num_processes = multiprocessing.cpu_count()
    if platform.system() == "Linux" and num_processes > 1:
        # Parallel path: one worker process per gender group.
        print("Processing images...")
        processes = [
            Process(target=mpStart, args=("female", female)),
            Process(target=mpStart, args=("male", male)),
        ]
        for p in processes:
            p.start()
        for p in processes:
            p.join()
    else:
        # BUGFIX: the original branch only printed a message and exited,
        # so on non-Linux systems (or single-core machines) the data set
        # was loaded but never processed. Fall back to doing the same
        # work sequentially so output is produced everywhere.
        print("Cannot split the load across processors; processing sequentially...")
        mpStart("female", female)
        mpStart("male", male)
    exit(0)