forked from DonaldRR/SimpleNet
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
175 lines (150 loc) · 5.69 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
import csv
import logging
import os
import random
import matplotlib.pyplot as plt
import numpy as np
import PIL
import torch
import tqdm
# Module-level logger; handlers/levels are configured by the application entry point.
LOGGER = logging.getLogger(__name__)
def plot_segmentation_images(
    savefolder,
    image_paths,
    segmentations,
    anomaly_scores=None,
    mask_paths=None,
    image_transform=lambda x: x,
    mask_transform=lambda x: x,
    save_depth=4,
):
    """Generate anomaly segmentation images.

    For each input image a figure with the image, (optionally) its ground-truth
    mask, and the predicted segmentation is written to ``savefolder``.

    Args:
        savefolder: [str] Folder the rendered figures are written to.
        image_paths: List[str] List of paths to images.
        segmentations: [List[np.ndarray]] Generated anomaly segmentations.
        anomaly_scores: [List[float]] Anomaly scores for each image (currently
            unused in the rendering; kept for interface compatibility).
        mask_paths: [List[str]] List of paths to ground truth masks.
        image_transform: [function or lambda] Optional transformation of images.
        mask_transform: [function or lambda] Optional transformation of masks.
        save_depth: [int] Number of path-strings to use for image savenames.

    NOTE(review): the plotting assumes the transformed image/mask arrays are
    channel-first (C, H, W) — they are transposed to (H, W, C) for imshow.
    Confirm against the transforms used by callers.
    """
    if mask_paths is None:
        mask_paths = ["-1" for _ in range(len(image_paths))]
    masks_provided = mask_paths[0] != "-1"
    if anomaly_scores is None:
        anomaly_scores = ["-1" for _ in range(len(image_paths))]

    os.makedirs(savefolder, exist_ok=True)

    for image_path, mask_path, anomaly_score, segmentation in tqdm.tqdm(
        zip(image_paths, mask_paths, anomaly_scores, segmentations),
        total=len(image_paths),
        desc="Generating Segmentation Images...",
        leave=False,
    ):
        image = PIL.Image.open(image_path).convert("RGB")
        image = image_transform(image)
        if not isinstance(image, np.ndarray):
            image = image.numpy()

        if masks_provided:
            if mask_path is not None:
                mask = PIL.Image.open(mask_path).convert("RGB")
                mask = mask_transform(mask)
                if not isinstance(mask, np.ndarray):
                    mask = mask.numpy()
            else:
                mask = np.zeros_like(image)

        savename = image_path.split("/")
        savename = "_".join(savename[-save_depth:])
        savename = os.path.join(savefolder, savename)

        # BUG FIX: the original always drew into axes[1]/axes[2] even when no
        # masks were provided, in which case only two subplots exist and `mask`
        # is undefined (IndexError/NameError). Only draw the mask panel when a
        # mask is actually available.
        n_panels = 2 + int(masks_provided)
        f, axes = plt.subplots(1, n_panels)
        axes[0].imshow(image.transpose(1, 2, 0))
        if masks_provided:
            axes[1].imshow(mask.transpose(1, 2, 0))
        axes[n_panels - 1].imshow(segmentation)
        f.set_size_inches(3 * n_panels, 3)
        f.tight_layout()
        f.savefig(savename)
        # Close the figure we created (plt.close() would only close the
        # "current" figure, leaking ours if the current figure changed).
        plt.close(f)
def create_storage_folder(
    main_folder_path, project_folder, group_folder, run_name, mode="iterate"
):
    """Create and return a results directory for a run.

    The directory layout is ``main_folder_path/project_folder/group_folder/run_name``.

    Args:
        main_folder_path: [str] Root folder for all results.
        project_folder: [str] Project-level subfolder.
        group_folder: [str] Group-level subfolder.
        run_name: [str] Name of this run's folder.
        mode: [str] "iterate" appends a counter to the group folder if the
            path already exists; "overwrite" reuses an existing path.

    Returns:
        [str] The (created) save path.

    Raises:
        ValueError: If ``mode`` is neither "iterate" nor "overwrite".
    """
    os.makedirs(main_folder_path, exist_ok=True)
    project_path = os.path.join(main_folder_path, project_folder)
    os.makedirs(project_path, exist_ok=True)
    save_path = os.path.join(project_path, group_folder, run_name)
    if mode == "iterate":
        counter = 0
        while os.path.exists(save_path):
            # BUG FIX: the fallback path used to drop run_name entirely,
            # producing "<project>/<group>_<n>" instead of
            # "<project>/<group>_<n>/<run_name>". Keep the layout consistent.
            save_path = os.path.join(
                project_path, "{}_{}".format(group_folder, counter), run_name
            )
            counter += 1
        os.makedirs(save_path)
    elif mode == "overwrite":
        os.makedirs(save_path, exist_ok=True)
    else:
        # Previously an unknown mode silently returned a path that was never
        # created; fail loudly instead.
        raise ValueError("Unknown mode: {}".format(mode))
    return save_path
def set_torch_device(gpu_ids):
    """Return the torch.device to run on.

    Args:
        gpu_ids: [list] GPU ids; the first entry selects the CUDA device.
            An empty list selects the CPU.
    """
    if not gpu_ids:
        return torch.device("cpu")
    # Only the first id is used; multi-GPU handling happens elsewhere.
    return torch.device("cuda:{}".format(gpu_ids[0]))
def fix_seeds(seed, with_torch=True, with_cuda=True):
    """Seed all relevant RNGs for reproducibility.

    Args:
        seed: [int] Seed value.
        with_torch: Flag. If true, torch-related seeds are fixed.
        with_cuda: Flag. If true, torch+cuda-related seeds are fixed.
    """
    random.seed(seed)
    np.random.seed(seed)
    if not with_torch:
        return
    torch.manual_seed(seed)
    if with_cuda:
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
        # Force cuDNN to pick deterministic algorithms.
        torch.backends.cudnn.deterministic = True
def compute_and_store_final_results(
    results_path,
    results,
    row_names=None,
    column_names=None,
):
    """Store computed results as CSV file and return the per-metric means.

    Args:
        results_path: [str] Where to store result csv.
        results: [List[List]] List of numeric result rows, one per dataset;
            results[i][j] is the value of metric ``column_names[j]`` for row i.
        row_names: [List[str] or None] Optional row labels, one per result row.
        column_names: [List[str] or None] Metric names; defaults to the five
            standard anomaly-detection metrics.

    Returns:
        [dict] Mapping "mean_<metric>" -> mean of that metric over all rows.
    """
    if column_names is None:
        # BUG FIX: the default used to be a mutable list in the signature
        # (shared across calls); build it inside the function instead.
        column_names = [
            "Instance AUROC",
            "Full Pixel AUROC",
            "Full PRO",
            "Anomaly Pixel AUROC",
            "Anomaly PRO",
        ]
    if row_names is not None:
        assert len(row_names) == len(results), "#Rownames != #Result-rows."

    mean_metrics = {}
    for i, result_key in enumerate(column_names):
        mean_metrics[result_key] = np.mean([x[i] for x in results])
        LOGGER.info("{0}: {1:3.3f}".format(result_key, mean_metrics[result_key]))

    savename = os.path.join(results_path, "results.csv")
    # newline="" is required by the csv module; without it the writer emits
    # blank rows on Windows.
    with open(savename, "w", newline="") as csv_file:
        csv_writer = csv.writer(csv_file, delimiter=",")
        header = column_names
        if row_names is not None:
            header = ["Row Names"] + header
        csv_writer.writerow(header)
        for i, result_list in enumerate(results):
            csv_row = result_list
            if row_names is not None:
                csv_row = [row_names[i]] + result_list
            csv_writer.writerow(csv_row)
        # Final row: mean of every metric column.
        mean_scores = list(mean_metrics.values())
        if row_names is not None:
            mean_scores = ["Mean"] + mean_scores
        csv_writer.writerow(mean_scores)

    mean_metrics = {"mean_{0}".format(key): item for key, item in mean_metrics.items()}
    return mean_metrics