data.py
import glob

import tensorflow as tf

# Let tf.data choose the degree of parallelism for the map stages at runtime.
AUTOTUNE = tf.data.experimental.AUTOTUNE


class DataSet:
    """Builds a tf.data input pipeline over a directory of JPEG images."""

    def __init__(self, storage_dir, img_width=64, img_height=64):
        self.storage_dir = storage_dir
        self.width = img_width
        self.height = img_height

    def build(self, batch_size, buffer_size=10000, transformation="standardize"):
        image_paths = glob.glob(self.storage_dir + "/*.jpg")
        ds = tf.data.Dataset.from_tensor_slices(image_paths)
        ds = ds.map(tf.io.read_file, num_parallel_calls=AUTOTUNE)
        # Decode with a fixed channel count so every element has the same
        # shape; otherwise a mix of grayscale and RGB JPEGs breaks batching.
        ds = ds.map(lambda img: tf.image.decode_jpeg(img, channels=3),
                    num_parallel_calls=AUTOTUNE)
        ds = ds.map(lambda img: tf.image.resize(img, [self.height, self.width]),
                    num_parallel_calls=AUTOTUNE)
        if transformation == "normalize":
            ds = ds.map(self._normalize, num_parallel_calls=AUTOTUNE)
        elif transformation == "standardize":
            ds = ds.map(self._standardize, num_parallel_calls=AUTOTUNE)
        # Cache the decoded images, then shuffle; drop_remainder keeps every
        # batch a fixed size, which many training loops assume.
        ds = ds.cache().shuffle(buffer_size)
        ds = ds.batch(batch_size, drop_remainder=True)
        return ds, len(image_paths) // batch_size

    def _normalize(self, img):
        # Scale pixel values from [0, 255] to [0, 1].
        return img / 255.

    def _standardize(self, img):
        # Scale pixel values from [0, 255] to [-1, 1].
        return img / 127.5 - 1.
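

# A minimal usage sketch, assuming a directory of JPEGs exists at "./images";
# the path, batch size, and printout below are illustrative and not part of
# the original file.
if __name__ == "__main__":
    dataset = DataSet("./images", img_width=64, img_height=64)
    train_ds, steps_per_epoch = dataset.build(batch_size=32)
    for batch in train_ds.take(1):
        # With channels=3 the batch shape is (32, 64, 64, 3).
        print(batch.shape, "steps per epoch:", steps_per_epoch)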