hopefully fix numpy memory leak
Ryan Zotti authored and Ryan Zotti committed Aug 15, 2016
1 parent 9db7109 commit 5eed950
Showing 2 changed files with 90 additions and 0 deletions.
43 changes: 43 additions & 0 deletions util.py
@@ -1,6 +1,12 @@
import subprocess
from os import listdir
from os.path import isfile
import numpy as np
import os

def remove_file_if_exists(file_path):
    if os.path.exists(file_path):
        os.remove(file_path)

def dir_count(dir):
    shell_cmd = 'ls -ltr {dir} | wc -l'.format(dir=dir)
@@ -39,7 +45,44 @@ def cleanup(dir):
        pass  # do not accidentally delete entire file system!


def shuffle_dataset(predictors, targets):
    record_count = predictors.shape[0]
    shuffle_index = np.arange(record_count)
    np.random.shuffle(shuffle_index)
    predictors = predictors[shuffle_index]
    targets = targets[shuffle_index]
    return predictors, targets


def window(batch_index,batch_size,window_size,predictors,targets):
    frame_index = batch_size * batch_index
    windowed_predictors = []
    windowed_targets = []
    for record_index in range(batch_size):
        frame_index += record_index
        windowed_predictors.append(predictors[frame_index:frame_index + window_size])
        windowed_targets.append(targets[frame_index + window_size])
    windowed_predictors = np.array(windowed_predictors)
    windowed_targets = np.array(windowed_targets)
    windowed_predictors, windowed_targets = shuffle_dataset(windowed_predictors,windowed_targets)
    return windowed_predictors, windowed_targets

def windowed_dataset(inpt,otpt,bindx,pred_nm,trgt_nm):
    cmd = '''python windowed_dataset.py -i {inpt} -o {otpt} -b {bindx} -p {pred_nm} -t {trgt_nm}
    '''.format(inpt=inpt,otpt=otpt,bindx=bindx,pred_nm=pred_nm,trgt_nm=trgt_nm)
    shell_command(cmd)
    npzfile = np.load(otpt+'/window.npz')
    predictors = npzfile[pred_nm]
    targets = npzfile[trgt_nm]
    return predictors, targets


def record_count(file_path):
    result = int(str(shell_command('cat '+file_path)).replace("b","").replace("'",""))
    return result

if __name__ == '__main__':
    tensorboard_basedir = '/Users/ryanzotti/Documents/repos/Self_Driving_RC_Car/tf_visual_data/runs/'
    abc = record_count('/Users/ryanzotti/Documents/repos/Self_Driving_RC_Car/shape')
    cleanup(tensorboard_basedir)
    mkdir_tfboard_run_dir(tensorboard_basedir)
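
The approach to the leak in this diff: windowing is pushed into a short-lived child process (windowed_dataset.py), so whatever numpy memory that process allocates is returned to the OS when it exits; the parent only loads the saved window.npz result back. A hedged usage sketch of the new helper, where the output directory and array key names are illustrative assumptions, not from the commit:

# Illustrative call only; otpt, pred_nm, and trgt_nm values are assumptions.
predictors, targets = windowed_dataset(
    inpt='final_processed_data_3_channels.npz',  # the .npz the child script reads
    otpt='/tmp',                                 # dir where the child writes window.npz
    bindx=0,                                     # which batch of windows to build
    pred_nm='predictors',                        # key of the predictor array in window.npz
    trgt_nm='targets')                           # key of the target array in window.npz
print(predictors.shape, targets.shape)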
47 changes: 47 additions & 0 deletions windowed_dataset.py
@@ -0,0 +1,47 @@
import numpy as np
import random

input_file_path = '/Users/ryanzotti/Documents/repos/Self_Driving_RC_Car/final_processed_data_3_channels.npz'
npzfile = np.load(input_file_path)

# training data
train_predictors = npzfile['train_predictors']
train_targets = npzfile['train_targets']

def shuffle_dataset(predictors, targets):
    record_count = predictors.shape[0]
    shuffle_index = np.arange(record_count)
    np.random.shuffle(shuffle_index)
    predictors = predictors[shuffle_index]
    targets = targets[shuffle_index]
    return predictors, targets

def window(batch_index,batch_size,window_size,predictors,targets):
    frame_index = batch_size * batch_index
    windowed_predictors = []
    windowed_targets = []
    for record_index in range(batch_size):
        frame_index += record_index
        windowed_predictors.append(predictors[frame_index:frame_index + window_size])
        windowed_targets.append(targets[frame_index + window_size])

    windowed_predictors = np.array(windowed_predictors)
    windowed_targets = np.array(windowed_targets)
    windowed_predictors, windowed_targets = shuffle_dataset(windowed_predictors,windowed_targets)

    '''
    for record_index in range(batch_size):
        for frame_index, frame in enumerate(windowed_predictors[record_index]):
            cv2.imshow('frame', frame)
            print()
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    '''

    return windowed_predictors, windowed_targets

# 15.83, 23.57
batch_index = 0
predictors, target = window(batch_index, 50, 50, train_predictors, train_targets)

print('test')

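One gap worth flagging: as committed, windowed_dataset.py hard-codes its input path and never reads the -i/-o/-b/-p/-t flags that util.windowed_dataset passes, nor does it write the window.npz file the caller loads back. A minimal sketch of that missing glue, assuming argparse and np.savez (the dest names and this wiring are hypothetical, not part of the commit):

# Hypothetical glue, not part of this commit.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-i', dest='inpt')             # input .npz path
parser.add_argument('-o', dest='otpt')             # output directory
parser.add_argument('-b', dest='bindx', type=int)  # batch index
parser.add_argument('-p', dest='pred_nm')          # predictor array key
parser.add_argument('-t', dest='trgt_nm')          # target array key
args = parser.parse_args()

npzfile = np.load(args.inpt)
train_predictors = npzfile['train_predictors']
train_targets = npzfile['train_targets']

predictors, targets = window(args.bindx, 50, 50, train_predictors, train_targets)
# np.savez appends .npz, so this writes otpt/window.npz under the caller's keys.
np.savez(args.otpt + '/window', **{args.pred_nm: predictors, args.trgt_nm: targets})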