# processingImages.py
# Image preprocessing utilities: DoG filtering, cropping, patch extraction, PCA.
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
import numpy as np
import skimage
import os
def dogfilterimage(image, sigma0=1.0, sigma1=6.0):
    """Apply a Difference-of-Gaussians (DoG) band-pass filter.

    If *image* has three dimensions, only the first channel is used.
    The result is the narrow-blur image minus the wide-blur image,
    which suppresses both fine noise and low-frequency illumination.
    """
    # multi-channel input: keep channel 0 only
    if image.ndim == 3:
        image = image[:, :, 0]
    narrow = skimage.filters.gaussian(image, sigma0)
    wide = skimage.filters.gaussian(image, sigma1)
    return narrow - wide
def cropimage(image, crop):
    """Return the rectangular sub-image selected by *crop*.

    *crop* holds [left, top, right, bottom] pixel bounds; the right and
    bottom edges are exclusive, matching normal slice semantics.
    """
    left, top = crop[0], crop[1]
    right, bottom = crop[2], crop[3]
    return image[top:bottom, left:right]
def getfilenames(filename):
    """Read a whitespace-separated listing file.

    Each line is stripped of its trailing newline and split on whitespace,
    so the return value is a list of token lists (one list per line), not
    a flat list of filename strings.

    Fix: the original called open() without ever closing the file handle;
    a context manager now guarantees the file is closed.
    """
    with open(filename) as fh:
        return [line.rstrip('\n').split() for line in fh]
def load_images(filenames, dogfilter, crop, test):
    """load_images: reads images into a numpy array
    Arguments:
    filenames: iterable of image file paths readable by skimage.io.imread
    dogfilter: boolean flag indicating if DoG filtering is used
    crop: array that specifies cropping bounds
    (upper left corner x, upper left corner y, lower right corner x, lower right corner y).
    If no cropping, then set to None.
    test: Flag used to indicate testing mode. More specifically, during test mode no augmentation is applied.
    When test is False, then image augmentation is used.
    Returns:
    numpy array of shape (n or 2*n, h, w, 3); each image is normalized to
    [0, 255] and the grayscale plane is replicated across 3 channels.
    """
    n = len(filenames)  # number of images
    # assumes all input images are 360x280 — TODO confirm against the dataset
    h = 360  # image height
    w = 280  # image width
    # change height and width if cropping bounds are specified
    if crop is not None:
        h = crop[3]-crop[1]
        w = crop[2]-crop[0]
    # if test flag is False, then use image augmentation
    # otherwise, no image augmentation is applied
    # (training doubles the array: original at 2*i, blurred copy at 2*i+1)
    if test is False:
        images = np.zeros((2*n, h, w))
    else:
        images = np.zeros((n, h, w))
    for i, f in enumerate(filenames):
        # read image
        image = skimage.io.imread(f)
        # if training, do augmentation (blur image)
        # image_ holds the augmented (Gaussian sigma=1.0) copy
        if test is False:
            image_ = skimage.filters.gaussian(image, 1.0)
        # apply DoG filtering (collapses color images to a single channel)
        if dogfilter is True:
            if test is False:
                image_ = dogfilterimage(image_)
            image = dogfilterimage(image)
        # crop image
        if crop is not None:
            if test is False:
                image_ = cropimage(image_, crop)
            image = cropimage(image, crop)
        # normalize image between [0, 255]
        # color images (only possible when dogfilter is off): use channel 1 —
        # presumably the green channel; verify against the imaging pipeline
        if len(image.shape) == 3:
            if test is False:
                images[2*i, :, :] = (image[:, :, 1] - image[:, :, 1].min()) / (image[:, :, 1].max() - image[:, :, 1].min()) * 255.0
                images[2*i+1, :, :] = (image_[:, :, 1] - image_[:, :, 1].min()) / (image_[:, :, 1].max() - image_[:, :, 1].min()) * 255.0
            else:
                images[i, :, :] = (image[:, :, 1] - image[:, :, 1].min()) / (image[:, :, 1].max() - image[:, :, 1].min()) * 255.0
        else:
            if test is False:
                images[2*i, :, :] = (image - image.min()) / (image.max() - image.min()) * 255.0
                images[2*i+1, :, :] = (image_ - image_.min()) / (image_.max() - image_.min()) * 255.0
            else:
                # constant image would divide by zero below; report it and stop.
                # NOTE(review): bare exit() kills the whole process — consider
                # raising ValueError instead; also this guard exists only in
                # this grayscale/test branch, not in the others — confirm intent
                if image.min() == image.max():
                    print('{}) {}'.format(i, os.path.basename(f)), end="\n")
                    exit()
                else:
                    images[i, :, :] = (image-image.min()) / (image.max()-image.min()) * 255.0
    # replicate grayscale images (Red, Green, and Blue Channels identical)
    images = np.expand_dims(images, axis=3)
    images = np.tile(images, (1, 1, 1, 3))
    return images
def get_image_patches(image, step, size):
    """Extract square patches from a 2-D image on a regular grid.

    Arguments:
    image: 2-D numpy array (h, w)
    step: stride in pixels between neighboring patch origins
    size: side length of each square patch

    Returns:
    keypts: (ndescr, 2) array of patch upper-left (x, y) coordinates
    descrs: (ndescr, size*size) array of flattened patch pixels

    Fix: the crop extent was hard-coded to 20 pixels, so any call with
    size != 20 failed with a reshape ValueError; it now uses `size`.
    """
    h, w = image.shape
    # upper-left coordinates for patches that fit entirely inside the image
    xi = np.arange(0, w - size + 1, step)
    yi = np.arange(0, h - size + 1, step)
    # leftover margin beyond the last patch in each direction
    xrem = w - (xi[-1] + size)
    yrem = h - (yi[-1] + size)
    # shift the grid so the uncovered margin is split evenly on both sides
    xi = xi + int(np.floor(xrem / 2))
    yi = yi + int(np.floor(yrem / 2))
    ndescr = len(xi) * len(yi)
    keypts = np.zeros((ndescr, 2))
    descrs = np.zeros((ndescr, size * size))
    k = 0
    for x in xi:
        for y in yi:
            keypts[k, 0] = x
            keypts[k, 1] = y
            # was: cropimage(image, [x, y, x+20, y+20]) — hard-coded 20
            descrs[k, :] = image[y:y + size, x:x + size].reshape(size * size)
            k += 1
    return keypts, descrs
def extract_patches(images, step=8, size=20):
    """Extract patch descriptors for every image in a batch.

    Arguments:
    images: (n, h, w, c) numpy array; only channel 0 of each image is used
    step: stride in pixels between patches
    size: patch side length

    Returns:
    feats: (n, ndescr, size*size) flattened patches per image
    keypts: (n, ndescr, 2) patch upper-left (x, y) coordinates per image

    Fixes: an empty batch previously left feats/keypts unbound and raised
    UnboundLocalError at return; the probe call for image 0 was also
    recomputed inside the loop — its result is now reused.
    """
    n = images.shape[0]
    if n == 0:
        # no images: return empty, consistently-shaped arrays
        return np.zeros((0, 0, size * size)), np.zeros((0, 0, 2))
    # probe the first image to learn the per-image descriptor count
    d, f = get_image_patches(images[0, :, :, 0], step=step, size=size)
    ndescr, descrl = f.shape
    feats = np.zeros((n, ndescr, descrl))
    keypts = np.zeros((n, ndescr, 2))
    feats[0, :, :] = f
    keypts[0, :, :] = d
    for i in range(1, n):
        d, f = get_image_patches(images[i, :, :, 0], step=step, size=size)
        feats[i, :, :] = f
        keypts[i, :, :] = d
    return feats, keypts
def pca_reduce(feats, n_components, trans=None, scaler=None):
    """Reduce feature dimensionality via Principal Components Analysis.

    When *scaler* / *trans* are None (training), a StandardScaler and a
    PCA transform are fitted on *feats*; otherwise (test data) the
    previously fitted objects are applied as-is. Both are returned so the
    caller can reuse them on held-out data.
    """
    x = feats
    # PCA expects standardized inputs; fit the scaler only on training data
    if scaler is None:
        scaler = StandardScaler()
        scaler.fit(x)
    x = scaler.transform(x)
    # fit the PCA projection only on training data
    if trans is None:
        trans = PCA(n_components=n_components, svd_solver='full')
        trans.fit(x)
    reduced = trans.transform(x)
    return reduced, trans, scaler