In this notebook we compare the time it takes to run the same analysis with the Cython (pre-compiled) version and with the pure-Python pyprocess implementation, using the FFT-based and/or direct cross-correlation methods.
from openpiv import tools, scaling, pyprocess, validation, filters
import numpy as np
import pylab
%matplotlib inline
frame_a = tools.imread( '../test1/exp1_001_a.bmp' )
frame_b = tools.imread( '../test1/exp1_001_b.bmp' )
pylab.imshow(np.c_[frame_a,np.ones((frame_a.shape[0],20)),frame_b],
cmap=pylab.cm.gray)
# Typical set of parameters
window_size = 32 # pixels, IW size in frame A
overlap = 16 # 50% overlap
search_area_size = 32 # pixels, IW in frame B, helps for large displacements
dt = 0.02 # second, 50 Hz, just an example
scaling_factor = 96.52 # micron/pixel
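Before timing the runs it is worth seeing what these numbers imply. The sketch below is only an illustration (not part of the analysis); it assumes that scaling.uniform simply divides coordinates and velocities by scaling_factor, and the displacement value is made up.
# illustration only: convert a hypothetical pixel displacement to a velocity
d_px = 3.0                                # made-up displacement between the frames, pixels
u_px_per_s = d_px / dt                    # what pyprocess returns: pixels per second
u_scaled = u_px_per_s / scaling_factor    # what scaling.uniform is assumed to do later
print(u_px_per_s, u_scaled)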
%%time
# the default correlation is the circular FFT type (faster, less robust)
# by default the correlation is also not normalized, which is faster
# we do not know the signal-to-noise values a priori,
# so we simply discard the lowest 5% of the vectors
# the signal-to-noise ratio is defined here as the ratio of the 1st to the 2nd correlation peak
# all of these parameters need to be checked for a given experiment
# get the displacements in pixel/sec units
u, v, sig2noise = pyprocess.extended_search_area_piv(
frame_a, frame_b,
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak' )
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'circular_default.txt' )
CPU times: user 125 ms, sys: 12.4 ms, total: 137 ms
Wall time: 136 ms
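A note on the threshold used above: since the signal-to-noise values are not known in advance, the cut is placed at the 5th percentile of the measured ratios, so roughly the weakest 5% of the vectors are flagged and later replaced. A minimal sketch with made-up values (not the real data):
import numpy as np
s2n = np.array([1.05, 1.2, 1.4, 1.8, 2.3, 2.9, 3.5, 4.1, 5.0, 6.2])  # hypothetical ratios
thr = np.percentile(s2n, 5)     # the 5th percentile becomes the threshold
flagged = s2n < thr             # these vectors would be replaced by the outlier filter
print(thr, flagged.sum(), 'of', flagged.size, 'vectors flagged')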
%%time
# use normalized_correlation:
# the intensity of both images is normalized before the correlation,
# and the correlation map values fall between 0 and 1
# get the displacements in pixel/sec units
u, v, sig2noise = pyprocess.extended_search_area_piv(
frame_a, frame_b,
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak',
normalized_correlation = True)
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'circular_normalized.txt' )
CPU times: user 136 ms, sys: 12.3 ms, total: 149 ms
Wall time: 148 ms
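Why the normalized correlation stays bounded: once each window is made zero-mean and scaled to unit norm, the correlation of any pair of windows cannot exceed 1 (Cauchy-Schwarz) and equals 1 only for identical windows. A small numpy illustration, not the pyprocess implementation:
import numpy as np

rng = np.random.default_rng(0)
a = rng.random((32, 32))
b = np.roll(a, (2, 3), axis=(0, 1))   # a shifted copy of a

def normalize(w):
    w = w - w.mean()                  # zero mean
    return w / np.linalg.norm(w)      # unit norm

a0, b0 = normalize(a), normalize(b)
print(np.sum(a0 * a0))                # 1.0 for identical windows
print(np.sum(a0 * b0))                # below 1 for the shifted copy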
%%time
# switch to the 'linear' correlation method, which
# zero-pads the windows prior to the correlation;
# it requires a uniform background, therefore
# we normalize the intensity of the images first
u, v, sig2noise = pyprocess.extended_search_area_piv(
pyprocess.normalize_intensity(frame_a),
pyprocess.normalize_intensity(frame_b),
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak',
correlation_method = 'linear')
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'linear_intensity.txt' )
CPU times: user 194 ms, sys: 28.9 ms, total: 223 ms
Wall time: 221 ms
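The difference between the two correlation types is easiest to see in one dimension: the circular (FFT) correlation wraps around the window edges, so a large displacement can alias onto a small one, whereas zero padding ('linear' correlation) preserves the true lag. The sketch below uses plain numpy and says nothing about the pyprocess internals:
import numpy as np

a = np.zeros(16); a[2] = 1.0      # impulse near the left edge
b = np.zeros(16); b[14] = 1.0     # the same impulse, 12 samples away

# circular correlation via FFT: the 12-sample separation aliases onto a lag of 4
circ = np.fft.ifft(np.fft.fft(a) * np.conj(np.fft.fft(b))).real
print(np.argmax(circ))            # 4

# linear correlation via zero padding to twice the length: no wrap-around;
# the peak at 20 corresponds to 20 - 32 = -12, the correct separation
n = 2 * a.size
lin = np.fft.ifft(np.fft.fft(a, n) * np.conj(np.fft.fft(b, n))).real
print(np.argmax(lin))             # 20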
%%time
# add normalized correlation to linear
u, v, sig2noise = pyprocess.extended_search_area_piv(
pyprocess.normalize_intensity(frame_a),
pyprocess.normalize_intensity(frame_b),
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak',
correlation_method='linear',
normalized_correlation=True)
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'linear_normalized.txt' )
CPU times: user 204 ms, sys: 31.8 ms, total: 235 ms
Wall time: 234 ms
%%time
# switch to the extended search: the interrogation window in frame B is
# enlarged to capture larger displacements (see the note after this cell)
search_area_size = 40
u, v, sig2noise = pyprocess.extended_search_area_piv(
frame_a,
frame_b,
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak',
correlation_method='linear',
normalized_correlation=True)
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'linear_normalized_extended.txt' )
CPU times: user 193 ms, sys: 52.3 ms, total: 245 ms
Wall time: 244 ms
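As noted in the cell above, the enlarged window in frame B gives the correlation peak room to move: with a 32 px interrogation window and a 40 px search area there are (40 - 32) / 2 = 4 extra pixels on each side. A rough rule of thumb for choosing the search area from an expected maximum displacement (an assumption, not an OpenPIV recommendation):
# rule of thumb: search_area_size >= window_size + 2 * d_max
window_size = 32
d_max = 4                             # expected maximum displacement, pixels
print(window_size + 2 * d_max)        # -> 40, the value used in this cell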
%%time
# repeat the default circular FFT correlation (not normalized),
# now with the extended search area (search_area_size = 40) still in effect
# as before, the signal-to-noise values are not known a priori,
# so we discard the lowest 5% of the vectors
# the signal-to-noise ratio is the ratio of the 1st to the 2nd correlation peak
# all of these parameters need to be checked for a given experiment
# get the displacements in pixel/sec units
u, v, sig2noise = pyprocess.extended_search_area_piv(
frame_a, frame_b,
window_size=window_size,
overlap=overlap,
dt=dt,
search_area_size=search_area_size,
sig2noise_method='peak2peak' )
# prepare the centers of the IWs, i.e. where the vectors are located
x, y = pyprocess.get_coordinates(frame_a.shape,
search_area_size=search_area_size,
overlap=overlap)
u, v, mask = validation.sig2noise_val( u, v,
sig2noise,
threshold = np.percentile(sig2noise,5))
# removing and filling in the outlier vectors
u, v = filters.replace_outliers( u, v, method='localmean',
max_iter=10,
kernel_size=2)
# rescale the results to millimeters and mm/sec
x, y, u, v = scaling.uniform(x, y, u, v,
scaling_factor=scaling_factor )
# save the data
tools.save(x, y, u, v, sig2noise, mask, 'circular_extended.txt' )
CPU times: user 70.8 ms, sys: 296 µs, total: 71.1 ms
Wall time: 70.2 ms
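With all six result files written we can take a quick look at them side by side. The sketch below is only an illustration; it assumes the files written by tools.save are plain, whitespace-delimited text with a commented header and with x, y, u, v in the first four columns (check one file's header if your OpenPIV version writes a different layout).
import numpy as np

files = ['circular_default.txt', 'circular_normalized.txt', 'linear_intensity.txt',
         'linear_normalized.txt', 'linear_normalized_extended.txt', 'circular_extended.txt']
for name in files:
    data = np.loadtxt(name)           # lines starting with '#' are skipped by default
    u, v = data[:, 2], data[:, 3]
    print(f'{name:32s} mean |V| = {np.nanmean(np.hypot(u, v)):.3f}')
Judging by the wall times printed above, the circular FFT runs are consistently faster than the zero-padded ('linear') runs on this pair of frames, and the normalization adds only a modest overhead.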
tools.display_vector_field('linear_normalized_extended.txt', scale=30)
tools.display_vector_field('linear_normalized.txt', scale=30)
tools.display_vector_field('linear_intensity.txt', scale=30)
tools.display_vector_field('circular_default.txt', scale=30);
tools.display_vector_field('circular_normalized.txt', scale=30);
tools.display_vector_field('circular_extended.txt', scale=30);