Commit 1ae6c40e authored by Chris Smith's avatar Chris Smith
Browse files

Documentation cleanup and standardization

parent 4010d710
......@@ -8,6 +8,7 @@ Created on Mon Sep 28 11:35:57 2015
import numpy as np
from numpy import exp, abs, sqrt, sum, real, imag, arctan2, append
def SHOfunc(parms, w_vec):
"""
Generates the SHO response over the given frequency band
......@@ -16,16 +17,19 @@ def SHOfunc(parms, w_vec):
-----------
parms : list or tuple
SHO parameters = (A, w0, Q, phi) : amplitude, resonance frequency, quality factor, phase
w_vec : 1D numpy array
Vector of frequency values
"""
return parms[0] * exp(1j * parms[3]) * parms[1] ** 2 / (w_vec ** 2 - 1j * w_vec * parms[1] / parms[2] - parms[1] ** 2)
def SHOestimateFit(w_vec, resp_vec, num_points=5):
"""
Generates good initial guesses for fitting
Parameters
------------
w_vec: 1D numpy array or list
w_vec : 1D numpy array or list
Vector of BE frequencies
resp_vec : 1D complex numpy array or list
BE response vector as a function of frequency
......@@ -98,6 +102,7 @@ def SHOestimateFit(w_vec, resp_vec, num_points=5):
return p0
def SHOfastGuess(w_vec, resp_vec, qual_factor=10):
"""
Default SHO guess from the maximum value of the response
......@@ -116,37 +121,40 @@ def SHOfastGuess(w_vec, resp_vec, qual_factor=10):
retval : 1D numpy array
SHO fit parameters arranged as [amplitude, frequency, quality factor, phase]
"""
amp_vec =abs(resp_vec)
i_max=np.argmax(amp_vec )
return np.array([np.max(amp_vec ) / qual_factor, w_vec[i_max], qual_factor, np.angle(resp_vec[i_max])])
amp_vec = abs(resp_vec)
i_max = np.argmax(amp_vec)
return np.array([np.max(amp_vec) / qual_factor, w_vec[i_max], qual_factor, np.angle(resp_vec[i_max])])
def SHOlowerBound(w_vec):
"""
Provides the lower bound for the SHO fitting function
Parameters
------------
w_vec: 1D numpy array or list
----------
w_vec : 1D numpy array or list
Vector of BE frequencies
Returns
---------
-------
retval : tuple
SHO fit parameters arranged as amplitude, frequency, quality factor, phase
"""
return 0, np.min(w_vec), -1e5, -np.pi
def SHOupperBound(w_vec):
"""
Provides the upper bound for the SHO fitting function
Parameters
------------
----------
w_vec: 1D numpy array or list
Vector of BE frequencies
Returns
---------
-------
retval : tuple
SHO fit parameters arranged as amplitude, frequency, quality factor, phase
"""
return 1e5,np.max(w_vec), 1e5,np.pi
\ No newline at end of file
return 1e5, np.max(w_vec), 1e5, np.pi
......@@ -150,7 +150,8 @@ def getH5GroupRef(group_name, h5_refs):
Returns
-------
h5_grp
h5_grp : HDF5 Object Reference
reference to group that matches the `group_name`
"""
for dset in h5_refs:
if dset.name.split('/')[-1].startswith(group_name):
......@@ -192,20 +193,22 @@ def getH5RegRefIndices(ref, h5_main, return_method='slices'):
Parameters
----------
ref - HDF5 Region Reference
h5_main - HDF5 object that the reference can be returned
from
return_method - String, what for should the reference indices be returned
Options:
slices - default, the reference is return as pairs of slices
corners - the reference is returned as pairs of corners representing the
starting and ending indices of each block
points - the reference is returns as a list of tuples of points
ref : HDF5 Region Reference
h5_main : HDF5 Dataset
dataset that the reference can be returned from
return_method : {'slices', 'corners', 'points'}
slices : the reference is returned as pairs of slices
corners : the reference is returned as pairs of corners representing
the starting and ending indices of each block
points : the reference is returned as a list of tuples of points
Returns
-------
ref_inds - Array of indices in the source dataset that
ref accesses
ref_inds : Numpy Array
array of indices in the source dataset that ref accesses
"""
if return_method == 'points':
......@@ -216,12 +219,15 @@ def getH5RegRefIndices(ref, h5_main, return_method='slices'):
Parameters
----------
start - Tuple holding the starting indices of the region
stop - Tuple holding the final indices of the region
start : Tuple
the starting indices of the region
stop : Tuple
the final indices of the region
Returns
-------
inds - Tuple of arrays containing the list of points in each dimension
inds : Tuple of arrays
the list of points in each dimension
"""
ranges = []
for i in xrange(len(start)):
......@@ -249,12 +255,15 @@ def getH5RegRefIndices(ref, h5_main, return_method='slices'):
Parameters
----------
start - Tuple holding the starting indices of the region
stop - Tuple holding the final indices of the region
start : Tuple
the starting indices of the region
stop : Tuple
the final indices of the region
Returns
-------
slices - pair of slices representing the region
slices : list
pair of slices representing the region
"""
slices = []
for idim in xrange(len(start)):
......@@ -307,19 +316,25 @@ def checkAndLinkAncillary(h5_dset, anc_names, h5_main=None, anc_refs=None):
Parameters
----------
hdf -- ioHDF object associated with the HDF5 file
h5_dset -- HDF5 dataset to which the attributes will be written\
anc_names -- list of strings containing the attribute names to be used
h5_main -- Optional, HDF5 dataset from which attributes will be copied
if anc_refs is None
anc_refs -- Optional, HDF5 references that correspond to the strings in
anc_names
*Note: either h5_main or anc_ref MUST be provided and anc_ref has the
higher priority if both are present.
hdf : ioHDF5 object
object associated with the HDF5 file
h5_dset : HDF5 Dataset
dataset to which the attributes will be written
anc_names : list of str
the attribute names to be used
h5_main : HDF5 Dataset, optional
dataset from which attributes will be copied if `anc_refs` is None
anc_refs : list of HDF5 Object References, optional
references that correspond to the strings in `anc_names`
Returns
-------
None
Notes
-----
Either `h5_main` or `anc_refs` MUST be provided and `anc_refs` has the
higher priority if both are present.
"""
def __checkAndLinkSingle(h5_ref, ref_name):
......@@ -368,13 +383,16 @@ def createRefFromIndices(h5_main, ref_inds):
Parameters
----------
h5_main - HDF5 dataset which the region will be in
ref_inds - Iterable of index pairs, [start indices, final indices] for each block in the
hyperslab
h5_main : HDF5 dataset
dataset the region will be created in
ref_inds : Iterable
index pairs, [start indices, final indices] for each block in the
hyperslab
Returns
-------
new_ref - Region reference in h5_main for the blocks of points defined by ref_inds
new_ref : HDF5 Region reference
reference in `h5_main` for the blocks of points defined by `ref_inds`
"""
h5_space = h5_main.id.get_space()
h5_space.select_none()
......@@ -398,15 +416,12 @@ def reshape_to_Ndims(h5_main, h5_pos=None, h5_spec=None):
Parameters
----------
h5_main : HDF5 Dataset, 2D data to be reshaped
h5_pos : (Optional) HDF5 Dataset, Position indices corresponding to
rows in ds_main
h5_spec : (Optional) HDF5 Dataset, Spectroscopic indices corresponding
to columns in ds_main
If either h5_pos or h5_spec are not provided, the function will first
attempt to find them as attributes of h5_main. If that fails, it will
generate dummy values for them.
h5_main : HDF5 Dataset
2D data to be reshaped
h5_pos : HDF5 Dataset, optional
Position indices corresponding to rows in `h5_main`
h5_spec : HDF5 Dataset, optional
Spectroscopic indices corresponding to columns in `h5_main`
Returns
-------
......@@ -414,9 +429,17 @@ def reshape_to_Ndims(h5_main, h5_pos=None, h5_spec=None):
N dimensional numpy array arranged as [positions slowest to fastest, spectroscopic slowest to fastest]
success : boolean or string
True if full reshape was successful
"Positions" if it was only possible to reshape by
the position dimensions
False if no reshape was possible
Notes
-----
If either `h5_pos` or `h5_spec` are not provided, the function will first
attempt to find them as attributes of `h5_main`. If that fails, it will
generate dummy values for them.
"""
if h5_pos is None:
......@@ -606,13 +629,20 @@ def checkIfMain(h5_main):
"""
Checks the input dataset to see if it has all the necessary
features to be considered a Main dataset. This means it is
2D and has the following attributes
Position_Indices
Position_Values
Spectroscopic_Indices
Spectroscopic_Values
:param h5_main: HDF5 Dataset
:return: success: Boolean, did all tests pass
2D and has the following attributes
Position_Indices
Position_Values
Spectroscopic_Indices
Spectroscopic_Values
Parameters
----------
h5_main : HDF5 Dataset
Returns
-------
success : Boolean
True if all tests pass
"""
# Check that h5_main is a dataset
success = isinstance(h5_main, h5py.Dataset)
......@@ -647,29 +677,29 @@ def checkIfMain(h5_main):
def linkRefs(src, trg):
'''
"""
Creates Dataset attributes that contain references to other Dataset Objects.
Parameters
-----------
src : Reference to h5.objects
Reference to the the object to which attributes will be added
trg : list of references to h5.objects
objects whose references that can be accessed from src.attrs
Returns
--------
None
'''
"""
for itm in trg:
src.attrs[itm.name.split('/')[-1]] = itm.ref
def linkRefAsAlias(src, trg, trg_name):
'''
"""
Creates Dataset attributes that contain references to other Dataset Objects.
This function is useful when the reference attribute must have a reserved name.
Such as linking 'SHO_Indices' as 'Spectroscopic_Indices'
Parameters
------------
src : reference to h5.object
......@@ -678,5 +708,5 @@ def linkRefAsAlias(src, trg, trg_name):
object whose reference that can be accessed from src.attrs
trg_name : String
Alias / alternate name for trg
'''
"""
src.attrs[trg_name] = trg.ref
\ No newline at end of file
......@@ -21,18 +21,18 @@ from ..__version__ import version
class ioHDF5(object):
def __init__(self, file_handle,cachemult=1):
'''
"""
Handles:
+ I/O operation from HDF5 file.
+ Utilities to get data and associated auxiliary.
Parameters
----------
file_handle : Object - String or Unicode or open hdf5 file
Absolute path to the h5 file or an open hdf5 file
cachemult : unsigned int (Optional. default = 1)
Cache multiplier
'''
"""
if type(file_handle) in [str, unicode]:
# file handle is actually a file path
propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
......@@ -59,24 +59,24 @@ class ioHDF5(object):
self.path = file_handle.filename
def clear(self):
'''
"""
Clear h5.file of all contents
file.clear() only removes the contents, it does not free up previously allocated space.
To do so, it's necessary to use the h5repack command after clearing.
Because the file must be closed and reopened, it is best to call this
Because the file must be closed and reopened, it is best to call this
function immediately after the creation of the ioHDF5 object.
'''
"""
self.file.clear()
self.repack()
def repack(self):
'''
"""
Uses the h5repack command to recover cleared space in an hdf5 file.
h5repack can also be used to change chunking and compression, but these options have
not yet been implemented here.
'''
"""
self.close()
tmpfile = self.path+'.tmp'
......
......@@ -204,12 +204,14 @@ def getSpectroscopicParmLabel(expt_type):
Returns the label for the spectroscopic parameter in the plot group.
Parameters
---------
* expt_type -- Type of the experiment - found in the parms.txt file
----------
expt_type : str
Type of the experiment - found in the parms.txt file
Returns
---------
String - label for the spectroscopic parameter axis in the plot
-------
str
label for the spectroscopic parameter axis in the plot
"""
if expt_type in ['DC modulation mode','current mode']:
......
......@@ -727,11 +727,11 @@ class BEPSndfParser(object):
Returns the excitation wave type as an integer
Parameters
-----------
----------
None
Returns
---------
-------
wave_type : int
Wave type. Positive number means chirp up, negative number is chirp down.
"""
......@@ -742,11 +742,11 @@ class BEPSndfParser(object):
Returns the total number of spatial pixels. This includes X, Y, Z, Laser positions
Parameters
-----------
----------
None
Returns
---------
-------
num_pix : unsigned int
Number of pixels in this file
"""
......@@ -758,10 +758,10 @@ class BEPSndfParser(object):
organized from fastest to slowest varying dimension
Parameters
-----------
----------
Returns
-----------
-------
Laser steps, Z steps, Y steps, X steps : unsigned ints
"""
return (self.__num_laser_steps__,self.__num_z_steps__,self.__num_x_steps__,self.__num_y_steps__)
......@@ -781,7 +781,7 @@ class BEPSndfParser(object):
None
Returns
------
-------
None
"""
count = 0
......@@ -844,13 +844,9 @@ class BEPSndfParser(object):
Returns a BEpixel object containing the parsed information within a pixel.
Moves pixel index up by one.
This is where one could conceivably read the file in one pass instead of making 100,000 file I/Os.
Parameters
---------
None
Returns
--------
-------
pixel : BEPSndfPixel
Object that describes the data contained within the pixel
"""
......@@ -889,7 +885,7 @@ class BEPSndfPixel(object):
Initializes the pixel instance by parsing the provided data.
Parameters
---------------
----------
data_vec : 1D float numpy array
Data contained within each pixel
harm: unsigned int
......@@ -981,17 +977,18 @@ class BEPSndfPixel(object):
to tell if any parameter has changed between these pixels
Parameters
------------
----------
prevPixel : BEPSndfPixel object
The other pixel object to compare this pixel to
Returns
-------------
-------
is_different : Boolean
Whether or not these pixel objects share the same parameters
Typical things that change during BEPS
--------------
Notes
-----
*Typical things that change during BEPS*
1. BE parameters:
a. Center Frequency, Band Width - changes in the BE_bin_w
b. Amplitude, Phase Variation, Band Edge Smoothing, Band Edge Trim - Harder to find out what happened exactly - FFT should show changes
......
......@@ -6,24 +6,23 @@ Created on Tue Oct 20 17:42:41 2015
"""
###############################################################################
import numpy as np;# for all array, data operations
import matplotlib.pyplot as plt; # for all plots
import numpy as np # for all array, data operations
import matplotlib.pyplot as plt # for all plots
from scipy.special import erf
from warnings import warn
###############################################################################
def getNoiseFloor(fft_data,tolerance):
"""
Paramters:
-----------
Parameters
----------
fft_data : 1D or 2D complex numpy array
Signal in frequency space (ie - after FFT shifting) arranged as (channel or repetition, signal)
tolerance : unsigned float
Tolerance to noise. A smaller value gets rid of more noise.
Returns:
-----------
Returns
-------
noise_floor : 1D real numpy array
One value per channel / repetition
"""
......
......@@ -17,9 +17,9 @@ from skimage.transform import warp, SimilarityTransform
# Functions
def euclidMatch(Matches, keypts1, keypts2, misalign):
''' Function that thresholds the matches, found from a comparison of
""" Function that thresholds the matches, found from a comparison of
their descriptors, by the maximum expected misalignment.
'''
"""
filteredMatches = np.array([])
deltaX =(keypts1[Matches[:,0],:][:,0]-keypts2[Matches[:,1],:][:,0])**2
deltaY =(keypts1[Matches[:,0],:][:,1]-keypts2[Matches[:,1],:][:,1])**2
......@@ -166,7 +166,7 @@ class TranslationTransform(object):
return self._apply_mat(coords, self.params)
def inverse(self, coords):
''' Apply inverse transformation.
""" Apply inverse transformation.
Parameters
----------
......@@ -178,7 +178,7 @@ class TranslationTransform(object):
coords : (N, 2) array
Transformed coordinates.
'''
"""
return self._apply_mat(coords, self._inv_matrix)
def residuals(self, src, dst):
"""Determine residuals of transformed destination coordinates.
......@@ -723,12 +723,12 @@ class geoTransformerParallel(object):
return results
class geoTransformerSerial(object):
''' This object contains methods to perform geometric transformations on
""" This object contains methods to perform geometric transformations on
a sequence of images. Some of the capabilities are:
+ Homography by feature extraction.
+ Intensity-based image registration.
+ Projection Correction.
'''
"""
def __init__(self):
self.__init__
......@@ -736,15 +736,15 @@ class geoTransformerSerial(object):
self.features = []
def clearData(self):
''' This is a Method to clear the data from the object.
'''
""" This is a Method to clear the data from the object.
"""
del self.data
self.data = []
def loadData(self, dataset):
''' This is a Method that loads h5 Dataset to be corrected.
""" This is a Method that loads h5 Dataset to be corrected.
input: h5 dataset
'''
"""
if not isinstance(dataset, h5py.Dataset):
warnings.warn( 'Error: Data must be an h5 Dataset object' )
else:
......@@ -753,18 +753,18 @@ class geoTransformerSerial(object):
self.data = self.data.reshape(-1,dim,dim)
def loadFeatures(self, features):
''' This is a Method that loads features to be used for homography etc ...
""" This is a Method that loads features to be used for homography etc ...
input:
features : [keypoints, descriptors].
These can come from FeatureExtractor.getFeatures() or elsewhere.
The format is :
keypoints = [np.ndarray([y_position, x_position])]
descriptors = [np.ndarray()]
'''
"""
self.features = features
def matchFeatures(self, **kwargs):
''' This is a Method that computes similarity between keypoints based on their
""" This is a Method that computes similarity between keypoints based on their
descriptors. Currently only skimage.feature.match_descriptors is implemented.
In the future will need to add opencv2.matchers.
Input:
......@@ -773,7 +773,7 @@ class geoTransformerSerial(object):
Used to filter the matches before optimizing the transformation.
Output:
Matches.
'''
"""
desc = self.features[-1]
keypts = self.features[0]
maxDis = kwargs.get('maximum_distance', np.infty)
......@@ -814,7 +814,7 @@ class geoTransformerSerial(object):
#TODO: Need Better Error Handling.
def findTransformation(self, transform, matches, processes, **kwargs):
''' This is a Method that finds the optimal transformation between two images
""" This is a Method that finds the optimal transformation between two images
given matching features using a random sample consensus.