Commit 9e99aa32 authored by Somnath, Suhas's avatar Somnath, Suhas
Browse files

Moved many functions from .translator to .write_utils

parent 2b260fb3
......@@ -86,6 +86,7 @@ from warnings import warn
# Package for downloading online files:
import pycroscopy.core.io.translator
import pycroscopy.core.io.write_utils
try:
# This package is not part of anaconda and may need to be installed.
......@@ -279,9 +280,9 @@ px.plot_utils.plot_cluster_results_together(np.reshape(labels, (num_rows, num_co
#
# In this case, `centroids` has `k` positions all in one dimension. Thus the matrix needs to be reshaped to `k x 1`
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.core.io.translator.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.core.io.translator.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
labels_mat = np.uint32(labels.reshape([-1, 1]))
# Rename the datasets
......
......@@ -86,6 +86,7 @@ from warnings import warn
# Package for downloading online files:
import pycroscopy.core.io.translator
import pycroscopy.core.io.write_utils
try:
# This package is not part of anaconda and may need to be installed.
......@@ -279,9 +280,9 @@ px.plot_utils.plot_cluster_results_together(np.reshape(labels, (num_rows, num_co
#
# In this case, `centroids` has `k` positions all in one dimension. Thus the matrix needs to be reshaped to `k x 1`
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.core.io.translator.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.core.io.translator.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
labels_mat = np.uint32(labels.reshape([-1, 1]))
# Rename the datasets
......
......@@ -13,7 +13,7 @@ from ..core.processing.process import Process, parallel_compute
from ..core.io.microdata import MicroDataset, MicroDataGroup
from ..core.io.dtype_utils import real_to_compound
from ..core.io.hdf_utils import get_h5_obj_refs, get_auxillary_datasets, copy_attributes, link_as_main
from ..core.io.translator import build_ind_val_dsets
from ..core.io.write_utils import build_ind_val_dsets
from ..core.io.hdf_writer import HDFwriter
from .utils.giv_utils import do_bayesian_inference
......
......@@ -7,6 +7,7 @@ from . import numpy_translator
from . import hdf_utils
from . import io_utils
from . import dtype_utils
from . import write_utils
from .hdf_writer import HDFwriter
from .microdata import *
......@@ -15,4 +16,4 @@ from .translator import *
from .numpy_translator import NumpyTranslator
__all__ = ['HDFwriter', 'MicroDataset', 'MicroDataGroup', 'PycroDataset', 'hdf_utils', 'io_utils', 'dtype_utils',
'NumpyTranslator']
'NumpyTranslator', 'write_utils']
......@@ -8,7 +8,8 @@ Created on Fri Jan 27 17:58:35 2017
from __future__ import division, print_function, absolute_import, unicode_literals
import numpy as np # For array operations
from pycroscopy.core.io.translator import Translator, build_ind_val_dsets
from .translator import Translator
from .write_utils import build_ind_val_dsets
from .hdf_utils import calc_chunks
from .microdata import MicroDataset # building blocks for defining hierarchical storage in the H5 file
......
......@@ -11,12 +11,10 @@ import abc
import time as tm
from os import path, remove
import numpy as np
from .io_utils import get_available_memory
from .microdata import MicroDataGroup, MicroDataset
from .hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
from pycroscopy.core.io.hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
from .hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
class Translator(object):
......@@ -155,201 +153,3 @@ def generate_dummy_main_parms():
return main_parms
def make_position_mat(num_steps):
    """
    Build the position-index matrix for a set of spatial dimensions.

    Intentionally generic so it works for any SPM dataset: one column is
    produced per dimension whose size is greater than 1, and every row is a
    unique combination of indices across those dimensions.

    Parameters
    ----------
    num_steps : list / numpy array
        Steps in each spatial direction.
        Note that the axes must be ordered from fastest varying to slowest varying.

    Returns
    -------
    pos_mat : 2D unsigned int numpy array
        Arranged as [steps, spatial dimension].
    """
    steps_arr = np.array(num_steps)
    # At least one column even if every dimension is singular
    n_varying = max(1, len(np.where(steps_arr > 1)[0]))
    total_pts = np.prod(steps_arr)
    pos_mat = np.zeros(shape=(total_pts, n_varying), dtype=np.uint32)

    col = 0
    for dim_ind, dim_size in enumerate(steps_arr):
        if dim_size <= 1:
            # Singular dimensions do not get their own column
            continue
        # Each index repeats once per faster-varying point, and the whole
        # pattern tiles once per slower-varying point.
        inner = np.prod(steps_arr[:dim_ind]) if dim_ind > 0 else 1
        outer = np.prod(steps_arr[dim_ind + 1:]) if dim_ind + 1 < len(steps_arr) else 1
        pos_mat[:, col] = np.tile(np.repeat(np.arange(dim_size), inner), outer)
        col += 1

    return pos_mat
def get_position_slicing(pos_lab, curr_pix=None):
    """
    Return a dictionary of slice objects to help in creating region references
    to the position indices and values H5 datasets.

    Parameters
    ----------
    pos_lab : list of str
        Labels of each of the position axes.
    curr_pix : unsigned int, optional
        Last pixel in the position matrix. Useful in experiments where the
        parameters have changed (eg. BEPS new data format).

    Returns
    -------
    slice_dict : dict
        Dictionary of tuples containing slice objects corresponding to
        each position axis.
    """
    # Position datasets are tall and skinny: rows are pixels, columns are axes
    return {dim_name: (slice(curr_pix), slice(col, col + 1))
            for col, dim_name in enumerate(pos_lab)}
def get_spectral_slicing(spec_lab, curr_spec=None):
    """
    Return a dictionary of slice objects to help in creating region references
    to the spectroscopic indices and values H5 datasets.

    Parameters
    ----------
    spec_lab : list of str
        Labels of each of the Spectroscopic axes.
    curr_spec : unsigned int, optional
        Last position in the spectroscopic matrix. Useful in experiments where
        the parameters have changed (eg. BEPS new data format).

    Returns
    -------
    slice_dict : dict
        Dictionary of tuples containing slice objects corresponding to
        each Spectroscopic axis.
    """
    # Spectroscopic datasets are short and wide: rows are axes, columns are steps
    return {dim_name: (slice(row, row + 1), slice(curr_spec))
            for row, dim_name in enumerate(spec_lab)}
def build_ind_val_dsets(dimensions, is_spectral=True, steps=None, initial_values=None, labels=None,
                        units=None, verbose=False):
    """
    Builds the MicroDatasets for the position OR spectroscopic indices and values
    of the data.

    Parameters
    ----------
    dimensions : array_like of numpy.uint
        Integer values for the length of each dimension
    is_spectral : Boolean, optional, default = True
        Spectroscopic (True) or Position (False)
    steps : array_like of float, optional
        Floating point values for the step-size in each dimension. One
        if not specified.
    initial_values : array_like of float, optional
        Floating point for the zeroth value in each dimension. Zero if
        not specified.
    labels : array_like of str, optional
        The names of each dimension. Empty strings will be used if not
        specified.
    units : array_like of str, optional
        The units of each dimension. Empty strings will be used if not
        specified.
    verbose : Boolean, optional
        Whether or not to print statements for debugging purposes

    Returns
    -------
    ds_indices : MicroDataset of numpy.uint32
        Dataset containing the indices
    ds_values : MicroDataset of numpy.float32
        Dataset containing the value at each index

    Raises
    ------
    ValueError
        If `steps`, `initial_values`, `labels`, or `units` does not have the
        same length as `dimensions`.

    Notes
    -----
    `steps`, `initial_values`, `labels`, and `units` must be the same length as
    `dimensions` when they are specified.

    Dimensions should be in the order from fastest varying to slowest.
    """
    if steps is None:
        steps = np.ones_like(dimensions)
    elif len(steps) != len(dimensions):
        raise ValueError('The arrays for step sizes and dimension sizes must be the same.')
    steps = np.atleast_2d(steps)
    if verbose:
        print('Steps')
        print(steps.shape)
        print(steps)

    if initial_values is None:
        initial_values = np.zeros_like(dimensions)
    elif len(initial_values) != len(dimensions):
        raise ValueError('The arrays for initial values and dimension sizes must be the same.')
    initial_values = np.atleast_2d(initial_values)
    if verbose:
        print('Initial Values')
        print(initial_values.shape)
        print(initial_values)

    if labels is None:
        labels = ['' for _ in dimensions]
    elif len(labels) != len(dimensions):
        raise ValueError('The arrays for labels and dimension sizes must be the same.')

    # Get the indices for all dimensions
    indices = make_position_mat(dimensions)
    if verbose:
        print('Indices')
        print(indices.shape)
        print(indices)

    # Convert the indices to values via a linear ramp per dimension
    values = initial_values + np.float32(indices) * steps

    # Create the slices that will define the labels
    if is_spectral:
        mode = 'Spectroscopic_'
        # Spectroscopic datasets are short and wide: dimensions become rows
        indices = indices.transpose()
        values = values.transpose()
        region_slices = get_spectral_slicing(labels)
    else:
        mode = 'Position_'
        region_slices = get_position_slicing(labels)

    # Create the MicroDatasets for both Indices and Values
    ds_indices = MicroDataset(mode + 'Indices', indices, dtype=np.uint32)
    ds_indices.attrs['labels'] = region_slices
    ds_values = MicroDataset(mode + 'Values', np.float32(values), dtype=np.float32)
    ds_values.attrs['labels'] = region_slices

    if units is None:
        pass
    elif len(units) != len(dimensions):
        # Bug fix: this message previously (incorrectly) referred to 'labels'
        raise ValueError('The arrays for units and dimension sizes must be the same.')
    else:
        ds_indices.attrs['units'] = units
        ds_values.attrs['units'] = units

    return ds_indices, ds_values
\ No newline at end of file
import numpy as np
from pycroscopy import MicroDataset
__all__ = ['build_ind_val_dsets', 'get_position_slicing', 'get_spectral_slicing', 'make_indices_matrix']
def build_ind_val_dsets(dimensions, is_spectral=True, steps=None, initial_values=None, labels=None,
                        units=None, verbose=False):
    """
    Builds the MicroDatasets for the position OR spectroscopic indices and values
    of the data.

    Parameters
    ----------
    dimensions : array_like of numpy.uint
        Integer values for the length of each dimension
    is_spectral : Boolean, optional, default = True
        Spectroscopic (True) or Position (False)
    steps : array_like of float, optional
        Floating point values for the step-size in each dimension. One
        if not specified.
    initial_values : array_like of float, optional
        Floating point for the zeroth value in each dimension. Zero if
        not specified.
    labels : array_like of str, optional
        The names of each dimension. Empty strings will be used if not
        specified.
    units : array_like of str, optional
        The units of each dimension. Empty strings will be used if not
        specified.
    verbose : Boolean, optional
        Whether or not to print statements for debugging purposes

    Returns
    -------
    ds_indices : MicroDataset of numpy.uint32
        Dataset containing the indices
    ds_values : MicroDataset of numpy.float32
        Dataset containing the value at each index

    Raises
    ------
    ValueError
        If `steps`, `initial_values`, `labels`, or `units` does not have the
        same length as `dimensions`.

    Notes
    -----
    `steps`, `initial_values`, `labels`, and `units` must be the same length as
    `dimensions` when they are specified.

    Dimensions should be in the order from fastest varying to slowest.
    """
    if steps is None:
        steps = np.ones_like(dimensions)
    elif len(steps) != len(dimensions):
        raise ValueError('The arrays for step sizes and dimension sizes must be the same.')
    steps = np.atleast_2d(steps)
    if verbose:
        print('Steps')
        print(steps.shape)
        print(steps)

    if initial_values is None:
        initial_values = np.zeros_like(dimensions)
    elif len(initial_values) != len(dimensions):
        raise ValueError('The arrays for initial values and dimension sizes must be the same.')
    initial_values = np.atleast_2d(initial_values)
    if verbose:
        print('Initial Values')
        print(initial_values.shape)
        print(initial_values)

    if labels is None:
        labels = ['' for _ in dimensions]
    elif len(labels) != len(dimensions):
        raise ValueError('The arrays for labels and dimension sizes must be the same.')

    # Get the indices for all dimensions
    indices = make_indices_matrix(dimensions)
    if verbose:
        print('Indices')
        print(indices.shape)
        print(indices)

    # Convert the indices to values via a linear ramp per dimension
    values = initial_values + np.float32(indices) * steps

    # Create the slices that will define the labels
    if is_spectral:
        mode = 'Spectroscopic_'
        # Spectroscopic datasets are short and wide: dimensions become rows
        indices = indices.transpose()
        values = values.transpose()
        region_slices = get_spectral_slicing(labels)
    else:
        mode = 'Position_'
        region_slices = get_position_slicing(labels)

    # Create the MicroDatasets for both Indices and Values
    ds_indices = MicroDataset(mode + 'Indices', indices, dtype=np.uint32)
    ds_indices.attrs['labels'] = region_slices
    ds_values = MicroDataset(mode + 'Values', np.float32(values), dtype=np.float32)
    ds_values.attrs['labels'] = region_slices

    if units is None:
        pass
    elif len(units) != len(dimensions):
        # Bug fix: this message previously (incorrectly) referred to 'labels'
        raise ValueError('The arrays for units and dimension sizes must be the same.')
    else:
        ds_indices.attrs['units'] = units
        ds_values.attrs['units'] = units

    return ds_indices, ds_values
def get_position_slicing(pos_lab, curr_pix=None):
    """
    Return a dictionary of slice objects to help in creating region references
    to the position indices and values H5 datasets.

    Parameters
    ----------
    pos_lab : list of str
        Labels of each of the position axes.
    curr_pix : unsigned int, optional
        Last pixel in the position matrix. Useful in experiments where the
        parameters have changed (eg. BEPS new data format).

    Returns
    -------
    slice_dict : dict
        Dictionary of tuples containing slice objects corresponding to
        each position axis.
    """
    slices = dict()
    for axis_col, axis_name in enumerate(pos_lab):
        # Rows select pixels (up to curr_pix), the column selects this axis
        slices[axis_name] = (slice(curr_pix), slice(axis_col, axis_col + 1))
    return slices
def get_spectral_slicing(spec_lab, curr_spec=None):
    """
    Return a dictionary of slice objects to help in creating region references
    to the spectroscopic indices and values H5 datasets.

    Parameters
    ----------
    spec_lab : list of str
        Labels of each of the Spectroscopic axes.
    curr_spec : unsigned int, optional
        Last position in the spectroscopic matrix. Useful in experiments where
        the parameters have changed (eg. BEPS new data format).

    Returns
    -------
    slice_dict : dict
        Dictionary of tuples containing slice objects corresponding to
        each Spectroscopic axis.
    """
    slices = dict()
    for axis_row, axis_name in enumerate(spec_lab):
        # The row selects this axis, columns select steps (up to curr_spec)
        slices[axis_name] = (slice(axis_row, axis_row + 1), slice(curr_spec))
    return slices
def make_indices_matrix(num_steps, is_position=True):
    """
    Makes an ancillary indices matrix given the number of steps in each dimension. In other words, this function builds
    a matrix whose rows correspond to unique combinations of the multiple dimensions provided.

    Parameters
    ----------
    num_steps : list / tuple / numpy array
        Number of steps in each spatial or spectral dimension.
        Note that the axes must be ordered from fastest varying to slowest varying.
    is_position : bool, optional, default = True
        Whether the returned matrix is meant for position (True) indices (tall and skinny) or spectroscopic (False)
        indices (short and wide).

    Returns
    -------
    indices_matrix : 2D unsigned int numpy array
        Arranged as [steps, dimension] when `is_position` is True, or transposed to
        [dimension, steps] when `is_position` is False.

    Raises
    ------
    TypeError
        If `num_steps` is not a list, tuple, or numpy array.
    """
    # Bug fix: was an `assert`, which is silently stripped under `python -O`.
    # Also accept numpy arrays, which callers already pass in.
    if not isinstance(num_steps, (list, tuple, np.ndarray)):
        raise TypeError('num_steps should be a list, tuple, or numpy array')
    num_steps = np.array(num_steps)
    # At least one column even if every dimension is singular
    spat_dims = max(1, len(np.where(num_steps > 1)[0]))
    indices_matrix = np.zeros(shape=(np.prod(num_steps), spat_dims), dtype=np.uint32)
    dim_ind = 0
    for indx, curr_steps in enumerate(num_steps):
        if curr_steps > 1:
            # part1: total points through this dimension (inclusive)
            part1 = np.prod(num_steps[:indx + 1])
            # part2: points covered by all faster-varying dimensions
            part2 = np.prod(num_steps[:indx]) if indx > 0 else 1
            # part3: repetitions required by all slower-varying dimensions
            part3 = np.prod(num_steps[indx + 1:]) if indx + 1 < len(num_steps) else 1
            # Each index repeats part2 times, the whole pattern tiles part3 times
            indices_matrix[:, dim_ind] = np.tile(np.floor(np.arange(part1) / part2), part3)
            dim_ind += 1
    if not is_position:
        # Spectroscopic layout: dimensions as rows instead of columns
        indices_matrix = indices_matrix.T
    return indices_matrix
......@@ -11,7 +11,7 @@ import joblib
import time as tm
from ..io.hdf_utils import check_if_main, check_for_old, get_attributes
from pycroscopy.core.io.hdf_writer import HDFwriter
from ..io.hdf_writer import HDFwriter
from ..io.io_utils import recommend_cpu_cores, get_available_memory, format_time
......
......@@ -15,7 +15,8 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, parmsToDict, generatePlotGroups, \
createSpecVals, requires_conjugate, nf32
from pycroscopy.core.io.translator import Translator, generate_dummy_main_parms, build_ind_val_dsets
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import build_ind_val_dsets
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs, calc_chunks
from ...core.io.hdf_writer import HDFwriter
from ...core.io.microdata import MicroDataGroup, MicroDataset
......
......@@ -15,10 +15,10 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, generatePlotGroups, createSpecVals, maxReadPixels, \
nf32
from ...core.io.translator import Translator, generate_dummy_main_parms, make_position_mat, get_position_slicing
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import make_indices_matrix, get_position_slicing
from ...core.io.hdf_utils import get_h5_obj_refs
from ...core.io.hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
# The building blocks for defining hierarchical storage in the H5 file
from ...core.io.hdf_writer import HDFwriter
from ...core.io.microdata import MicroDataGroup, MicroDataset
......@@ -123,7 +123,7 @@ class BEodfRelaxationTranslator(Translator):
ex_wfm = np.float32(ex_wfm)
self.FFT_BE_wave = bin_FFT
pos_mat = make_position_mat([num_cols, num_rows])
pos_mat = make_indices_matrix([num_cols, num_rows])
pos_slices = get_position_slicing(['X', 'Y'], num_pix)
ds_ex_wfm = MicroDataset('Excitation_Waveform', ex_wfm)
......
......@@ -10,7 +10,8 @@ from skimage.measure import block_reduce
from ...core.io.hdf_writer import HDFwriter
from ...core.io.hdf_utils import calc_chunks, get_h5_obj_refs, link_as_main, get_attr, build_reduced_spec_dsets
from ...core.io.dtype_utils import real_to_compound
from ...core.io.translator import Translator, generate_dummy_main_parms, build_ind_val_dsets
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import build_ind_val_dsets
from ...core.io.microdata import MicroDataGroup, MicroDataset
from ...core.io.pycro_data import PycroDataset
from ...analysis.utils.be_loop import loop_fit_function
......
......@@ -17,7 +17,8 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, parmsToDict, generatePlotGroups, \
normalizeBEresponse, createSpecVals, nf32
from ...core.io.translator import Translator, generate_dummy_main_parms, make_position_mat
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import make_indices_matrix
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs, calc_chunks
from ...core.io.hdf_writer import HDFwriter
from ...core.io.microdata import MicroDataGroup, MicroDataset
......@@ -145,7 +146,7 @@ class BEPSndfTranslator(Translator):
s_pixels = np.array(parsers[0].get_spatial_pixels())
self.pos_labels = ['Laser Spot', 'Z', 'Y', 'X']
self.pos_labels = [self.pos_labels[i] for i in np.where(s_pixels > 1)[0]]
self.pos_mat = make_position_mat(s_pixels)
self.pos_mat = make_indices_matrix(s_pixels)
self.pos_units = ['um' for _ in range(len(self.pos_labels))]
# self.pos_mat = np.int32(self.pos_mat)
......
......@@ -12,7 +12,8 @@ import h5py
import numpy as np # For array operations
from scipy.io import loadmat
from ...core.io.translator import Translator, build_ind_val_dsets
from ...core.io.translator import Translator
from ...core.io.write_utils import build_ind_val_dsets
from ...core.io.microdata import MicroDataset # building blocks for defining hierarchical storage in the H5 file
......
......@@ -14,8 +14,8 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.gmode_utils import readGmodeParms
from ...core.io.translator import Translator, \
generate_dummy_main_parms, make_position_mat, \
get_position_slicing # Because this class extends the abstract Translator class
generate_dummy_main_parms # Because this class extends the abstract Translator class
from ...core.io.write_utils import make_indices_matrix, get_position_slicing