Commit 30009b57 authored by Somnath, Suhas's avatar Somnath, Suhas

Moved deprecated classes out of core

virtual_data and hdf_writer moved to io;
functions using VirtualDataset in core.io.write_utils and hdf_utils
were moved into a new io.write_utils
parent fc683160
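
In practical terms, the import-path changes below boil down to the following summary (the moved classes remain re-imported into pycroscopy.core.io for compatibility, while the moved write_utils functions do not):

# Old (deprecated) locations:
from pycroscopy.core.io.virtual_data import VirtualDataset, VirtualGroup
from pycroscopy.core.io.hdf_writer import HDFwriter
from pycroscopy.core.io.write_utils import build_ind_val_dsets

# New locations after this commit:
from pycroscopy.io.virtual_data import VirtualDataset, VirtualGroup
from pycroscopy.io.hdf_writer import HDFwriter
from pycroscopy.io.write_utils import build_ind_val_dsets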
......@@ -85,6 +85,7 @@ import os
from warnings import warn
# Package for downloading online files:
import pycroscopy.io.write_utils
import pycroscopy.viz.cluster_utils
try:
......@@ -278,9 +279,9 @@ px.plot_utils.plot_cluster_results_together(np.reshape(labels, (num_rows, num_co
#
# In this case, `centroids` has `k` positions all in one dimension. Thus the matrix needs to be reshaped to `k x 1`
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.core.io.write_utils.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
ds_labels_spec_inds, ds_labels_spec_vals = pycroscopy.io.write_utils.build_ind_val_dsets([1], labels=['Label'])
ds_cluster_inds, ds_cluster_vals = pycroscopy.io.write_utils.build_ind_val_dsets([centroids.shape[0]], is_spectral=False,
labels=['Cluster'])
labels_mat = np.uint32(labels.reshape([-1, 1]))
# Rename the datasets
......
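
As an aside, the reshape in the cell above is column-vector bookkeeping: both the labels and the centroids must be two-dimensional for pycroscopy. A standalone sketch with a hypothetical label array:

import numpy as np
labels = np.random.randint(0, 4, size=64)        # hypothetical k-means labels, k = 4
labels_mat = np.uint32(labels.reshape([-1, 1]))  # one column: a single label per position
print(labels_mat.shape)                          # (64, 1)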
......@@ -19,8 +19,8 @@ import matplotlib.patches as patches
from ...core.io.io_utils import recommend_cpu_cores
from ...core.io.dtype_utils import stack_real_to_compound
from ...core.io.virtual_data import VirtualDataset, VirtualGroup
from ...core.io.hdf_writer import HDFwriter
from ...io.virtual_data import VirtualDataset, VirtualGroup
from ...io.hdf_writer import HDFwriter
# atom_dtype = np.dtype([('x', np.float32),
# ('y', np.float32),
......
......@@ -12,8 +12,8 @@ import time as tm
import matplotlib.pyplot as plt
from ...core.io.io_utils import recommend_cpu_cores
from ...core.io.virtual_data import VirtualDataset, VirtualGroup
from ...core.io.hdf_writer import HDFwriter
from ...io.virtual_data import VirtualDataset, VirtualGroup
from ...io.hdf_writer import HDFwriter
from ...core.viz.plot_utils import cmap_jet_white_center
......
from . import hdf_writer
from . import virtual_data
from . import pycro_data
from . import translator
from . import numpy_translator
......@@ -9,11 +7,10 @@ from . import io_utils
from . import dtype_utils
from . import write_utils
from .hdf_writer import HDFwriter
from .virtual_data import *
from pycroscopy.io.hdf_writer import HDFwriter
from pycroscopy.io.virtual_data import *
from .pycro_data import PycroDataset
from .translator import *
from .numpy_translator import NumpyTranslator
__all__ = ['HDFwriter', 'VirtualDataset', 'VirtualGroup', 'PycroDataset', 'hdf_utils', 'io_utils', 'dtype_utils',
'NumpyTranslator', 'write_utils']
__all__ = ['PycroDataset', 'hdf_utils', 'io_utils', 'dtype_utils', 'NumpyTranslator', 'write_utils']
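
Since the moved classes are still re-imported into pycroscopy.core.io (even though they are dropped from __all__), the old import path should keep resolving to the same object. A quick sanity check, assuming this compatibility shim stays in place:

from pycroscopy.io.hdf_writer import HDFwriter as NewHDFwriter
from pycroscopy.core.io import HDFwriter as OldHDFwriter
assert NewHDFwriter is OldHDFwriter  # both names point at the moved class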
......@@ -20,8 +20,6 @@ from .io_utils import get_time_stamp
from .dtype_utils import contains_integers
from ...__version__ import version as pycroscopy_version
from .virtual_data import VirtualDataset
__all__ = ['get_attr', 'get_h5_obj_refs', 'get_indices_for_region_ref', 'get_dimensionality', 'get_sort_order',
'get_auxillary_datasets', 'get_attributes', 'get_group_refs', 'check_if_main', 'check_and_link_ancillary',
'copy_region_refs', 'get_all_main', 'get_unit_values', 'get_data_descriptor',
......@@ -30,7 +28,7 @@ __all__ = ['get_attr', 'get_h5_obj_refs', 'get_indices_for_region_ref', 'get_dim
'find_results_groups', 'get_formatted_labels', 'reshape_from_n_dims', 'find_dataset', 'print_tree',
'copy_main_attributes', 'create_empty_dataset', 'calc_chunks', 'check_for_old', 'get_source_dataset',
'link_as_main', 'copy_reg_ref_reduced_dim', 'simple_region_ref_copy', 'write_basic_attrs_to_group',
'is_editable_h5', 'write_ind_val_dsets', 'build_reduced_spec_dsets', 'write_reduced_spec_dsets',
'is_editable_h5', 'write_ind_val_dsets', 'write_reduced_spec_dsets',
'write_simple_attrs', 'write_main_dataset', 'attempt_reg_ref_build', 'write_region_references',
'assign_group_index', 'clean_reg_ref', 'create_results_group', 'create_indexed_group'
]
......@@ -2268,94 +2266,6 @@ def write_reduced_spec_dsets(h5_parent_group, h5_spec_inds, h5_spec_vals, keep_d
return h5_inds, h5_vals
def build_reduced_spec_dsets(h5_parent_group, h5_spec_inds, h5_spec_vals, keep_dim, step_starts,
basename='Spectroscopic'):
"""
Creates new Spectroscopic Indices and Values datasets from the input datasets
and keeps the dimensions specified in keep_dim
Parameters
----------
h5_parent_group : h5py.Group or h5py.File
Group under which the indices and values datasets will be created
h5_spec_inds : HDF5 Dataset
Spectroscopic indices dataset
h5_spec_vals : HDF5 Dataset
Spectroscopic values dataset
keep_dim : Numpy Array, Boolean
Array designating which rows of the input spectroscopic datasets to keep
step_starts : Numpy Array, Unsigned Integers
Array specifying the start of each step in the reduced datasets
basename : str / unicode
String to which '_Indices' and '_Values' will be appended to get the names
of the new datasets
Returns
-------
ds_inds : VirtualDataset
Reduced Spectroscopic indices dataset
ds_vals : VirtualDataset
Reduced Spectroscopic values dataset
"""
warn('build_reduced_spec_dsets is available only for legacy purposes and will be REMOVED in a future release.\n'
'Please consider using write_reduced_spec_dsets instead', DeprecationWarning)
if not isinstance(h5_parent_group, (h5py.Group, h5py.File)):
raise TypeError('h5_parent_group should be a h5py.File or h5py.Group object')
if basename is not None:
if not isinstance(basename, (str, unicode)):
raise TypeError('basename should be a string')
for sub_name in ['_Indices', '_Values']:
if basename + sub_name in h5_parent_group.keys():
raise KeyError('Dataset: {} already exists in provided group: {}'.format(basename + sub_name,
h5_parent_group.name))
for param, param_name in zip([h5_spec_inds, h5_spec_vals], ['h5_spec_inds', 'h5_spec_vals']):
if not isinstance(param, h5py.Dataset):
raise TypeError(param_name + ' should be a h5py.Dataset object')
if not isinstance(keep_dim, (bool, np.ndarray, list, tuple)):
raise TypeError('keep_dim should be a bool, np.ndarray, list, or tuple')
if not isinstance(step_starts, (list, np.ndarray, tuple)):
raise TypeError('step_starts should be a list, np.ndarray, or tuple')
if h5_spec_inds.shape[0] > 1:
'''
Extract all rows that we want to keep from input indices and values
'''
# TODO: handle TypeError: Indexing elements must be in increasing order
ind_mat = h5_spec_inds[keep_dim, :][:, step_starts]
val_mat = h5_spec_vals[keep_dim, :][:, step_starts]
'''
Create new Datasets to hold the data
Name them based on basename
'''
ds_inds = VirtualDataset(basename + '_Indices', ind_mat, dtype=h5_spec_inds.dtype)
ds_vals = VirtualDataset(basename + '_Values', val_mat, dtype=h5_spec_vals.dtype)
# Extracting the labels from the original spectroscopic data sets
labels = h5_spec_inds.attrs['labels'][keep_dim]
# Creating the dimension slices for the new spectroscopic data sets
reg_ref_slices = dict()
for row_ind, row_name in enumerate(labels):
reg_ref_slices[row_name] = (slice(row_ind, row_ind + 1), slice(None))
# Adding the labels and units to the new spectroscopic data sets
for dset in [ds_inds, ds_vals]:
dset.attrs['labels'] = reg_ref_slices
dset.attrs['units'] = h5_spec_inds.attrs['units'][keep_dim]
else: # Single spectroscopic dimension:
ds_inds = VirtualDataset(basename + '_Indices', np.array([[0]], dtype=INDICES_DTYPE))
ds_vals = VirtualDataset(basename + '_Values', np.array([[0]], dtype=VALUES_DTYPE))
for dset in [ds_inds, ds_vals]:
dset.attrs['labels'] = {'Single_Step': (slice(0, None), slice(None))}
dset.attrs['units'] = ''
return ds_inds, ds_vals
def assign_group_index(h5_parent_group, base_name, verbose=False):
"""
Searches the parent h5 group to find the next available index for the group
......
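
For reference, a hypothetical call to the relocated function, reducing a two-dimensional spectroscopic axis down to its first dimension; the file name, group paths, and step_starts logic below are illustrative assumptions, not part of this commit:

import h5py
import numpy as np
from pycroscopy.io.write_utils import build_reduced_spec_dsets  # new home after this commit

h5_f = h5py.File('measurement.h5', mode='r+')                 # hypothetical file
h5_spec_inds = h5_f['Measurement_000/Spectroscopic_Indices']  # hypothetical paths
h5_spec_vals = h5_f['Measurement_000/Spectroscopic_Values']
keep_dim = np.array([True, False])               # keep the first dimension, drop the second
step_starts = np.where(h5_spec_inds[1] == 0)[0]  # columns where the dropped dimension restarts
ds_inds, ds_vals = build_reduced_spec_dsets(h5_f, h5_spec_inds, h5_spec_vals,
                                            keep_dim, step_starts)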
......@@ -19,7 +19,7 @@ class Translator(object):
"""
__metaclass__ = abc.ABCMeta
def __init__(self, max_mem_mb=1024):
def __init__(self, max_mem_mb=1024, *args, **kwargs):
"""
Parameters
-----------
......@@ -33,12 +33,12 @@ class Translator(object):
self.max_ram = min(max_mem_mb * 1024 ** 2, 0.75 * get_available_memory())
@abc.abstractmethod
def translate(self, filepath):
def translate(self, filepath, *args, **kwargs):
"""
Abstract method.
To be implemented by extensions of this class. God I miss Java!
"""
raise NotImplementedError('Ths translate method needs to be implemented by the child class')
raise NotImplementedError('The translate method needs to be implemented by the child class')
def generate_dummy_main_parms():
......
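
With the loosened signature, subclasses can accept and forward extra arguments through translate. A minimal sketch of a hypothetical child class:

from pycroscopy.core.io.translator import Translator

class ExampleTranslator(Translator):  # hypothetical subclass, for illustration only
    def translate(self, filepath, *args, **kwargs):
        # parse the raw file at filepath, write its contents to HDF5, return the h5 path
        h5_path = filepath + '.h5'
        # ... actual parsing and writing elided ...
        return h5_path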
......@@ -11,12 +11,8 @@ from collections import Iterable
from .dtype_utils import contains_integers
from warnings import warn
from .virtual_data import VirtualDataset
__all__ = ['clean_string_att', 'get_aux_dset_slicing', 'make_indices_matrix',
'INDICES_DTYPE', 'VALUES_DTYPE', 'Dimension', 'build_ind_val_dsets']
__all__ = ['clean_string_att', 'get_aux_dset_slicing', 'make_indices_matrix', 'INDICES_DTYPE', 'VALUES_DTYPE',
'Dimension']
if sys.version_info.major == 3:
unicode = str
......@@ -215,84 +211,6 @@ def build_ind_val_matricies(unit_values, is_spectral=True):
return INDICES_DTYPE(ind_mat), VALUES_DTYPE(val_mat)
def build_ind_val_dsets(dimensions, is_spectral=True, verbose=False, base_name=None):
"""
Creates VirtualDatasets for the position OR spectroscopic indices and values of the data.
Remember that the contents of the dataset can be changed if need be after the creation of the datasets.
For example, if one of the spectroscopic dimensions (e.g. Bias) was sinusoidal rather than linear, the specific
dimension in the Spectroscopic_Values dataset can be manually overwritten.
Parameters
----------
dimensions : Dimension or array-like of Dimension objects
Sequence of Dimension objects that provides all necessary instructions for constructing the indices and values
datasets
is_spectral : bool, optional. default = True
Spectroscopic (True) or Position (False)
verbose : Boolean, optional
Whether or not to print statements for debugging purposes
base_name : str / unicode, optional
Prefix for the datasets. Default: 'Position_' when is_spectral is False, 'Spectroscopic_' otherwise
Returns
-------
ds_indices : VirtualDataset
Position or Spectroscopic indices dataset, depending on is_spectral
ds_values : VirtualDataset
Position or Spectroscopic values dataset, depending on is_spectral
Notes
-----
`steps`, `initial_values`, `labels`, and `units` must be the same length as
`dimensions` when they are specified.
Dimensions should be in the order from fastest varying to slowest.
"""
warn('build_ind_val_dsets is available only for legacy purposes and will be REMOVED in a future release.\n'
'Please consider using write_ind_val_dsets in hdf_utils instead', DeprecationWarning)
if isinstance(dimensions, Dimension):
dimensions = [dimensions]
if not isinstance(dimensions, (list, np.ndarray, tuple)):
raise TypeError('dimensions should be array-like ')
if not np.all([isinstance(x, Dimension) for x in dimensions]):
raise TypeError('dimensions should be a sequence of Dimension objects')
if base_name is not None:
if not isinstance(base_name, (str, unicode)):
raise TypeError('base_name should be a string')
if not base_name.endswith('_'):
base_name += '_'
else:
base_name = 'Position_'
if is_spectral:
base_name = 'Spectroscopic_'
unit_values = [x.values for x in dimensions]
indices, values = build_ind_val_matricies(unit_values, is_spectral=is_spectral)
if verbose:
print('Indices:')
print(indices)
print('Values:')
print(values)
# Create the slices that will define the labels
region_slices = get_aux_dset_slicing([x.name for x in dimensions], is_spectroscopic=is_spectral)
# Create the VirtualDataset for both Indices and Values
ds_indices = VirtualDataset(base_name + 'Indices', indices, dtype=INDICES_DTYPE)
ds_values = VirtualDataset(base_name + 'Values', VALUES_DTYPE(values), dtype=VALUES_DTYPE)
for dset in [ds_indices, ds_values]:
dset.attrs['labels'] = region_slices
dset.attrs['units'] = [x.units for x in dimensions]
return ds_indices, ds_values
def create_spec_inds_from_vals(ds_spec_val_mat):
"""
Create new Spectroscopic Indices table from the changes in the
......
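
For completeness, a hypothetical call to the relocated build_ind_val_dsets, assuming Dimension takes (name, units, values) as in core.io.write_utils; the dimension names and values are made up:

import numpy as np
from pycroscopy.core.io.write_utils import Dimension
from pycroscopy.io.write_utils import build_ind_val_dsets  # new home after this commit

dims = [Dimension('Bias', 'V', np.linspace(-2, 2, 128)),   # fastest varying dimension first
        Dimension('Cycle', '', np.arange(2))]
ds_indices, ds_values = build_ind_val_dsets(dims, is_spectral=True)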
......@@ -17,5 +17,7 @@ Submodules
"""
from . import translators
from .translators import *
from .hdf_writer import HDFwriter
from .virtual_data import VirtualDataset, VirtualGroup, VirtualData
__all__ = translators.__all__
......@@ -13,7 +13,7 @@ from time import time, sleep
from warnings import warn
import h5py
from .hdf_utils import assign_group_index, write_simple_attrs, attempt_reg_ref_build, write_region_references
from ..core.io.hdf_utils import assign_group_index, write_simple_attrs, attempt_reg_ref_build, write_region_references
from .virtual_data import VirtualGroup, VirtualDataset, VirtualData
from ..__version__ import version
......
......@@ -18,8 +18,8 @@ from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, generatePlot
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import make_indices_matrix, get_aux_dset_slicing, INDICES_DTYPE, VALUES_DTYPE
from ...core.io.hdf_utils import get_h5_obj_refs
from ...core.io.hdf_writer import HDFwriter
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
class BEodfRelaxationTranslator(Translator):
......
......@@ -7,20 +7,21 @@ import numpy as np
from sklearn.utils import gen_batches
from skimage.measure import block_reduce
# Pycroscopy imports
from ...core.io.hdf_writer import HDFwriter
from ...core.io.hdf_utils import calc_chunks, get_h5_obj_refs, link_as_main, get_attr
from ...core.io.dtype_utils import stack_real_to_compound
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.hdf_utils import build_reduced_spec_dsets
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ...core.io.pycro_data import PycroDataset
from ...core.io.write_utils import Dimension
from ...analysis.utils.be_loop import loop_fit_function
from ...analysis.utils.be_sho import SHOfunc
from ...analysis.be_sho_fitter import sho32
from ...analysis.be_loop_fitter import loop_fit32
from .df_utils.beps_gen_utils import get_noise_vec, beps_image_folder
from .df_utils.io_image import read_image, no_bin
# Deprecated imports:
from ..hdf_writer import HDFwriter
from ..write_utils import build_reduced_spec_dsets, build_ind_val_dsets
from ..virtual_data import VirtualGroup, VirtualDataset
class FakeBEPSGenerator(Translator):
......
......@@ -20,8 +20,8 @@ from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, parmsToDict,
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import make_indices_matrix, VALUES_DTYPE, INDICES_DTYPE
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs, calc_chunks
from ...core.io.hdf_writer import HDFwriter
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
class BEPSndfTranslator(Translator):
......
......@@ -17,13 +17,13 @@ import xlrd as xlreader
from ....core.io.hdf_utils import get_auxillary_datasets, find_dataset, get_h5_obj_refs, link_h5_objects_as_attrs, \
get_attr
from pycroscopy.core.io.write_utils import create_spec_inds_from_vals
from ....core.io.hdf_writer import HDFwriter
from ....core.io.write_utils import create_spec_inds_from_vals
from ....core.io.io_utils import get_available_memory, recommend_cpu_cores
from ....core.io.virtual_data import VirtualDataset, VirtualGroup
from ....analysis.optimize import Optimize
from ....processing.histogram import build_histogram
from ....viz.be_viz_utils import plot_1d_spectrum, plot_2d_spectrogram, plot_histograms
from ...hdf_writer import HDFwriter
from ...virtual_data import VirtualDataset, VirtualGroup
nf32 = np.dtype({'names': ['super_band', 'inter_bin_band', 'sub_band'],
'formats': [np.float32, np.float32, np.float32]})
......
......@@ -13,8 +13,9 @@ import numpy as np # For array operations
from scipy.io import loadmat
from ...core.io.translator import Translator
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.virtual_data import VirtualDataset # building blocks for defining hierarchical storage in the H5 file
from ...core.io.write_utils import Dimension
from ..write_utils import build_ind_val_dsets
from ..virtual_data import VirtualDataset # building blocks for defining hierarchical storage in the H5 file
class ForcIVTranslator(Translator):
......
......@@ -17,9 +17,9 @@ from ...core.io.translator import Translator, \
generate_dummy_main_parms # Because this class extends the abstract Translator class
from ...core.io.write_utils import make_indices_matrix, get_aux_dset_slicing, INDICES_DTYPE, VALUES_DTYPE
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
from ...core.io.hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
from ..hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
# The building blocks for defining heirarchical storage in the H5 file
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..virtual_data import VirtualGroup, VirtualDataset
class GDMTranslator(Translator):
......
......@@ -15,10 +15,11 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import parmsToDict
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import VALUES_DTYPE
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.write_utils import Dimension
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
from ...core.io.hdf_writer import HDFwriter
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..write_utils import build_ind_val_dsets
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
class GLineTranslator(Translator):
......
......@@ -18,10 +18,11 @@ from .df_utils.be_utils import parmsToDict
from .gmode_line import GLineTranslator
from ...core.io.translator import generate_dummy_main_parms
from ...core.io.write_utils import VALUES_DTYPE
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.write_utils import Dimension
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
from ...core.io.hdf_writer import HDFwriter
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..write_utils import build_ind_val_dsets
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
class GTuneTranslator(GLineTranslator):
......
......@@ -14,10 +14,11 @@ from igor import binarywave as bw
from ...core.io.translator import Translator, \
generate_dummy_main_parms # Because this class extends the abstract Translator class
from ...core.io.write_utils import VALUES_DTYPE
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.write_utils import Dimension
from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
from ...core.io.hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
from ...core.io.virtual_data import VirtualGroup, \
from ..write_utils import build_ind_val_dsets
from ..hdf_writer import HDFwriter # Now the translator is responsible for writing the data.
from ..virtual_data import VirtualGroup, \
VirtualDataset # The building blocks for defining hierarchical storage in the H5 file
......
......@@ -13,10 +13,11 @@ from skimage.measure import block_reduce
from .df_utils.io_image import read_image
from ...core.io.translator import Translator, generate_dummy_main_parms
from ...core.io.write_utils import build_ind_val_dsets, Dimension
from ...core.io.write_utils import Dimension
from ...core.io.hdf_utils import get_h5_obj_refs, calc_chunks, link_as_main, find_dataset
from ...core.io.hdf_writer import HDFwriter
from ...core.io.virtual_data import VirtualGroup, VirtualDataset
from ..write_utils import build_ind_val_dsets
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
class ImageTranslator(Translator):
......