Commit 7f499d48 authored by Somnath, Suhas

importing pyUSID instead of core

parent 50a5b08b
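
The change is purely an import migration: helpers that previously lived under pycroscopy's internal `..core.io` / `..core.processing` / `..core.viz` subpackages are now taken from the standalone pyUSID package, and `PycroDataset` is replaced by its successor, `USIDataset`. A minimal sketch of the before/after pattern follows; the HDF5 file name and dataset path are placeholders and not part of this commit:

```python
# Old pattern (removed in this commit): relative imports into pycroscopy.core
# from ..core.io.hdf_utils import get_attr, write_simple_attrs
# from ..core.io.pycro_data import PycroDataset

# New pattern: the same helpers come from the standalone pyUSID package
import h5py
from pyUSID import USIDataset
from pyUSID.io.hdf_utils import get_attr, write_simple_attrs

with h5py.File('be_relaxation_0001.h5', mode='r+') as h5_file:  # placeholder file name
    # USIDataset is the drop-in successor of PycroDataset: it wraps an h5py.Dataset
    # and links to its position / spectroscopic ancillary datasets
    h5_main = USIDataset(h5_file['Measurement_000/Channel_000/Raw_Data'])  # placeholder path
    print(get_attr(h5_main, 'quantity'))
    write_simple_attrs(h5_main.parent, {'notes': 'migrated to pyUSID imports'})
```
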
@@ -21,11 +21,11 @@ from ..processing.tree import ClusterTree
from .be_sho_fitter import sho32
from .fit_methods import BE_Fit_Methods
from .optimize import Optimize
-from ..core.io.dtype_utils import flatten_compound_to_real, stack_real_to_compound
-from ..core.io.hdf_utils import copy_region_refs, \
+from pyUSID.io.dtype_utils import flatten_compound_to_real, stack_real_to_compound
+from pyUSID.io.hdf_utils import copy_region_refs, \
get_sort_order, get_dimensionality, reshape_to_n_dims, reshape_from_n_dims, get_attr, \
create_empty_dataset, create_results_group, write_reduced_spec_dsets, write_simple_attrs, write_main_dataset
-from ..core.io.pycro_data import PycroDataset
+from pyUSID import USIDataset
'''
Custom dtypes for the datasets created during fitting.
@@ -279,7 +279,7 @@ class BELoopFitter(Fitter):
if get_loop_parameters:
self.h5_guess_parameters = self.extract_loop_parameters(self.h5_guess)
-return PycroDataset(self.h5_guess)
+return USIDataset(self.h5_guess)
def do_fit(self, processors=None, max_mem=None, solver_type='least_squares', solver_options=None,
obj_func=None,
@@ -411,7 +411,7 @@ class BELoopFitter(Fitter):
if get_loop_parameters:
self.h5_fit_parameters = self.extract_loop_parameters(self.h5_fit)
-return PycroDataset(self.h5_fit)
+return USIDataset(self.h5_fit)
@staticmethod
def extract_loop_parameters(h5_loop_fit, nuc_threshold=0.03):
......
@@ -8,8 +8,8 @@ from warnings import warn
import numpy as np
from .fitter import Fitter
-from ..core.io.pycro_data import PycroDataset
-from ..core.io.hdf_utils import copy_region_refs, write_simple_attrs, create_results_group, write_reduced_spec_dsets, \
+from pyUSID import USIDataset
+from pyUSID.io.hdf_utils import copy_region_refs, write_simple_attrs, create_results_group, write_reduced_spec_dsets, \
create_empty_dataset, get_auxiliary_datasets, write_main_dataset
'''
@@ -111,7 +111,7 @@ class BESHOfitter(Fitter):
# Create the fit dataset as an empty dataset of the same size and dtype as the guess.
# Also automatically links in the ancillary datasets.
-self.h5_fit = PycroDataset(create_empty_dataset(self.h5_guess, dtype=sho32, dset_name='Fit'))
+self.h5_fit = USIDataset(create_empty_dataset(self.h5_guess, dtype=sho32, dset_name='Fit'))
# This is necessary comparing against new runs to avoid re-computation + resuming partial computation
write_simple_attrs(self.h5_fit, self._parms_dict)
......
@@ -12,9 +12,9 @@ import h5py
import time as tm
from .guess_methods import GuessMethods
from .fit_methods import Fit_Methods
-from ..core.io.pycro_data import PycroDataset
-from ..core.io.io_utils import get_available_memory, recommend_cpu_cores, format_time
-from ..core.io.hdf_utils import check_for_old, find_results_groups, check_for_matching_attrs, get_attr
+from pyUSID import USIDataset
+from pyUSID.io.io_utils import get_available_memory, recommend_cpu_cores, format_time
+from pyUSID.io.hdf_utils import check_for_old, find_results_groups, check_for_matching_attrs, get_attr
from .optimize import Optimize
@@ -43,8 +43,8 @@ class Fitter(object):
"""
-if not isinstance(h5_main, PycroDataset):
-h5_main = PycroDataset(h5_main)
+if not isinstance(h5_main, USIDataset):
+h5_main = USIDataset(h5_main)
# Checking if dataset has the proper dimensions for the model to run.
if self._is_legal(h5_main, variables):
@@ -107,7 +107,7 @@ class Fitter(object):
Parameters
----
-h5_main : PycroDataset instance
+h5_main : USIDataset instance
The dataset over which the analysis will be performed. This dataset should be linked to the spectroscopic
indices and values, and position indices and values datasets.
@@ -317,7 +317,7 @@ class Fitter(object):
# First try to simply return any completed computation
if len(completed_dsets) > 0:
print('Returned previously computed results at ' + completed_dsets[-1].name)
-self.h5_guess = PycroDataset(completed_dsets[-1])
+self.h5_guess = USIDataset(completed_dsets[-1])
return
# Next attempt to resume automatically if nothing is provided
@@ -394,7 +394,7 @@ class Fitter(object):
print('Completed computing guess')
print()
-return PycroDataset(self.h5_guess)
+return USIDataset(self.h5_guess)
def _reformat_results(self, results, strategy='wavelet_peaks'):
"""
@@ -564,7 +564,7 @@ class Fitter(object):
# First try to simply return completed results
if len(completed_fits) > 0:
print('Returned previously computed results at ' + completed_fits[-1].name)
-self.h5_fit = PycroDataset(completed_fits[-1])
+self.h5_fit = USIDataset(completed_fits[-1])
return
# Next, attempt to resume automatically:
@@ -652,4 +652,4 @@ class Fitter(object):
print('Completed computing fit. Writing to file.')
-return PycroDataset(self.h5_fit)
+return USIDataset(self.h5_fit)
@@ -9,12 +9,12 @@ Created on Thu Nov 02 11:48:53 2017
from __future__ import division, print_function, absolute_import, unicode_literals
import numpy as np
-from ..core.processing.process import Process, parallel_compute
-from ..core.io.dtype_utils import stack_real_to_compound
-from ..core.io.hdf_utils import write_main_dataset, create_results_group, create_empty_dataset, write_simple_attrs, \
+from pyUSID.processing.process import Process, parallel_compute
+from pyUSID.io.dtype_utils import stack_real_to_compound
+from pyUSID.io.hdf_utils import write_main_dataset, create_results_group, create_empty_dataset, write_simple_attrs, \
print_tree, get_attributes
-from ..core.io.write_utils import Dimension
-from ..core.io.pycro_data import PycroDataset
+from pyUSID.io.write_utils import Dimension
+from pyUSID import USIDataset
from .utils.giv_utils import do_bayesian_inference, bayesian_inference_on_period
cap_dtype = np.dtype({'names': ['Forward', 'Reverse'],
@@ -53,7 +53,7 @@ class GIVBayesian(Process):
if self.verbose:
print('ensuring that half steps should be odd, num_x_steps is now', self.num_x_steps)
-self.h5_main = PycroDataset(self.h5_main)
+self.h5_main = USIDataset(self.h5_main)
# take these from kwargs
bayesian_parms = {'gam': 0.03, 'e': 10.0, 'sigma': 10.0, 'sigmaC': 1.0, 'num_samples': 2E3}
@@ -164,7 +164,7 @@ class GIVBayesian(Process):
print('Created Resistance')
print_tree(h5_group)
-assert isinstance(self.h5_resistance, PycroDataset) # only here for PyCharm
+assert isinstance(self.h5_resistance, USIDataset) # only here for PyCharm
self.h5_new_spec_vals = self.h5_resistance.h5_spec_vals
# The variance is identical to the resistance dataset
......
@@ -17,8 +17,8 @@ from sklearn.neighbors import KNeighborsClassifier
import matplotlib.pyplot as plt
import matplotlib.patches as patches
-from ...core.io.io_utils import recommend_cpu_cores
-from ...core.io.dtype_utils import stack_real_to_compound
+from pyUSID.io.io_utils import recommend_cpu_cores
+from pyUSID.io.dtype_utils import stack_real_to_compound
from ...io.virtual_data import VirtualDataset, VirtualGroup
from ...io.hdf_writer import HDFwriter
......
@@ -11,10 +11,10 @@ import multiprocessing as mp
import time as tm
import matplotlib.pyplot as plt
-from ...core.io.io_utils import recommend_cpu_cores
+from pyUSID.io.io_utils import recommend_cpu_cores
from ...io.virtual_data import VirtualDataset, VirtualGroup
from ...io.hdf_writer import HDFwriter
-from ...core.viz.plot_utils import cmap_jet_white_center
+from pyUSID.viz.plot_utils import cmap_jet_white_center
def do_fit(single_parm):
......
@@ -10,8 +10,8 @@ import numpy as np
import matplotlib.pyplot as plt
from scipy.linalg import sqrtm
-from ...core.io.hdf_utils import get_auxiliary_datasets
-from ...core.viz.plot_utils import set_tick_font_size
+from pyUSID.io.hdf_utils import get_auxiliary_datasets
+from pyUSID.viz.plot_utils import set_tick_font_size
def do_bayesian_inference(i_meas, bias, freq, num_x_steps=251, r_extra=110, gam=0.03, e=10.0, sigma=10., sigmaC=1.,
......
@@ -13,7 +13,7 @@ from time import time, sleep
from warnings import warn
import h5py
-from ..core.io.hdf_utils import assign_group_index, write_simple_attrs, attempt_reg_ref_build, write_region_references
+from pyUSID.io.hdf_utils import assign_group_index, write_simple_attrs, attempt_reg_ref_build, write_region_references
from .virtual_data import VirtualGroup, VirtualDataset, VirtualData
from ..__version__ import version
......
@@ -15,9 +15,9 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, parmsToDict, generatePlotGroups, \
createSpecVals, requires_conjugate, nf32
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import INDICES_DTYPE, VALUES_DTYPE, Dimension, calc_chunks
-from ...core.io.hdf_utils import write_ind_val_dsets, write_main_dataset, write_region_references, \
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import INDICES_DTYPE, VALUES_DTYPE, Dimension, calc_chunks
+from pyUSID.io.hdf_utils import write_ind_val_dsets, write_main_dataset, write_region_references, \
create_indexed_group, write_simple_attrs, write_book_keeping_attrs
......
@@ -2,7 +2,7 @@
"""
Created on Thursday May 26 11:23:00 2016
-@author: Rama Vasudevan, Suhas Somnath
+@author: Rama Vasudevan, Suhas Somnath, Chris Smith
"""
from __future__ import division, print_function, absolute_import, unicode_literals
@@ -16,9 +16,9 @@ import h5py
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, generatePlotGroups, createSpecVals, maxReadPixels, \
nf32
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import INDICES_DTYPE, Dimension
-from ...core.io.hdf_utils import create_indexed_group, write_main_dataset, write_simple_attrs
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import INDICES_DTYPE, Dimension
+from pyUSID.io.hdf_utils import create_indexed_group, write_main_dataset, write_simple_attrs
class BEodfRelaxationTranslator(Translator):
......
@@ -7,12 +7,12 @@ import numpy as np
from sklearn.utils import gen_batches
from skimage.measure import block_reduce
# Pycroscopy imports
-from ...core.io.hdf_utils import get_h5_obj_refs, link_as_main, get_attr
-from ...core.io.dtype_utils import stack_real_to_compound
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.pycro_data import PycroDataset
-from ...core.io.write_utils import Dimension, calc_chunks
-from ...core.io.image import read_image
+from pyUSID.io.hdf_utils import get_h5_obj_refs, link_as_main, get_attr
+from pyUSID.io.dtype_utils import stack_real_to_compound
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID import USIDataset
+from pyUSID.io.write_utils import Dimension, calc_chunks
+from pyUSID.io.image import read_image
from ...analysis.utils.be_loop import loop_fit_function
from ...analysis.utils.be_sho import SHOfunc
from ...analysis.be_sho_fitter import sho32
@@ -558,11 +558,11 @@ class FakeBEPSGenerator(Translator):
link_as_main(h5_loop_fit, h5_pos_inds, h5_pos_vals, h5_loop_spec_inds, h5_loop_spec_vals)
link_as_main(h5_loop_guess, h5_pos_inds, h5_pos_vals, h5_loop_spec_inds, h5_loop_spec_vals)
-self.h5_raw = PycroDataset(h5_raw)
-self.h5_sho_guess = PycroDataset(h5_sho_guess)
-self.h5_sho_fit = PycroDataset(h5_sho_fit)
-self.h5_loop_guess = PycroDataset(h5_loop_guess)
-self.h5_loop_fit = PycroDataset(h5_loop_fit)
+self.h5_raw = USIDataset(h5_raw)
+self.h5_sho_guess = USIDataset(h5_sho_guess)
+self.h5_sho_fit = USIDataset(h5_sho_fit)
+self.h5_loop_guess = USIDataset(h5_loop_guess)
+self.h5_loop_fit = USIDataset(h5_loop_fit)
self.h5_spec_vals = h5_spec_vals
self.h5_spec_inds = h5_spec_inds
self.h5_sho_spec_inds = h5_sho_spec_inds
......
@@ -17,10 +17,10 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import trimUDVS, getSpectroscopicParmLabel, parmsToDict, generatePlotGroups, \
normalizeBEresponse, createSpecVals, nf32
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import make_indices_matrix, VALUES_DTYPE, INDICES_DTYPE, calc_chunks
-from ...core.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
-from ...core.io.pycro_data import PycroDataset
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import make_indices_matrix, VALUES_DTYPE, INDICES_DTYPE, calc_chunks
+from pyUSID.io.hdf_utils import get_h5_obj_refs, link_h5_objects_as_attrs
+from pyUSID.io.usi_data import USIDataset
from ..hdf_writer import HDFwriter
from ..virtual_data import VirtualGroup, VirtualDataset
@@ -333,7 +333,7 @@ class BEPSndfTranslator(Translator):
link_h5_objects_as_attrs(self.ds_main, get_h5_obj_refs(aux_ds_names, h5_refs))
# While we have all the references and mean data, write the plot groups as well:
-generatePlotGroups(PycroDataset(self.ds_main), self.mean_resp,
+generatePlotGroups(USIDataset(self.ds_main), self.mean_resp,
self.folder_path, self.basename,
self.max_resp, self.min_resp,
max_mem_mb=self.max_ram,
......
@@ -11,9 +11,9 @@ from collections import OrderedDict
import numpy as np
import h5py
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import Dimension
-from ...core.io.hdf_utils import create_indexed_group, write_main_dataset, write_simple_attrs, write_ind_val_dsets
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import Dimension
+from pyUSID.io.hdf_utils import create_indexed_group, write_main_dataset, write_simple_attrs, write_ind_val_dsets
from .df_utils.base_utils import read_binary_data
# TODO: Adopt missing aspects / features from https://github.com/paruch-group/distortcorrect/blob/master/afm/filereader/readNanoscope.py
......
@@ -15,10 +15,10 @@ import matplotlib.pyplot as plt
import numpy as np
import xlrd as xlreader
-from ....core.io.hdf_utils import get_auxiliary_datasets, find_dataset, get_h5_obj_refs, link_h5_objects_as_attrs, \
+from pyUSID.io.hdf_utils import get_auxiliary_datasets, find_dataset, get_h5_obj_refs, link_h5_objects_as_attrs, \
get_attr, create_indexed_group, write_simple_attrs, write_main_dataset, Dimension
-from ....core.io.write_utils import create_spec_inds_from_vals
-from ....core.io.io_utils import get_available_memory, recommend_cpu_cores
+from pyUSID.io.write_utils import create_spec_inds_from_vals
+from pyUSID.io.io_utils import get_available_memory, recommend_cpu_cores
from ....analysis.optimize import Optimize
from ....processing.histogram import build_histogram
from ....viz.be_viz_utils import plot_1d_spectrum, plot_2d_spectrogram, plot_histograms
......
@@ -12,9 +12,9 @@ import h5py
import numpy as np # For array operations
from scipy.io import loadmat
-from ...core.io.translator import Translator
-from ...core.io.write_utils import Dimension
-from ...core.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs
+from pyUSID.io.translator import Translator
+from pyUSID.io.write_utils import Dimension
+from pyUSID.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs
class ForcIVTranslator(Translator):
......
@@ -14,10 +14,10 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
import h5py
from .df_utils.gmode_utils import readGmodeParms
-from ...core.io.translator import Translator, \
+from pyUSID.io.translator import Translator, \
generate_dummy_main_parms # Because this class extends the abstract Translator class
-from ...core.io.write_utils import VALUES_DTYPE, Dimension
-from ...core.io.hdf_utils import link_h5_objects_as_attrs, create_indexed_group, \
+from pyUSID.io.write_utils import VALUES_DTYPE, Dimension
+from pyUSID.io.hdf_utils import link_h5_objects_as_attrs, create_indexed_group, \
write_simple_attrs, write_main_dataset
......
@@ -14,10 +14,10 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
import h5py
from .df_utils.gmode_utils import readGmodeParms
-from ...core.io.translator import Translator, \
+from pyUSID.io.translator import Translator, \
generate_dummy_main_parms # Because this class extends the abstract Translator class
-from ...core.io.write_utils import VALUES_DTYPE, Dimension
-from ...core.io.hdf_utils import link_h5_objects_as_attrs, create_indexed_group, \
+from pyUSID.io.write_utils import VALUES_DTYPE, Dimension
+from pyUSID.io.hdf_utils import link_h5_objects_as_attrs, create_indexed_group, \
write_simple_attrs, write_main_dataset
......
@@ -13,9 +13,9 @@ from warnings import warn
import h5py
import numpy as np # For array operations
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import Dimension
-from ...core.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import Dimension
+from pyUSID.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs
class GIVTranslator(Translator):
......
@@ -14,9 +14,9 @@ import numpy as np
from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat file
from .df_utils.be_utils import parmsToDict
-from ...core.io.translator import Translator, generate_dummy_main_parms
-from ...core.io.write_utils import VALUES_DTYPE, Dimension
-from ...core.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs, write_ind_val_dsets
+from pyUSID.io.translator import Translator, generate_dummy_main_parms
+from pyUSID.io.write_utils import VALUES_DTYPE, Dimension
+from pyUSID.io.hdf_utils import write_main_dataset, create_indexed_group, write_simple_attrs, write_ind_val_dsets
class GLineTranslator(Translator):
......
@@ -16,9 +16,9 @@ from scipy.io.matlab import loadmat # To load parameters stored in Matlab .mat
from .df_utils.be_utils import parmsToDict
from .gmode_line import GLineTranslator
-from ...core.io.translator import generate_dummy_main_parms
-from ...core.io.write_utils import VALUES_DTYPE, Dimension
-from ...core.io.hdf_utils import write_simple_attrs, create_indexed_group, write_ind_val_dsets, write_main_dataset
+from pyUSID.io.translator import generate_dummy_main_parms
+from pyUSID.io.write_utils import VALUES_DTYPE, Dimension
+from pyUSID.io.hdf_utils import write_simple_attrs, create_indexed_group, write_ind_val_dsets, write_main_dataset
class GTuneTranslator(GLineTranslator):
......