Commit aa8b998d authored by Somnath, Suhas's avatar Somnath, Suhas Committed by Chris Smith
Browse files

renamed write_data() to write()

parent 2fce9f86
......@@ -105,7 +105,7 @@ hdf = px.HDFwriter(h5_path)
# The writeData method builds the hdf5 file using the structure defined by the
# MicroData objects. It returns a list of references to all h5py objects in the
# new file.
h5_refs = hdf.write_data(root_group, print_log=True)
h5_refs = hdf.write(root_group, print_log=True)
# We can use these references to get the h5py dataset and group objects
h5_main = px.io.hdf_utils.getH5DsetRefs(['Main_Data'], h5_refs)[0]
......
......@@ -182,7 +182,7 @@ Now that we have created the objects, we can write them to an hdf5 file
The write_data method builds the hdf5 file using the structure defined by the
The write method builds the hdf5 file using the structure defined by the
MicroData objects. It returns a list of references to all h5py objects in the
new file.
......@@ -190,7 +190,7 @@ new file.
.. code-block:: python
h5_refs = hdf.write_data(root_group, print_log=True)
h5_refs = hdf.write(root_group, print_log=True)
# We can use these references to get the h5py dataset and group objects
h5_main = px.io.hdf_utils.get_h5_obj_refs(['Main_Data'], h5_refs)[0]
......
......@@ -368,7 +368,7 @@ for at_name in cluster_grp.attrs:
#
# Once the tree is prepared (previous cell), ioHDF5 will handle all the file writing.
h5_clust_refs = hdf.write_data(cluster_grp, print_log=True)
h5_clust_refs = hdf.write(cluster_grp, print_log=True)
h5_labels = px.hdf_utils.getH5DsetRefs(['Labels'], h5_clust_refs)[0]
h5_centroids = px.hdf_utils.getH5DsetRefs(['Mean_Response'], h5_clust_refs)[0]
......
......@@ -528,7 +528,7 @@ Once the tree is prepared (previous cell), ioHDF5 will handle all the file writi
.. code-block:: python
h5_clust_refs = hdf.write_data(cluster_grp, print_log=True)
h5_clust_refs = hdf.write(cluster_grp, print_log=True)
h5_labels = px.hdf_utils.get_h5_obj_refs(['Labels'], h5_clust_refs)[0]
h5_centroids = px.hdf_utils.get_h5_obj_refs(['Mean_Response'], h5_clust_refs)[0]
......
......@@ -142,7 +142,7 @@ class ShoGuess(px.Process):
sho_grp.add_children([ds_guess, ds_sho_inds, ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp)
h5_sho_grp_refs = self.hdf.write(sho_grp)
self.h5_guess = px.hdf_utils.getH5DsetRefs(['Guess'], h5_sho_grp_refs)[0]
self.h5_results_grp = self.h5_guess.parent
......
......@@ -160,7 +160,7 @@ Note that:
sho_grp.add_children([ds_guess, ds_sho_inds, ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp)
h5_sho_grp_refs = self.hdf.write(sho_grp)
self.h5_guess = px.hdf_utils.get_h5_obj_refs(['Guess'], h5_sho_grp_refs)[0]
self.h5_results_grp = self.h5_guess.parent
......
......@@ -105,7 +105,7 @@ hdf = px.HDFwriter(h5_path)
# The writeData method builds the hdf5 file using the structure defined by the
# MicroData objects. It returns a list of references to all h5py objects in the
# new file.
h5_refs = hdf.write_data(root_group, print_log=True)
h5_refs = hdf.write(root_group, print_log=True)
# We can use these references to get the h5py dataset and group objects
h5_main = px.io.hdf_utils.getH5DsetRefs(['Main_Data'], h5_refs)[0]
......
......@@ -182,7 +182,7 @@ Now that we have created the objects, we can write them to an hdf5 file
The write_data method builds the hdf5 file using the structure defined by the
The write method builds the hdf5 file using the structure defined by the
MicroData objects. It returns a list of references to all h5py objects in the
new file.
......@@ -190,7 +190,7 @@ new file.
.. code-block:: python
h5_refs = hdf.write_data(root_group, print_log=True)
h5_refs = hdf.write(root_group, print_log=True)
# We can use these references to get the h5py dataset and group objects
h5_main = px.io.hdf_utils.get_h5_obj_refs(['Main_Data'], h5_refs)[0]
......
......@@ -368,7 +368,7 @@ for at_name in cluster_grp.attrs:
#
# Once the tree is prepared (previous cell), ioHDF5 will handle all the file writing.
h5_clust_refs = hdf.write_data(cluster_grp, print_log=True)
h5_clust_refs = hdf.write(cluster_grp, print_log=True)
h5_labels = px.hdf_utils.getH5DsetRefs(['Labels'], h5_clust_refs)[0]
h5_centroids = px.hdf_utils.getH5DsetRefs(['Mean_Response'], h5_clust_refs)[0]
......
......@@ -528,7 +528,7 @@ Once the tree is prepared (previous cell), ioHDF5 will handle all the file writi
.. code-block:: python
h5_clust_refs = hdf.write_data(cluster_grp, print_log=True)
h5_clust_refs = hdf.write(cluster_grp, print_log=True)
h5_labels = px.hdf_utils.get_h5_obj_refs(['Labels'], h5_clust_refs)[0]
h5_centroids = px.hdf_utils.get_h5_obj_refs(['Mean_Response'], h5_clust_refs)[0]
......
......@@ -142,7 +142,7 @@ class ShoGuess(px.Process):
sho_grp.add_children([ds_guess, ds_sho_inds, ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp)
h5_sho_grp_refs = self.hdf.write(sho_grp)
self.h5_guess = px.hdf_utils.getH5DsetRefs(['Guess'], h5_sho_grp_refs)[0]
self.h5_results_grp = self.h5_guess.parent
......
......@@ -160,7 +160,7 @@ Note that:
sho_grp.add_children([ds_guess, ds_sho_inds, ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp)
h5_sho_grp_refs = self.hdf.write(sho_grp)
self.h5_guess = px.hdf_utils.get_h5_obj_refs(['Guess'], h5_sho_grp_refs)[0]
self.h5_results_grp = self.h5_guess.parent
......
......@@ -111,7 +111,7 @@ hdf = px.HDFwriter(h5_path)
# The writeData method builds the hdf5 file using the structure defined by the
# MicroData objects. It returns a list of references to all h5py objects in the
# new file.
h5_refs = hdf.write_data(root_group, print_log=True)
h5_refs = hdf.write(root_group, print_log=True)
# We can use these references to get the h5py dataset and group objects
h5_main = px.io.hdf_utils.getH5DsetRefs(['Main_Data'], h5_refs)[0]
......
......@@ -367,7 +367,7 @@ for at_name in cluster_grp.attrs:
#
# Once the tree is prepared (previous cell), ioHDF5 will handle all the file writing.
h5_clust_refs = hdf.write_data(cluster_grp, print_log=True)
h5_clust_refs = hdf.write(cluster_grp, print_log=True)
h5_labels = px.hdf_utils.getH5DsetRefs(['Labels'], h5_clust_refs)[0]
h5_centroids = px.hdf_utils.getH5DsetRefs(['Mean_Response'], h5_clust_refs)[0]
......
......@@ -142,7 +142,7 @@ class ShoGuess(px.Process):
sho_grp.add_children([ds_guess, ds_sho_inds, ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp)
h5_sho_grp_refs = self.hdf.write(sho_grp)
self.h5_guess = px.hdf_utils.getH5DsetRefs(['Guess'], h5_sho_grp_refs)[0]
self.h5_results_grp = self.h5_guess.parent
......
......@@ -7,16 +7,21 @@ Submodules
.. autosummary::
:toctree: _autosummary
analysis
io
processing
viz
core
"""
from . import core
from .core import *
from warnings import warn
from . import io
from .io import translators
from . import analysis
from . import processing
from .__version__ import version as __version__
from .__version__ import time as __time__
__all__ = core.__all__
warn('You are using the unity_dev branch, which is aimed at a 1.0 release for pycroscopy. '
'Be advised - this branch changes very significantly and frequently. It is therefore not meant for usage. '
'Use the master or dev branches for regular purposes.')
......@@ -475,7 +475,7 @@ class BELoopFitter(Fitter):
proj_grp.add_children([ds_projected_loops, ds_loop_metrics,
ds_loop_met_spec_inds, ds_loop_met_spec_vals])
h5_proj_grp_refs = self.hdf.write_data(proj_grp)
h5_proj_grp_refs = self.hdf.write(proj_grp)
self.h5_projected_loops = get_h5_obj_refs(['Projected_Loops'], h5_proj_grp_refs)[0]
self.h5_loop_metrics = get_h5_obj_refs(['Loop_Metrics'], h5_proj_grp_refs)[0]
self._met_spec_inds = get_h5_obj_refs(['Loop_Metrics_Indices'], h5_proj_grp_refs)[0]
......
......@@ -87,7 +87,7 @@ class BESHOfitter(Fitter):
ds_sho_vals])
sho_grp.attrs['SHO_guess_method'] = "pycroscopy BESHO"
h5_sho_grp_refs = self.hdf.write_data(sho_grp, print_log=self._verbose)
h5_sho_grp_refs = self.hdf.write(sho_grp, print_log=self._verbose)
self.h5_guess = get_h5_obj_refs(['Guess'], h5_sho_grp_refs)[0]
h5_sho_inds = get_h5_obj_refs(['Spectroscopic_Indices'],
......
......@@ -147,7 +147,7 @@ class GIVBayesian(Process):
bayes_grp.show_tree()
self.hdf = HDFwriter(self.h5_main.file)
h5_refs = self.hdf.write_data(bayes_grp, print_log=self.verbose)
h5_refs = self.hdf.write(bayes_grp, print_log=self.verbose)
self.h5_new_spec_vals = get_h5_obj_refs(['Spectroscopic_Values'], h5_refs)[0]
h5_new_spec_inds = get_h5_obj_refs(['Spectroscopic_Indices'], h5_refs)[0]
......
......@@ -334,7 +334,7 @@ def fit_atom_positions_dset(h5_grp, fitting_parms=None, num_cores=None):
dgrp_atom_finding.add_children([ds_atom_guesses, ds_atom_fits])
hdf = HDFwriter(h5_grp.file)
h5_atom_refs = hdf.write_data(dgrp_atom_finding)
h5_atom_refs = hdf.write(dgrp_atom_finding)
return h5_grp
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment