Vasudevan, Rama K / pycroscopy / Commits / 52cee572

Commit 52cee572, authored Aug 22, 2017 by Unknown

Code cleanups

parent 45cab2ca
Changes: 17
pycroscopy/analysis/optimize.py

@@ -156,7 +156,7 @@ class Optimize(object):
         self.solver_type = solver_type
         self.solver_options = solver_options
         if self.solver_type not in scipy.optimize.__dict__.keys():
-            warn('Solver %s does not exist!. For additional info see scipy.optimize' % (solver_type))
+            warn('Solver %s does not exist!. For additional info see scipy.optimize' % solver_type)
             sys.exit()
         if obj_func['class'] is None:
             self.obj_func = obj_func['obj_func']
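The check above treats scipy.optimize's module namespace as the registry of available solvers. A minimal sketch of how that membership test behaves (it only assumes scipy is installed; the solver names below are standard scipy.optimize functions, not part of this commit):

    import scipy.optimize

    # Names of real optimizers resolve as module attributes...
    print('least_squares' in scipy.optimize.__dict__.keys())   # True
    # ...while a typo fails the test and would trigger the warn() shown in the diff.
    print('least_sqares' in scipy.optimize.__dict__.keys())    # False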
pycroscopy/io/hdf_utils.py

@@ -129,8 +129,8 @@ def getAuxData(parent_data, auxDataName=None):
         auxDataName = parent_data.attrs.keys()
     elif type(auxDataName) not in [list, tuple, set]:
         auxDataName = [auxDataName]  # typically a single string
-    data_list = list()
     try:
+        data_list = []
         file_ref = parent_data.file
         for auxName in auxDataName:
             ref = parent_data.attrs[auxName]

@@ -197,14 +197,15 @@ def get_attributes(parent_data, attr_names=None):
         attr_names = [attr_names]

     att_dict = {}
-    try:
     for attr in attr_names:
+        try:
             att_dict[attr] = get_attr(parent_data, attr)
         except KeyError:
             warn('%s is not an attribute of %s'
                  % (str(attr), parent_data.name))
         except:
             raise
     return att_dict

@@ -410,6 +411,7 @@ def getH5RegRefIndices(ref, h5_main, return_method='slices'):
             ref_inds = return_func(start, end)
         else:
             warn('No method currently exists for converting this type of reference.')
+            ref_inds = np.empty(0)
     else:
         raise TypeError('Input ref must be an HDF5 Region Reference')

@@ -740,11 +742,9 @@ def reshape_to_Ndims(h5_main, h5_pos=None, h5_spec=None, get_labels=False):
             ds_labels = np.hstack([pos_labs, spec_labs])
-            results = (ds_Nd2, True, ds_labels)
+            return ds_Nd2, True, ds_labels
         else:
-            results = (ds_Nd2, True)
-        return results
+            return ds_Nd2, True

 def reshape_from_Ndims(ds_Nd, h5_pos=None, h5_spec=None):
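After this cleanup reshape_to_Ndims returns its results directly instead of staging them in a local tuple, so callers unpack them exactly as before. A minimal calling-side sketch (only the signature in the hunk header above is assumed; h5_main stands for a main dataset already opened from a pycroscopy HDF5 file and is not defined here):

    from pycroscopy.io.hdf_utils import reshape_to_Ndims

    # With get_labels=True the function yields the N-dimensional array, a success flag
    # and the stacked position/spectroscopic labels, in that order.
    ds_nd, success, labels = reshape_to_Ndims(h5_main, get_labels=True)

    # Without labels, only the first two values come back.
    ds_nd, success = reshape_to_Ndims(h5_main)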
pycroscopy/io/io_utils.py

@@ -23,14 +23,14 @@ def check_ssh():
     return 'SSH_CLIENT' in os.environ or 'SSH_TTY' in os.environ


-def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
+def uiGetFile(file_filter='H5 file (*.h5)', caption='Select File'):
     """
     Presents a File dialog used for selecting the .mat file
     and returns the absolute filepath of the selecte file\n
     Parameters
     ----------
-    filter : String or list of strings
+    file_filter : String or list of strings
         file extensions to look for
     caption : (Optional) String
         Title for the file browser window

@@ -51,7 +51,7 @@ def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
         raise
     else:
         app = QtWidgets.QApplication([])
-        path = QtWidgets.QFileDialog.getOpenFileName(caption=caption, filter=filter)[0]
+        path = QtWidgets.QFileDialog.getOpenFileName(caption=caption, filter=file_filter)[0]
         app.closeAllWindows()
         app.exit()
         del app

@@ -66,7 +66,7 @@ def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
         raise
     else:
         app = QtGui.QApplication([])
-        path = QtGui.QFileDialog.getOpenFileName(caption=caption, filter=filter)
+        path = QtGui.QFileDialog.getOpenFileName(caption=caption, filter=file_filter)
         app.exit()
         del app
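Renaming the parameter from filter to file_filter stops the argument from shadowing Python's built-in filter() inside the function; the Qt call still receives it through its own filter keyword. A minimal sketch of the calling side (hypothetical caller; only the new keyword name comes from the diff above, and the call opens a GUI dialog):

    from pycroscopy.io.io_utils import uiGetFile

    # Positional calls are unaffected; keyword callers must switch to the new name.
    h5_path = uiGetFile(file_filter='H5 file (*.h5)', caption='Select File')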
pycroscopy/io/microdata.py

@@ -21,7 +21,7 @@ class MicroData(object):
     """
     def __init__(self, name, parent):
-        '''
+        """
         Parameters
         ----------
         name : String

@@ -29,7 +29,7 @@ class MicroData(object):
         parent : String
             HDF5 path to the parent of this object. Typically used when
             appending to an existing HDF5 file
-        '''
+        """
         self.name = name
         self.attrs = dict()
         self.parent = parent

@@ -65,18 +65,18 @@ class MicroDataGroup(MicroData):
         pass

     def addChildren(self, children):
-        '''
+        """
         Adds Children to the class to make a tree structure.

         Parameters
         ----------
         children : list of MicroData objects
             Children can be a mixture of groups and datasets

         Returns
         -------
         None
-        '''
+        """
         for child in children:
             if isinstance(child, MicroData):
                 child.parent = self.parent + self.name
pycroscopy/io/translators/be_odf_relaxation.py

@@ -181,8 +181,11 @@ class BEodfRelaxationTranslator(Translator):
         ds_wfm_typ = MicroDataset('Bin_Wfm_Type', exec_bin_vec)

         # Create Spectroscopic Values and Spectroscopic Values Labels datasets
-        spec_vals, spec_vals_labs, spec_vals_units = createSpecVals(UDVS_mat, spec_inds, bin_freqs, exec_bin_vec,
-                                                                    parm_dict, UDVS_labs, UDVS_units)
+        spec_vals, spec_inds, spec_vals_labs, spec_vals_units, spec_vals_names = createSpecVals(UDVS_mat, spec_inds,
+                                                                                                bin_freqs,
+                                                                                                exec_bin_vec,
+                                                                                                parm_dict, UDVS_labs,
+                                                                                                UDVS_units)

         spec_vals_slices = dict()
         for row_ind, row_name in enumerate(spec_vals_labs):

@@ -359,7 +362,7 @@ class BEodfRelaxationTranslator(Translator):
         FFT_full = np.fft.fftshift(np.fft.fft(BE_wave))
         bin_FFT = np.conjugate(FFT_full[bin_inds])

-        return (bin_inds, bin_w, bin_FFT, BE_wave, dc_amp_vec_full)
+        return bin_inds, bin_w, bin_FFT, BE_wave, dc_amp_vec_full

     def _parse_file_path(self, data_filepath):
         """

@@ -393,7 +396,7 @@ class BEodfRelaxationTranslator(Translator):
         path_dict['read_imag'] = imag_path
         path_dict['old_mat_parms'] = data_filepath

-        return (basename, path_dict)
+        return basename, path_dict

     @staticmethod
     def __getParmsFromOldMat(file_path):

@@ -493,7 +496,7 @@ class BEodfRelaxationTranslator(Translator):
         elif VS_parms[0] == 2:
             # AC mode
             parm_dict['VS_mode'] = 'AC modulation mode with time reversal'
-            parm_dict['VS_amplitude_[V]'] = 0.5 * (VS_final_loop_amp)
+            parm_dict['VS_amplitude_[V]'] = 0.5 * VS_final_loop_amp
             parm_dict['VS_offset_[V]'] = 0  # this is not correct. Fix manually when it comes to UDVS generation?
         else:

@@ -620,4 +623,4 @@ class BEodfRelaxationTranslator(Translator):
         UD_VS_table[BE_IF_switch == 1, 5] = UD_VS_table[BE_IF_switch == 1, 1]
         UD_VS_table[BE_OF_switch == 1, 6] = UD_VS_table[BE_IF_switch == 1, 1]

-        return (UD_VS_table_label, UD_VS_table_unit, UD_VS_table)
+        return UD_VS_table_label, UD_VS_table_unit, UD_VS_table
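Several of the changes in this file only drop redundant parentheses around return values. A minimal sketch of why that is purely cosmetic (toy function, not from the commit): in Python the comma builds the tuple, so both spellings return the same object.

    def pair():
        return 1, 2               # identical to: return (1, 2)

    bin_inds, bin_w = pair()      # callers unpack either form the same way
    assert pair() == (1, 2)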
pycroscopy/io/translators/df_utils/be_utils.py

@@ -299,7 +299,7 @@ def normalizeBEresponse(spectrogram_mat, FFT_BE_wave, harmonic):
     # Generate transfer functions
     F_AO_spectrogram = np.transpose(np.tile(FFT_BE_wave / scaling_factor, [spectrogram_mat.shape[1], 1]))
     # Divide by transfer function
-    spectrogram_mat = spectrogram_mat / (F_AO_spectrogram)
+    spectrogram_mat = spectrogram_mat / F_AO_spectrogram

     return spectrogram_mat

@@ -729,7 +729,7 @@ def createSpecVals(udvs_mat, spec_inds, bin_freqs, bin_wfm_type, parm_dict,
         Check if more that one unique value
         Append column number to iSpec_var if true
         """
-        if (uvals.size > 1):
+        if uvals.size > 1:
             iSpec_var = np.append(iSpec_var, int(i))

     iSpec_var = np.asarray(iSpec_var, np.int)

@@ -1179,7 +1179,7 @@ BEHistogram Class and Functions
 """

-class BEHistogram():
+class BEHistogram:
     # TODO: Turn into proper class
     # TODO: Parallelize Histogram generation
     """

@@ -1552,7 +1552,7 @@ class BEHistogram():
             udvs_bins = np.where(x_hist[1] == udvs_step)[0]
             if debug:
                 print(np.shape(x_hist))
-            data_mat = h5_main[pix_chunks[ichunk]:pix_chunks[ichunk + 1], (udvs_bins)]
+            data_mat = h5_main[pix_chunks[ichunk]:pix_chunks[ichunk + 1], udvs_bins]
             """
             Get the frequecies that correspond to the current UDVS bins from the total x_hist
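The data_mat change above drops the parentheses around the index array; combining a slice in one axis with an index array in the other behaves identically either way. A minimal NumPy sketch with toy shapes (none of these values come from the commit):

    import numpy as np

    data = np.arange(12).reshape(3, 4)     # stand-in for a chunk of h5_main
    udvs_bins = np.array([0, 2])           # stand-in for the selected bins

    # (udvs_bins) and udvs_bins are the same expression, so the selections match.
    assert np.array_equal(data[1:3, (udvs_bins)], data[1:3, udvs_bins])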
pycroscopy/io/translators/df_utils/parse_dm3.py

@@ -357,7 +357,7 @@ def dm_read_string(f, outdata=None):
         put_into_file(f, ">" + str(slen) + "s", outdata)
         return header_size
     else:
-        assert(False)
+        assert False

     slen = get_from_file(f, ">L")
     raws = get_from_file(f, ">" + str(slen) + "s")
     if verbose:
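assert(False) and assert False compile to the same statement, but the bare form is preferred because the parenthesized style invites asserting a tuple by accident. A minimal sketch (toy condition, not from the commit):

    x = 2
    assert x == 2                  # fails only when the condition is false
    # assert (x == 3, 'message')   # would NEVER fail: a non-empty tuple is always truthy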
pycroscopy/io/translators/general_dynamic_mode.py

@@ -162,7 +162,7 @@ class GDMTranslator(Translator):
                 else:
                     print('File not found for: row {} col {}'.format(row_ind, col_ind))
                 pos_ind += 1
-                if (100.0 * (pos_ind) / num_pix) % 10 == 0:
+                if (100.0 * pos_ind / num_pix) % 10 == 0:
                     print('completed translating {} %'.format(int(100 * pos_ind / num_pix)))

         hdf.close()
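The progress check in this translator (and in sporc.py below) only prints when the completed percentage lands exactly on a multiple of ten. A minimal sketch of that behaviour (num_pix chosen arbitrarily):

    num_pix = 50
    hits = [p for p in range(1, num_pix + 1) if (100.0 * p / num_pix) % 10 == 0]
    print(hits)   # [5, 10, 15, 20, 25, 30, 35, 40, 45, 50] -> one message per 10 %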
pycroscopy/io/translators/sporc.py

@@ -146,7 +146,7 @@ class SporcTranslator(Translator):
                 else:
                     print('File for row {} col {} not found'.format(row_ind, col_ind))
                 pos_ind += 1
-                if (100.0 * (pos_ind) / num_pix) % 10 == 0:
+                if (100.0 * pos_ind / num_pix) % 10 == 0:
                     print('Finished reading {} % of data'.format(int(100 * pos_ind / num_pix)))

         hdf.close()
pycroscopy/processing/atom_finding.py

(diff collapsed in this view)
pycroscopy/processing/feature_extraction.py

@@ -157,9 +157,9 @@ class FeatureExtractorParallel(object):
             return keypts, descs

         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)
-        tasks = [(imp) for imp in self.data]
+        tasks = [imp for imp in self.data]
         chunk = int(self.data.shape[0] / processes)
         jobs = pool.imap(detect, tasks, chunksize=chunk)
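Both edits above are behaviour-preserving: '%i' % (processes) formats a single value just like '%i' % processes, and (imp) in the comprehension is only a parenthesized name, not a 1-tuple. A minimal sketch (toy data, none of these names are from the commit):

    items = [1, 2, 3]
    assert [(x) for x in items] == [x for x in items]         # identical lists
    assert [(x,) for x in items] == [(1,), (2,), (3,)]        # a real 1-tuple needs the comma
    assert 'launching %i kernels...' % 4 == 'launching %i kernels...' % (4)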
pycroscopy/processing/gmode_utils.py

@@ -744,7 +744,7 @@ def reshape_from_lines_to_pixels(h5_main, pts_per_cycle, scan_step_x_m=1):
     if h5_main.shape[1] % pts_per_cycle != 0:
         warn('Error in reshaping the provided dataset to pixels. Check points per pixel')
         raise ValueError
-        return
+
     num_cols = int(h5_main.shape[1] / pts_per_cycle)
     h5_spec_vals = getAuxData(h5_main, auxDataName=['Spectroscopic_Values'])[0]
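Dropping the stray return is safe because nothing after an unconditional raise ever executes. A minimal sketch of the pattern (toy function, not from the commit):

    def must_divide(total, step):
        if total % step != 0:
            raise ValueError('total is not a multiple of step')
            return None          # unreachable: the exception already left the function
        return total // step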
pycroscopy/processing/image_processing.py

@@ -584,7 +584,7 @@ class ImageWindow(object):
         for islice, this_slice in enumerate(win_slices):
             selected = islice % np.rint(n_wins / 10) == 0
             if selected:
-                per_done = np.rint(100 * (islice) / (n_wins))
+                per_done = np.rint(100 * islice / n_wins)
                 print('Reconstructing Image...{}% -- step # {}'.format(per_done, islice))

             counts[this_slice] += ones

@@ -708,7 +708,7 @@ class ImageWindow(object):
         for islice, this_slice in enumerate(win_slices):
             if islice % np.rint(n_wins / 10) == 0:
-                per_done = np.rint(100 * (islice) / (n_wins))
+                per_done = np.rint(100 * islice / n_wins)
                 print('Reconstructing Image...{}% -- step # {}'.format(per_done, islice))

             counts[this_slice] += ones
pycroscopy/processing/image_transformation.py

@@ -160,9 +160,9 @@ class FeatureExtractorParallel(object):
             return keypts, descs

         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)
-        tasks = [(imp) for imp in self.data]
+        tasks = [imp for imp in self.data]
         chunk = int(self.data.shape[0] / processes)
         jobs = pool.imap(detect, tasks, chunksize=chunk)

@@ -769,7 +769,6 @@ class geoTransformerParallel(object):
     """

     def __init__(self):
-        self.__init__
         self.data = []
         self.features = []

@@ -844,7 +843,7 @@ class geoTransformerParallel(object):
         # start pool of workers
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         tasks = [(desc1, desc2) for desc1, desc2 in zip(desc[:], desc[1:])]
         chunk = int(len(desc) / processes)

@@ -898,7 +897,7 @@ class geoTransformerParallel(object):
         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)
         tasks = [(key1[match[:, 0]], key2[match[:, 1]])
                  for match, key1, key2 in zip(matches, keypts[:], keypts[1:])]

@@ -1039,7 +1038,7 @@ class geoTransformerParallel(object):
         for imp, transform, itm in zip(transImages, chainTransforms, range(0, transImages.shape[0])):
             transimp = warping([imp, transform])
             transImages[itm] = transimp
-            print('Image #%i' % (itm))
+            print('Image #%i' % itm)

         return transImages, chainTransforms

@@ -1061,7 +1060,7 @@ class geoTransformerParallel(object):
         processes = kwargs.get('processors', 1)
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         def register(images):
             imp1, imp2 = images[0], images[1]

@@ -1100,7 +1099,6 @@ class geoTransformerSerial(object):
     """

     def __init__(self):
-        self.__init__
         self.data = []
         self.features = []

@@ -1173,7 +1171,7 @@ class geoTransformerSerial(object):
         # start pool of workers
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         tasks = [(desc1, desc2) for desc1, desc2 in zip(desc[:], desc[1:])]
         chunk = int(len(desc) / processes)

@@ -1343,7 +1341,7 @@ class geoTransformerSerial(object):
         for imp, transform, itm in zip(transImages, chainTransforms, range(0, transImages.shape[0])):
             transimp = warping([imp, transform])
             transImages[itm] = transimp
-            print('Image #%i' % (itm))
+            print('Image #%i' % itm)

         return transImages, chainTransforms

@@ -1365,7 +1363,7 @@ class geoTransformerSerial(object):
         processes = kwargs.get('processors', 1)
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         def register(images):
             imp1, imp2 = images[0], images[1]
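Removing the bare self.__init__ line from the two constructors above changes nothing at runtime: the expression only looks up the bound method and discards it, without calling it. A minimal sketch (toy class, not from the commit):

    class Demo:
        def __init__(self):
            self.__init__     # what the old code did: a no-op attribute lookup
            self.data = []

    d = Demo()                # no recursion, no side effect from the stray line
    print(d.data)             # []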
pycroscopy/visualizers/BEPSvisSHO/ioFuncs.py

-'''
+"""
 Created on Apr 20, 2016

 @author: Chris Smith -- csmith55@utk.edu
-'''
+"""
 import sys
 import numpy as np

@@ -15,11 +15,11 @@ from pycroscopy.io.hdf_utils import reshape_to_Ndims
 def loadDataFunc(filePath, **kwargs):
-    '''
+    """
     Function to load the N-D data from a .mat file

     Output: N-D numpy data array, Nx2 x-vector array