Vasudevan, Rama K / pycroscopy / Commits / 52cee572

Commit 52cee572, authored Aug 22, 2017 by Unknown
Code cleanups
parent 45cab2ca

Changes: 17
pycroscopy/analysis/optimize.py
...
...
@@ -156,7 +156,7 @@ class Optimize(object):
         self.solver_type = solver_type
         self.solver_options = solver_options
         if self.solver_type not in scipy.optimize.__dict__.keys():
-            warn('Solver %s does not exist!. For additional info see scipy.optimize' % (solver_type))
+            warn('Solver %s does not exist!. For additional info see scipy.optimize' % solver_type)
             sys.exit()
         if obj_func['class'] is None:
             self.obj_func = obj_func['obj_func']
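The membership test above only checks that the requested solver name is defined somewhere inside scipy.optimize. A minimal standalone sketch of the same idea, using hasattr as an equivalent spelling (the solver names below are only examples):

import scipy.optimize

def solver_exists(solver_type):
    # same test as: solver_type in scipy.optimize.__dict__.keys()
    return hasattr(scipy.optimize, solver_type)

print(solver_exists('least_squares'))   # True on recent SciPy releases
print(solver_exists('no_such_solver'))  # False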
...
...
pycroscopy/io/hdf_utils.py
...
...
@@ -129,8 +129,8 @@ def getAuxData(parent_data, auxDataName=None):
         auxDataName = parent_data.attrs.keys()
     elif type(auxDataName) not in [list, tuple, set]:
         auxDataName = [auxDataName]  # typically a single string

-    data_list = list()
     try:
+        data_list = []
         file_ref = parent_data.file
         for auxName in auxDataName:
             ref = parent_data.attrs[auxName]
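For context, getAuxData resolves ancillary datasets that are linked to a main dataset through HDF5 object references stored in its attributes. A minimal, self-contained sketch of that pattern with h5py (the file name and dataset names are invented for illustration):

import h5py
import numpy as np

with h5py.File('example.h5', 'w') as h5_f:
    main = h5_f.create_dataset('Raw_Data', data=np.zeros((4, 8)))
    pos = h5_f.create_dataset('Position_Indices', data=np.arange(4))
    # store an object reference to the ancillary dataset as an attribute of the main one
    main.attrs['Position_Indices'] = pos.ref

    # the lookup mirrors getAuxData: attribute -> reference -> dataset
    ref = main.attrs['Position_Indices']
    print(main.file[ref].name)  # /Position_Indices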
...
...
@@ -197,14 +197,15 @@ def get_attributes(parent_data, attr_names=None):
         attr_names = [attr_names]

     att_dict = {}
-    try:
-        for attr in attr_names:
+    for attr in attr_names:
+        try:
             att_dict[attr] = get_attr(parent_data, attr)
-    except KeyError:
-        warn('%s is not an attribute of %s' % (str(attr), parent_data.name))
-    except:
-        raise
+        except KeyError:
+            warn('%s is not an attribute of %s' % (str(attr), parent_data.name))
+        except:
+            raise

     return att_dict
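Moving the try block inside the loop changes behaviour, not just layout: a missing attribute no longer aborts the whole collection, it only skips that one name. A self-contained illustration with a plain dict standing in for the HDF5 attributes (all names here are arbitrary):

from warnings import warn

attrs = {'units': 'V', 'quantity': 'Bias'}

collected = {}
for name in ['units', 'missing_key', 'quantity']:
    try:
        collected[name] = attrs[name]
    except KeyError:
        warn('%s is not an attribute' % name)

print(collected)  # {'units': 'V', 'quantity': 'Bias'} -- 'quantity' is still picked up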
...
...
@@ -410,6 +411,7 @@ def getH5RegRefIndices(ref, h5_main, return_method='slices'):
             ref_inds = return_func(start, end)
         else:
             warn('No method currently exists for converting this type of reference.')
+            ref_inds = np.empty(0)
     else:
         raise TypeError('Input ref must be an HDF5 Region Reference')
...
...
@@ -740,11 +742,9 @@ def reshape_to_Ndims(h5_main, h5_pos=None, h5_spec=None, get_labels=False):
             ds_labels = np.hstack([pos_labs, spec_labs])
-            results = (ds_Nd2, True, ds_labels)
+            return ds_Nd2, True, ds_labels
         else:
-            results = (ds_Nd2, True)
-
-        return results
+            return ds_Nd2, True


 def reshape_from_Ndims(ds_Nd, h5_pos=None, h5_spec=None):
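Several hunks in this commit drop redundant parentheses around returned tuples; return (a, b) and return a, b build exactly the same tuple object, so these edits are purely cosmetic. A quick check:

def with_parens():
    return (1, True)

def without_parens():
    return 1, True

print(with_parens() == without_parens())  # True
print(type(without_parens()))             # <class 'tuple'>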
...
...
pycroscopy/io/io_utils.py
...
...
@@ -23,14 +23,14 @@ def check_ssh():
     return 'SSH_CLIENT' in os.environ or 'SSH_TTY' in os.environ


-def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
+def uiGetFile(file_filter='H5 file (*.h5)', caption='Select File'):
     """
     Presents a File dialog used for selecting the .mat file
     and returns the absolute filepath of the selecte file\n
     Parameters
     ----------
-    filter : String or list of strings
+    file_filter : String or list of strings
         file extensions to look for
     caption : (Optional) String
         Title for the file browser window
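Renaming the filter argument to file_filter stops it from shadowing Python's built-in filter() inside the function body. A small, hypothetical demonstration of the problem being avoided:

def shadowed(filter='*.h5'):
    # the parameter hides the built-in, so this raises TypeError: 'str' object is not callable
    return list(filter(str.isdigit, 'a1b2'))

def not_shadowed(file_filter='*.h5'):
    # the built-in filter() is still reachable here
    return list(filter(str.isdigit, 'a1b2'))

print(not_shadowed())  # ['1', '2']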
...
...
@@ -51,7 +51,7 @@ def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
             raise
         else:
             app = QtWidgets.QApplication([])
-            path = QtWidgets.QFileDialog.getOpenFileName(caption=caption, filter=filter)[0]
+            path = QtWidgets.QFileDialog.getOpenFileName(caption=caption, filter=file_filter)[0]
             app.closeAllWindows()
             app.exit()
             del app
...
...
@@ -66,7 +66,7 @@ def uiGetFile(filter='H5 file (*.h5)', caption='Select File'):
             raise
         else:
             app = QtGui.QApplication([])
-            path = QtGui.QFileDialog.getOpenFileName(caption=caption, filter=filter)
+            path = QtGui.QFileDialog.getOpenFileName(caption=caption, filter=file_filter)
             app.exit()
             del app
...
...
pycroscopy/io/microdata.py
...
...
@@ -21,7 +21,7 @@ class MicroData(object):
     """

     def __init__(self, name, parent):
-        '''
+        """
         Parameters
         ----------
         name : String
...
...
@@ -29,7 +29,7 @@ class MicroData(object):
         parent : String
             HDF5 path to the parent of this object. Typically used when
             appending to an existing HDF5 file
-        '''
+        """
         self.name = name
         self.attrs = dict()
         self.parent = parent
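The docstring edits in this file (and in the visualizer modules further below) swap '''-delimited docstrings for the """ form recommended by PEP 257; both delimiters are legal Python, so only the style changes. For example:

def described():
    """Triple double quotes are the conventional docstring delimiter (PEP 257)."""
    return None

print(described.__doc__)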
...
...
@@ -65,18 +65,18 @@ class MicroDataGroup(MicroData):
         pass

     def addChildren(self, children):
-        '''
+        """
         Adds Children to the class to make a tree structure.

         Parameters
         ----------
         children : list of MicroData objects
             Children can be a mixture of groups and datasets

         Returns
         -------
         None
-        '''
+        """
         for child in children:
             if isinstance(child, MicroData):
                 child.parent = self.parent + self.name
...
...
pycroscopy/io/translators/be_odf_relaxation.py
...
...
@@ -181,8 +181,11 @@ class BEodfRelaxationTranslator(Translator):
         ds_wfm_typ = MicroDataset('Bin_Wfm_Type', exec_bin_vec)

         # Create Spectroscopic Values and Spectroscopic Values Labels datasets
-        spec_vals, spec_vals_labs, spec_vals_units = createSpecVals(UDVS_mat, spec_inds, bin_freqs, exec_bin_vec, parm_dict, UDVS_labs, UDVS_units)
+        spec_vals, spec_inds, spec_vals_labs, spec_vals_units, spec_vals_names = createSpecVals(UDVS_mat, spec_inds, bin_freqs, exec_bin_vec, parm_dict, UDVS_labs, UDVS_units)
+        spec_vals_slices = dict()

         for row_ind, row_name in enumerate(spec_vals_labs):
...
...
@@ -359,7 +362,7 @@ class BEodfRelaxationTranslator(Translator):
         FFT_full = np.fft.fftshift(np.fft.fft(BE_wave))
         bin_FFT = np.conjugate(FFT_full[bin_inds])

-        return (bin_inds, bin_w, bin_FFT, BE_wave, dc_amp_vec_full)
+        return bin_inds, bin_w, bin_FFT, BE_wave, dc_amp_vec_full

     def _parse_file_path(self, data_filepath):
         """
...
...
@@ -393,7 +396,7 @@ class BEodfRelaxationTranslator(Translator):
         path_dict['read_imag'] = imag_path
         path_dict['old_mat_parms'] = data_filepath

-        return (basename, path_dict)
+        return basename, path_dict

     @staticmethod
     def __getParmsFromOldMat(file_path):
...
...
@@ -493,7 +496,7 @@ class BEodfRelaxationTranslator(Translator):
         elif VS_parms[0] == 2:  # AC mode
             parm_dict['VS_mode'] = 'AC modulation mode with time reversal'
-            parm_dict['VS_amplitude_[V]'] = 0.5 * (VS_final_loop_amp)
+            parm_dict['VS_amplitude_[V]'] = 0.5 * VS_final_loop_amp
             parm_dict['VS_offset_[V]'] = 0  # this is not correct. Fix manually when it comes to UDVS generation?
         else:
...
...
@@ -620,4 +623,4 @@ class BEodfRelaxationTranslator(Translator):
         UD_VS_table[BE_IF_switch == 1, 5] = UD_VS_table[BE_IF_switch == 1, 1]
         UD_VS_table[BE_OF_switch == 1, 6] = UD_VS_table[BE_IF_switch == 1, 1]

-        return (UD_VS_table_label, UD_VS_table_unit, UD_VS_table)
+        return UD_VS_table_label, UD_VS_table_unit, UD_VS_table
pycroscopy/io/translators/df_utils/be_utils.py
...
...
@@ -299,7 +299,7 @@ def normalizeBEresponse(spectrogram_mat, FFT_BE_wave, harmonic):
     # Generate transfer functions
     F_AO_spectrogram = np.transpose(np.tile(FFT_BE_wave / scaling_factor, [spectrogram_mat.shape[1], 1]))
     # Divide by transfer function
-    spectrogram_mat = spectrogram_mat / (F_AO_spectrogram)
+    spectrogram_mat = spectrogram_mat / F_AO_spectrogram

     return spectrogram_mat
...
...
@@ -729,7 +729,7 @@ def createSpecVals(udvs_mat, spec_inds, bin_freqs, bin_wfm_type, parm_dict,
         Check if more that one unique value
         Append column number to iSpec_var if true
         """
-        if (uvals.size > 1):
+        if uvals.size > 1:
             iSpec_var = np.append(iSpec_var, int(i))

     iSpec_var = np.asarray(iSpec_var, np.int)
...
...
@@ -1179,7 +1179,7 @@ BEHistogram Class and Functions
 """


-class BEHistogram():
+class BEHistogram:
     # TODO: Turn into proper class
     # TODO: Parallelize Histogram generation
     """
...
...
@@ -1552,7 +1552,7 @@ class BEHistogram():
             udvs_bins = np.where(x_hist[1] == udvs_step)[0]
             if debug:
                 print(np.shape(x_hist))
-            data_mat = h5_main[pix_chunks[ichunk]:pix_chunks[ichunk + 1], (udvs_bins)]
+            data_mat = h5_main[pix_chunks[ichunk]:pix_chunks[ichunk + 1], udvs_bins]

             """
             Get the frequecies that correspond to the current UDVS bins from the total x_hist
...
...
pycroscopy/io/translators/df_utils/parse_dm3.py
...
...
@@ -357,7 +357,7 @@ def dm_read_string(f, outdata=None):
         put_into_file(f, ">" + str(slen) + "s", outdata)
         return header_size
     else:
-        assert (False)
+        assert False
         slen = get_from_file(f, ">L")
         raws = get_from_file(f, ">" + str(slen) + "s")
         if verbose:
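assert (False) and assert False are identical because the parentheses only group a single expression. The style still matters: parenthesising an assert together with its message creates a non-empty tuple, which is always truthy, so the check can never fail. A short illustration:

# correct form: expression, then an optional message
# assert 1 + 1 == 2, 'arithmetic is broken'

# pitfall: asserting a tuple always passes, even when its first element is False
suspicious = (False, 'this never fires')
assert suspicious
print('reached this line despite the False inside the tuple')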
...
...
pycroscopy/io/translators/general_dynamic_mode.py
...
...
@@ -162,7 +162,7 @@ class GDMTranslator(Translator):
             else:
                 print('File not found for: row {} col {}'.format(row_ind, col_ind))
             pos_ind += 1
-            if (100.0 * (pos_ind) / num_pix) % 10 == 0:
+            if (100.0 * pos_ind / num_pix) % 10 == 0:
                 print('completed translating {} %'.format(int(100 * pos_ind / num_pix)))

         hdf.close()
...
...
pycroscopy/io/translators/sporc.py
...
...
@@ -146,7 +146,7 @@ class SporcTranslator(Translator):
             else:
                 print('File for row {} col {} not found'.format(row_ind, col_ind))
             pos_ind += 1
-            if (100.0 * (pos_ind) / num_pix) % 10 == 0:
+            if (100.0 * pos_ind / num_pix) % 10 == 0:
                 print('Finished reading {} % of data'.format(int(100 * pos_ind / num_pix)))

         hdf.close()
...
...
pycroscopy/processing/atom_finding.py
This diff is collapsed.
pycroscopy/processing/feature_extraction.py
...
...
@@ -157,9 +157,9 @@ class FeatureExtractorParallel(object):
             return keypts, descs

         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)
-        tasks = [(imp) for imp in self.data]
+        tasks = [imp for imp in self.data]
         chunk = int(self.data.shape[0] / processes)
         jobs = pool.imap(detect, tasks, chunksize=chunk)
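The list-comprehension change is also cosmetic: (imp) is just imp wrapped in grouping parentheses, not a one-element tuple, so the old and new task lists passed to Pool.imap are identical. A tuple would need the trailing comma:

imp = 'image'
print((imp) == imp)    # True  -- parentheses alone do not make a tuple
print((imp,) == imp)   # False -- this one is a 1-tuple
print([(x) for x in range(3)] == [x for x in range(3)])  # True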
...
...
pycroscopy/processing/gmode_utils.py
...
...
@@ -744,7 +744,7 @@ def reshape_from_lines_to_pixels(h5_main, pts_per_cycle, scan_step_x_m=1):
     if h5_main.shape[1] % pts_per_cycle != 0:
         warn('Error in reshaping the provided dataset to pixels. Check points per pixel')
-        raise ValueError
+        return

     num_cols = int(h5_main.shape[1] / pts_per_cycle)
     h5_spec_vals = getAuxData(h5_main, auxDataName=['Spectroscopic_Values'])[0]
...
...
pycroscopy/processing/image_processing.py
...
...
@@ -584,7 +584,7 @@ class ImageWindow(object):
         for islice, this_slice in enumerate(win_slices):
             selected = islice % np.rint(n_wins / 10) == 0
             if selected:
-                per_done = np.rint(100 * (islice) / (n_wins))
+                per_done = np.rint(100 * islice / n_wins)
                 print('Reconstructing Image...{}% -- step # {}'.format(per_done, islice))

             counts[this_slice] += ones
...
...
@@ -708,7 +708,7 @@ class ImageWindow(object):
         for islice, this_slice in enumerate(win_slices):
             if islice % np.rint(n_wins / 10) == 0:
-                per_done = np.rint(100 * (islice) / (n_wins))
+                per_done = np.rint(100 * islice / n_wins)
                 print('Reconstructing Image...{}% -- step # {}'.format(per_done, islice))

             counts[this_slice] += ones
...
...
pycroscopy/processing/image_transformation.py
...
...
@@ -160,9 +160,9 @@ class FeatureExtractorParallel(object):
             return keypts, descs

         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)
-        tasks = [(imp) for imp in self.data]
+        tasks = [imp for imp in self.data]
         chunk = int(self.data.shape[0] / processes)
         jobs = pool.imap(detect, tasks, chunksize=chunk)
...
...
@@ -769,7 +769,6 @@ class geoTransformerParallel(object):
     """

     def __init__(self):
-        self.__init__
         self.data = []
         self.features = []
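The deleted line was a no-op: self.__init__ only looks up the bound method, it never calls it, so removing the statement changes nothing at runtime. A tiny illustration:

class Example:
    def __init__(self):
        self.__init__   # evaluates to a bound-method object; nothing is executed
        self.data = []

e = Example()
print(e.data)  # [] -- initialisation proceeds normally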
...
...
@@ -844,7 +843,7 @@ class geoTransformerParallel(object):
         # start pool of workers
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         tasks = [(desc1, desc2) for desc1, desc2 in zip(desc[:], desc[1:])]
         chunk = int(len(desc) / processes)
...
...
@@ -898,7 +897,7 @@ class geoTransformerParallel(object):
         # start pool of workers
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)
         pool = mp.Pool(processes)

         tasks = [(key1[match[:, 0]], key2[match[:, 1]]) for match, key1, key2 in zip(matches, keypts[:], keypts[1:])]
...
...
@@ -1039,7 +1038,7 @@ class geoTransformerParallel(object):
         for imp, transform, itm in zip(transImages, chainTransforms, range(0, transImages.shape[0])):
             transimp = warping([imp, transform])
             transImages[itm] = transimp
-            print('Image #%i' % (itm))
+            print('Image #%i' % itm)

         return transImages, chainTransforms
...
...
@@ -1061,7 +1060,7 @@ class geoTransformerParallel(object):
         processes = kwargs.get('processors', 1)
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         def register(images):
             imp1, imp2 = images[0], images[1]
...
...
@@ -1100,7 +1099,6 @@ class geoTransformerSerial(object):
     """

     def __init__(self):
-        self.__init__
         self.data = []
         self.features = []
...
...
@@ -1173,7 +1171,7 @@ class geoTransformerSerial(object):
         # start pool of workers
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         tasks = [(desc1, desc2) for desc1, desc2 in zip(desc[:], desc[1:])]
         chunk = int(len(desc) / processes)
...
...
@@ -1343,7 +1341,7 @@ class geoTransformerSerial(object):
         for imp, transform, itm in zip(transImages, chainTransforms, range(0, transImages.shape[0])):
             transimp = warping([imp, transform])
             transImages[itm] = transimp
-            print('Image #%i' % (itm))
+            print('Image #%i' % itm)

         return transImages, chainTransforms
...
...
@@ -1365,7 +1363,7 @@ class geoTransformerSerial(object):
         processes = kwargs.get('processors', 1)
         pool = mp.Pool(processes)
-        print('launching %i kernels...' % (processes))
+        print('launching %i kernels...' % processes)

         def register(images):
             imp1, imp2 = images[0], images[1]
...
...
pycroscopy/visualizers/BEPSvisSHO/ioFuncs.py
-'''
+"""
 Created on Apr 20, 2016

 @author: Chris Smith -- csmith55@utk.edu
-'''
+"""

 import sys
 import numpy as np
...
...
@@ -15,11 +15,11 @@ from pycroscopy.io.hdf_utils import reshape_to_Ndims


 def loadDataFunc(filePath, **kwargs):
-    '''
+    """
     Function to load the N-D data from a .mat file
     Output: N-D numpy data array, Nx2 x-vector array
     array indices are (Step, #rows, #cols, cycle#)
-    '''
+    """
     data = loadmat(filePath)
     data_mat = data['loop_mat']
     data_mat = data_mat[:, :, :, :]
...
...
@@ -32,18 +32,18 @@ def loadDataFunc(filePath, **kwargs):


 def readData(h5_path, dset_name='SHO_Fit_Guess'):
-    '''
+    """
     Reads the hdf5 data file and calls appropriate reader based on data type
     Input:
         h5_path -- the absolute file path to the hdf5 file to be read in.
         dset_name -- the name of the main dataset
     Outputs:
         data_mat -- the transformed data read to be plotted
         xvec -- numpy array containing the possible plot data of the slice viewer
         xvec_labs -- numpy array of labels and units for the xvec array
-    '''
+    """
     hdf = ioHDF5(h5_path)
...
...
@@ -64,18 +64,18 @@ def readData(h5_path, dset_name='SHO_Fit_Guess'):


 def readDCData(h5_group):
-    '''
+    """
     Reads the data for DC modulation experiments
     Inputs:
-        h5_group -- hdf5 group holding the SHO_Fit Data for the chosen 
+        h5_group -- hdf5 group holding the SHO_Fit Data for the chosen
             Measurement group
     Outputs:
         data_guess -- the transformed data to be plotted
         xvec -- numpy array containing the possible plot data of the slice viewer
         xvec_labs -- numpy array of labels and units for the xvec array
-    '''
+    """
     h5_chan = h5_group['Channel_000']
     h5_main = h5_chan['Raw_Data']
     h5_file = h5_main.file
...
...
@@ -212,18 +212,18 @@ def readDCData(h5_group):


 def readACData(h5_group):
-    '''
+    """
     Reads the data for AC modulation experiments
     Inputs:
-        h5_group -- hdf5 group holding the SHO_Fit Data for the chosen 
+        h5_group -- hdf5 group holding the SHO_Fit Data for the chosen
             Measurement group
     Outputs:
         data_guess -- the transformed data to be plotted
         xvec -- numpy array containing the possible plot data of the slice viewer
         xvec_labs -- numpy array of labels and units for the xvec array
-    '''
+    """
     h5_chan = h5_group['Channel_000']
     h5_main = h5_chan['Raw_Data']
     h5_specv = h5_chan['Spectroscopic_Values']
...
...
@@ -367,9 +367,9 @@ def getSpectralData(point, data_mat):


 def __getPos(h5_pos):
-    '''
+    """
     Return the number of rows and columns in the dataset
-    '''
+    """
     num_rows = len(np.unique(h5_pos[:, 0]))
     try:
         num_cols = len(np.unique(h5_pos[:, 1]))
...
...
@@ -380,9 +380,9 @@ def __getPos(h5_pos):


 def __findDataset(h5_file, ds_name):
-    '''
+    """
     Uses visit() to find all datasets with the desired name
-    '''
+    """
     print('Finding all instances of', ds_name)
     ds = []
...
...
pycroscopy/visualizers/BEPSvisSHO/plotFunctions.py
...
...
@@ -10,14 +10,14 @@ from pyqtgraph import QtGui


 class BEPSwindow(QtGui.QMainWindow):
-    '''
+    """
     Window object that will handle all the plotting
-    '''
+    """
     def __init__(self, **kwargs):
-        '''
+        """
         Create the initial window
-        '''
+        """
         super(BEPSwindow, self).__init__()
         winTitle = kwargs.get('winTitle', 'BEPS Visualization')
         self.setWindowTitle(winTitle)
...
...
@@ -98,10 +98,10 @@ class BEPSwindow(QtGui.QMainWindow):
 # %%
     def setup(self, h5_path=None):
-        '''
-        Call the readData functions from ioFuncs to setup the 
+        """
+        Call the readData functions from ioFuncs to setup the
         arrays for later uses and get the proper parameters
-        '''
+        """
         if not h5_path:
             h5_path = pg.FileDialog.getOpenFileName(caption='Select H5 file', filter="H5 file (*.h5)")
...
...
@@ -373,26 +373,26 @@ class BEPSwindow(QtGui.QMainWindow):
         return imv1, imv2, imv3

     def __setupTwoD(self, xlabel, ylabel):
-        '''
+        """
         Creates the needed widgets for plotting 1D data
         Inputs:
             xlabel -- list of labels and units to use for x-axis of plots
             ylabel -- list of labels and units to use for y-axis of plots
         Outputs:
             imv1 -- ImageView Widget in which the map will be plotted
             imv2 -- PlotWidget in which the loops will be plotted
             imv3 -- PlotWidget in which the timeline will be plotted
         Shared:
-            plt1 -- PlotItem associated with the map to show position axes 
+            plt1 -- PlotItem associated with the map to show position axes
                 and the title
-            roi1 -- The crosshairs roi object that determines the position for 
+            roi1 -- The crosshairs roi object that determines the position for
                 which the loops are plotted in imv2
             roiplt1 -- The box roi object associated with imv1
-            posline -- the 
-        '''
+            posline -- the
+        """
         plt1 = pg.PlotItem()
         imv1 = pg.ImageView(view=plt1)
         imv2 = pg.PlotWidget()
...
...
@@ -435,9 +435,9 @@ class BEPSwindow(QtGui.QMainWindow):
         return imv1, imv2, imv3

     def __setInitialData(self, data_guess, data_results, xvec, data_main, freq_vec):
-        '''
+        """
         Tell the window what data it should be plotting from
-        '''
+        """
         points = np.arange(len(xvec[0, 0, 0, :]) + 1)
         self.num_bins = len(freq_vec)
...
...
@@ -465,43 +465,43 @@ class BEPSwindow(QtGui.QMainWindow):
         return

     def __setDataOneD(self, data_guess, data_results, xvec, data_main, freq_vec, points):
-        '''
+        """
         Sets the initial data for the case of one spacial dimension
         Inputs:
-            data_guess -- 6D numpy array holding BEPS data. 
+            data_guess -- 6D numpy array holding BEPS data.
                 Indices are:
                 [Field, SHO, Cycle #, UDVS Step, X-Pos, Y-Pos]
                 Field: "In-field" or "Out-of-field" for DC
                        "Forward" or "Reverse" for AC
                 SHO: SHO parameter to be plotted
-                     Amplitude, Resonance Frequency, Q-factor, Phase, 
+                     Amplitude, Resonance Frequency, Q-factor, Phase,
                      or R^2 value
                 Cycle: Which cycle should be plotted
                 UDVS: The UDVS step of the current plot
                 X-Pos: X position from dataset
                 Y-Pos: Y position from dataset
-                These indices should be present for all datasets 
+                These indices should be present for all datasets
                     even if they only have a length of 1
-            xvec -- 4D numpy array containing the values which will be plotted 
+            xvec -- 4D numpy array containing the values which will be plotted