Project: Vasudevan, Rama K / pycroscopy

Commit 84444cc3, authored Jun 09, 2017 by Somnath, Suhas

python3 compatibility fixes

Parent: fe671d8c
Changes: 14 files
pycroscopy/analysis/utils/be_loop.py
@@ -605,7 +605,7 @@ def generate_guess(vdc, pr_vec, show_plots=False):
     """Find the coordinates of the points where the vertical line through the
     centroid intersects with the convex hull"""
     y_intersections = []
-    for pair in xrange(outline_1.shape[0]):
+    for pair in range(outline_1.shape[0]):
         x_pt = find_intersection(outline_1[pair], outline_2[pair],
                                  [geom_centroid[0], hull.min_bound[1]],
                                  [geom_centroid[0], hull.max_bound[1]])
@@ -617,7 +617,7 @@ def generate_guess(vdc, pr_vec, show_plots=False):
     centroid intersects with the convex hull
     '''
     x_intersections = []
-    for pair in xrange(outline_1.shape[0]):
+    for pair in range(outline_1.shape[0]):
         x_pt = find_intersection(outline_1[pair], outline_2[pair],
                                  [hull.min_bound[0], geom_centroid[1]],
                                  [hull.max_bound[0], geom_centroid[1]])
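The recurring change in this commit is replacing Python 2's xrange with range, which Python 3 removed. A minimal sketch of the pattern (the array below is a placeholder, not the module's data); on Python 2, range builds a small list, which is harmless at these loop sizes:

    import numpy as np

    outline_1 = np.zeros((10, 2))           # placeholder stand-in for the hull outline
    for pair in range(outline_1.shape[0]):  # lazy on Python 3, a short list on Python 2
        pass                                # intersection test would go here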
pycroscopy/io/be_hdf_utils.py
@@ -275,12 +275,12 @@ def generateTestSpectroscopicData(num_bins=7, num_steps=3, num_pos=4):
     Data organized as [steps x bins, positions]
     """
     full_data = np.zeros((num_steps * num_bins, num_pos))
-    for pos in xrange(num_pos):
+    for pos in range(num_pos):
         bin_count = 0
-        for step in xrange(num_steps):
-            for bind in xrange(num_bins):
-                full_data[bin_count, pos] = (pos + 1) * 100 + (step + 1) * 10 + (bind + 1)
-                bin_count += 1
+        for step in range(num_steps):
+            for bind in range(num_bins):
+                full_data[bin_count, pos] = (pos + 1) * 100 + (step + 1) * 10 + (bind + 1)
+                bin_count += 1
     return full_data
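The nested loops encode each test value digit by digit: hundreds from the position, tens from the step, ones from the bin. A quick sketch of the first column under the default arguments (an illustrative check, not part of the module):

    # With num_bins=7 and num_steps=3, column 0 (pos=0) reads 111, 112, ..., 117, 121, ..., 137.
    num_bins, num_steps = 7, 3
    col0 = [(0 + 1) * 100 + (step + 1) * 10 + (bind + 1)
            for step in range(num_steps) for bind in range(num_bins)]
    print(col0[:8])  # [111, 112, 113, 114, 115, 116, 117, 121]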
pycroscopy/io/hdf_utils.py
@@ -848,7 +848,8 @@ def copyAttributes(source, dest, skip_refs=True):
     """
     Copy attributes from one h5object to another
     """
-    for attr, atval in source.attrs.iteritems():
+    for attr in source.attrs.keys():
+        atval = source.attrs[attr]
         """
         Don't copy references unless asked
         """
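dict.iteritems() is gone in Python 3, so the commit switches to indexing by key. An alternative sketch that works on both interpreter versions uses .items(); the file name and the omission of the reference-skipping logic below are illustrative:

    import h5py
    import numpy as np

    def copy_attrs_sketch(source, dest):
        # .items() exists on both Python 2 and Python 3, avoiding a second lookup
        # per attribute; the skip_refs handling of the real function is omitted here.
        for attr, atval in source.attrs.items():
            dest.attrs[attr] = atval

    with h5py.File('attrs_demo.h5', 'w') as h5_f:   # illustrative throwaway file
        a = h5_f.create_dataset('a', data=np.arange(3))
        b = h5_f.create_dataset('b', data=np.arange(3))
        a.attrs['units'] = 'nm'
        copy_attrs_sketch(a, b)
        print(dict(b.attrs))                        # expected: {'units': 'nm'}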
pycroscopy/io/translators/df_utils/io_image.py
@@ -93,7 +93,8 @@ def unnest_parm_dicts(image_parms, prefix=''):
     """
     new_parms = dict()
-    for name, val in image_parms.iteritems():
+    for name in image_parms.keys():
+        val = image_parms[name]
         # print 'name',name,'val',val
         name = '-'.join([prefix] + name.split()).strip('-')
         if isinstance(val, dict):
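The loop above flattens nested parameter dictionaries into a single level, joining nested keys with '-'. A standalone sketch of the same idea (names and recursion details are illustrative, not the module's exact code):

    # Illustrative flattening of nested parameter dicts, mirroring the
    # '-'-joined key scheme visible in the hunk above.
    def flatten_parms(parms, prefix=''):
        flat = dict()
        for name in parms.keys():
            val = parms[name]
            name = '-'.join([prefix] + name.split()).strip('-')
            if isinstance(val, dict):
                flat.update(flatten_parms(val, prefix=name))
            else:
                flat[name] = val
        return flat

    print(flatten_parms({'scan': {'size x': 256, 'size y': 256}, 'bias': 1.5}))
    # {'scan-size-x': 256, 'scan-size-y': 256, 'bias': 1.5}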
pycroscopy/io/translators/df_utils/parse_dm3.py
 from __future__ import division, print_function, absolute_import, unicode_literals
 import struct
 import array
-import logging
+import warnings
 import re
 try:
     import StringIO
@@ -291,7 +291,7 @@ def get_structdmtypes_for_python_typeorobject(typeorobj):
         return None, get_dmtype_for_name('struct')
     elif comparer(structarray):
         return None, get_dmtype_for_name('array')
-    logging.warn("No appropriate DMType found for %s, %s", typeorobj, type(typeorobj))
+    warnings.warn("No appropriate DMType found for %s, %s", typeorobj, type(typeorobj))
     return None
@@ -441,7 +441,7 @@ def dm_read_array(f, outdata=None):
             write_array(f, outdata)
             return array_header
         else:
-            logging.warn("Unsupported type for conversion to array:%s", outdata)
+            warnings.warn("Unsupported type for conversion to array:%s", outdata)
     else:
         # supports arrays of structs and arrays of types,
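One caveat with this substitution: unlike logging.warn, warnings.warn does not interpolate extra arguments into the message; its second positional argument is interpreted as a warning category. A sketch of an equivalent call that formats the message first (the wrapper name is illustrative):

    import warnings

    def warn_no_dmtype(typeorobj):
        # warnings.warn(message, category, stacklevel): the second positional argument
        # must be a Warning subclass, not a format argument, so interpolate up front.
        warnings.warn("No appropriate DMType found for %s, %s" % (typeorobj, type(typeorobj)))

    warn_no_dmtype([1, 2, 3])   # UserWarning: No appropriate DMType found for [1, 2, 3], <class 'list'>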
pycroscopy/io/translators/general_dynamic_mode.py
@@ -135,8 +135,8 @@ class GDMTranslator(Translator):
         # Now read the raw data files:
         pos_ind = 0
-        for row_ind in xrange(1, num_rows + 1):
-            for col_ind in xrange(1, num_cols + 1):
+        for row_ind in range(1, num_rows + 1):
+            for col_ind in range(1, num_cols + 1):
                 file_path = path.join(folder_path, 'fSweep_r' + str(row_ind) + '_c' + str(col_ind) + '.mat')
                 print('Working on row {} col {}'.format(row_ind, col_ind))
                 if path.exists(file_path):
pycroscopy/io/translators/image.py
@@ -193,7 +193,7 @@ class ImageTranslator(Translator):
             old_parms = h5_meas.attrs
             old_parms.pop('machine_id', None)
             old_parms.pop('timestame', None)
-            test = [meas_grp.attrs[key] == old_parms[key] for key in old_parms.iterkeys()]
+            test = [meas_grp.attrs[key] == old_parms[key] for key in old_parms.keys()]
             if all(test):
                 return h5_raw
         # the clear (actually the repack) does not work on the ubuntu VM / Windows.
pycroscopy/io/translators/ndata_translator.py
@@ -490,8 +490,8 @@ class NDataTranslator(Translator):
             # Create new measurement group for each set of parameters
             meas_grp = MicroDataGroup('Measurement_')
             # Write the parameters as attributes of the group
-            for key, val in meas_parms.iteritems():
-                meas_grp.attrs[key] = val
+            for key in meas_parms.keys():
+                meas_grp.attrs[key] = meas_parms[key]
             chan_grp = MicroDataGroup('Channel_000')
             meas_grp.addChildren([chan_grp])
pycroscopy/io/translators/sporc.py
@@ -124,8 +124,8 @@ class SporcTranslator(Translator):
         # Now read the raw data files:
         pos_ind = 0
-        for row_ind in xrange(1, num_rows + 1):
-            for col_ind in xrange(1, num_cols + 1):
+        for row_ind in range(1, num_rows + 1):
+            for col_ind in range(1, num_cols + 1):
                 file_path = path.join(folder_path, 'result_r' + str(row_ind) + '_c' + str(col_ind) + '.mat')
                 #print('Working on row {} col {}'.format(row_ind,col_ind))
                 if path.exists(file_path):
pycroscopy/processing/cluster.py
@@ -247,8 +247,8 @@ class Cluster(object):
        Get the parameters of the estimator used and write them
        as attributes of the group
        '''
-        for parm, val in self.estimator.get_params().iteritems():
-            cluster_grp.attrs[parm] = val
+        for parm in self.estimator.get_params().keys():
+            cluster_grp.attrs[parm] = self.estimator.get_params()[parm]
         hdf = ioHDF5(self.h5_main.file)
         h5_clust_refs = hdf.writeData(cluster_grp)
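The same pattern recurs in Decomposition below. Since get_params() builds a fresh dict on each call, a variant that evaluates it once and iterates with .items(), which both Python versions support, is sketched here with a scikit-learn estimator standing in for self.estimator:

    from sklearn.cluster import KMeans

    estimator = KMeans(n_clusters=4)      # stand-in for self.estimator
    group_attrs = {}                      # stand-in for cluster_grp.attrs
    params = estimator.get_params()       # build the dict once, not once per iteration
    for parm, val in params.items():      # items() works on both Python 2 and 3
        group_attrs[parm] = val
    print(sorted(group_attrs)[:3])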
pycroscopy/processing/decomposition.py
@@ -156,8 +156,8 @@ class Decomposition(object):
        Get the parameters of the estimator used and write them
        as attributes of the group
        '''
-        for parm, val in self.estimator.get_params().iteritems():
-            decomp_grp.attrs[parm] = val
+        for parm in self.estimator.get_params().keys():
+            decomp_grp.attrs[parm] = self.estimator.get_params()[parm]
         hdf = ioHDF5(self.h5_main.file)
         h5_decomp_refs = hdf.writeData(decomp_grp)
pycroscopy/processing/image_processing.py
@@ -1256,7 +1256,7 @@ class ImageWindow(object):
         fimabs = np.abs(fim)
         fimabs_max = np.zeros(r_n - 1)
-        for k in xrange(r_n - 1):
+        for k in range(r_n - 1):
             r1 = r_vec[k]
             r2 = r_vec[k + 1]
             r_ind = np.where((r_mat >= r1) & (r_mat <= r2) == True)
@@ -1269,7 +1269,7 @@ class ImageWindow(object):
         '''
         count = 0
         local_max = []
-        for k in xrange(1, fimabs_max.size - 1):
+        for k in range(1, fimabs_max.size - 1):
             if fimabs_max[k - 1] < fimabs_max[k] and fimabs_max[k] > fimabs_max[k + 1]:
                 count += 1
                 local_max.append(k)
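For reference, the interior local-maximum test in this hunk can also be expressed without an explicit Python loop; a vectorized sketch using only NumPy (illustrative, not the module's code):

    import numpy as np

    fimabs_max = np.array([0.1, 0.5, 0.2, 0.8, 0.3])      # placeholder profile
    interior = fimabs_max[1:-1]
    is_peak = (interior > fimabs_max[:-2]) & (interior > fimabs_max[2:])
    local_max = np.nonzero(is_peak)[0] + 1                # shift back to original indexing
    print(local_max)  # [1 3]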
pycroscopy/processing/image_transformation.py
@@ -6,7 +6,6 @@ Created on Tue Oct 6 15:34:12 2015
"""
from
__future__
import
division
,
print_function
,
absolute_import
import
math
from
skimage.feature
import
match_descriptors
,
register_translation
from
skimage.measure
import
ransac
from
skimage.transform
import
warp
,
SimilarityTransform
@@ -14,6 +13,7 @@ import warnings
 import h5py
 import numpy as np
 import skimage.feature
+import multiprocessing as mp

 class ImageTransformation(object):
@@ -160,7 +160,7 @@ class FeatureExtractorParallel(object):
         # start pool of workers
         print('launching %i kernels...' % (processes))
-        pool = multiProcess.Pool(processes)
+        pool = mp.Pool(processes)
         tasks = [(imp) for imp in self.data]
         chunk = int(self.data.shape[0] / processes)
         jobs = pool.imap(detect, tasks, chunksize=chunk)
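The fix replaces the stale multiProcess name with the multiprocessing module imported as mp at the top of the file. A self-contained sketch of the same pool/imap pattern, with an illustrative worker in place of the real feature detector:

    import multiprocessing as mp

    def detect(frame):
        # illustrative worker; the real one extracts image features
        return frame * 2

    if __name__ == '__main__':
        data = list(range(8))
        processes = 2
        pool = mp.Pool(processes)
        chunk = max(1, int(len(data) / processes))
        results = list(pool.imap(detect, data, chunksize=chunk))
        pool.close()
        pool.join()
        print(results)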
@@ -351,9 +351,9 @@ def _center_and_normalize_points(points):
     centroid = np.mean(points, axis=0)
-    rms = math.sqrt(np.sum((points - centroid) ** 2) / points.shape[0])
+    rms = np.sqrt(np.sum((points - centroid) ** 2) / points.shape[0])
-    norm_factor = math.sqrt(2) / rms
+    norm_factor = np.sqrt(2) / rms
     matrix = np.array([[norm_factor, 0, -norm_factor * centroid[0]],
                        [0, norm_factor, -norm_factor * centroid[1]],
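Swapping math.sqrt for np.sqrt is behavior-preserving for the scalar arguments used here, and np.sqrt additionally broadcasts over arrays. A two-line check (illustrative):

    import math
    import numpy as np

    print(math.sqrt(2.0), np.sqrt(2.0))          # identical scalar result: 1.4142135623730951
    print(np.sqrt(np.array([1.0, 4.0, 9.0])))    # np.sqrt also broadcasts: [1. 2. 3.]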
@@ -546,8 +546,8 @@ class RigidTransform(object):
             rotation = 0
         self.params = np.array([
-            [math.cos(rotation), -math.sin(rotation), 0],
-            [math.sin(rotation), math.cos(rotation), 0],
+            [np.cos(rotation), -np.sin(rotation), 0],
+            [np.sin(rotation), np.cos(rotation), 0],
             [0, 0, 1]
         ])
@@ -708,7 +708,7 @@ class RigidTransform(object):
     @property
     def rotation(self):
-        return math.atan2(self.params[1, 0], self.params[1, 1])
+        return np.atan2(self.params[1, 0], self.params[1, 1])

     @property
     def translation(self):
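One caution on the new line: NumPy has long spelled the two-argument arctangent np.arctan2 (np.atan2 only appeared much later as an alias), so np.arctan2 is the portable choice. A sketch of the same angle recovery:

    import numpy as np

    rotation = 0.3
    params = np.array([[np.cos(rotation), -np.sin(rotation), 0],
                       [np.sin(rotation),  np.cos(rotation), 0],
                       [0, 0, 1]])
    print(np.arctan2(params[1, 0], params[1, 1]))   # approximately 0.3, recovering the angle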
@@ -803,7 +803,7 @@ class geoTransformerParallel(object):
             return matches

         # start pool of workers
-        pool = multiprocess.Pool(processes)
+        pool = mp.Pool(processes)
         print('launching %i kernels...' % (processes))
         tasks = [(desc1, desc2) for desc1, desc2 in zip(desc[:], desc[1:])]
@@ -1125,6 +1125,7 @@ class geoTransformerSerial(object):
         desc = self.features[-1]
         keypts = self.features[0]
         maxDis = kwargs.get('maximum_distance', np.infty)
+        processes = kwargs.get('processes', 2)

         def match(desc):
pycroscopy/processing/svd_utils.py
@@ -139,7 +139,7 @@ def doSVD(h5_main, num_comps=None):
     # copy attributes
     copy_main_attributes(h5_main, h5_V)
-    h5_V.attrs['units'] = ['a. u.']
+    h5_V.attrs['units'] = np.array(['a. u.'], dtype='S')

     del ds_S, ds_V, ds_U, svd_grp
@@ -165,11 +165,11 @@ def doSVD(h5_main, num_comps=None):
    Check h5_main for plot group references.
    Copy them into V if they exist
    '''
-    for key, ref in h5_main.attrs.iteritems():
+    for key in h5_main.attrs.keys():
         if '_Plot_Group' not in key:
             continue
-        ref_inds = getH5RegRefIndices(ref, h5_main, return_method='corners')
+        ref_inds = getH5RegRefIndices(h5_main.attrs[key], h5_main, return_method='corners')
         ref_inds = ref_inds.reshape([-1, 2, 2])
         ref_inds[:, 1, 0] = h5_V.shape[0] - 1
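Writing the units as a fixed-width byte-string array (dtype='S') avoids h5py's differing treatment of Python str lists on Python 2 and 3. A small sketch of the same attribute write against a throwaway file (file name illustrative):

    import h5py
    import numpy as np

    with h5py.File('example_svd.h5', 'w') as h5_f:      # illustrative file name
        h5_V = h5_f.create_dataset('V', data=np.random.rand(4, 3))
        h5_V.attrs['units'] = np.array(['a. u.'], dtype='S')
        print(h5_V.attrs['units'])                       # [b'a. u.']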