pycroscopy, commit 6476b094
Authored Feb 08, 2019 by ssomnath
Cleaned up import statements and PEP8 cleaning
Parent: a6d61eb4
Changes: 1 file
pycroscopy/io/translators/AR_hdf5.py
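Most of the diff below replaces namespace-prefixed pyUSID calls (usid.hdf_utils.*, usid.write_utils.*) with helpers imported once at module level, and removes the old install-pyUSID-at-import fallback. A minimal, self-contained sketch of the new pattern, using names taken from the diff (the scratch file name is hypothetical):

    import numpy as np
    import h5py
    # New style: import the pyUSID helpers once at the top of the module
    from pyUSID.io.hdf_utils import create_indexed_group
    from pyUSID.io.write_utils import Dimension

    x_dim = np.linspace(0, 1e-6, 10)  # e.g. 10 columns spanning 1 micron (made up)
    with h5py.File('pattern_demo.h5', 'w') as h5_file:  # hypothetical scratch file
        # Old style: h5_meas_group = usid.hdf_utils.create_indexed_group(h5_file, 'Measurement')
        h5_meas_group = create_indexed_group(h5_file, 'Measurement')
        # Old style: usid.write_utils.Dimension('Cols', 'm', x_dim)
        cols = Dimension('Cols', 'm', x_dim)  # describes one position axis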
...
...
@@ -8,22 +8,15 @@ import numpy as np
 import h5py
 import os
 import sys
-import re # used to get note values
+import re  # used to get note values
 
 from pyUSID.io.translator import Translator
-from pyUSID.io.write_utils import make_indices_matrix
-
-
-def install(package):
-    subprocess.call([sys.executable, "-m", "pip", "install", package])
-
-# Package for downloading online files:
-# Finally import pyUSID.
-try:
-    import pyUSID as usid
-except ImportError:
-    warn('pyUSID not found. Will install with pip.')
-    import pip
-    install('pyUSID')
-    import pyUSID as usid
+from pyUSID.io.write_utils import Dimension, make_indices_matrix
+from pyUSID.io.hdf_utils import create_indexed_group, write_main_dataset, write_simple_attrs, print_tree, get_attributes
 
 if sys.version_info.major == 3:
     unicode = str
 
 
 class ARhdf5(Translator):
     '''
...
...
@@ -56,8 +49,7 @@ class ARhdf5(Translator):
         self.channels_name = []
         self.points_per_sec = None
 
-    def translate(self, data_filepath, out_filename, verbose=False, debug=False):
+    def translate(self, data_filepath, out_filename, verbose=False, debug=False):
         '''
         The main function that translates the provided file into a .h5 file
...
...
@@ -81,7 +73,7 @@ class ARhdf5(Translator):
         self.debug = debug
 
-        #Open the datafile
+        # Open the datafile
         try:
             data_filepath = os.path.abspath(data_filepath)
             ARh5_file = h5py.File(data_filepath, 'r')
...
...
@@ -89,7 +81,7 @@ class ARhdf5(Translator):
             print('Unable to open the file', data_filepath)
             raise
 
-        #Get info from the origin file like Notes and Segments
+        # Get info from the origin file like Notes and Segments
         self.notes = ARh5_file.attrs['Note']
         self.segments = ARh5_file['ForceMap']['Segments']  #shape: (X, Y, 4)
         self.segments_name = list(ARh5_file['ForceMap'].attrs['Segments'])
...
...
@@ -109,9 +101,9 @@ class ARhdf5(Translator):
         extension_idx = self.segments_name.index('Ext')
         short_ext = np.amin(np.array(self.segments[:, :, extension_idx]))
         longest_ext = np.amax(np.array(self.segments[:, :, extension_idx]))
-        difference = longest_ext - short_ext # this is a difference between integers
-        tot_length = (np.amax(self.segments) - difference) + 1 # +1 otherwise \
-        # array(tot_length) will be of 1 position shorter
+        difference = longest_ext - short_ext  # this is a difference between integers
+        tot_length = (np.amax(self.segments) - difference) + 1  # +1 otherwise
+        # array(tot_length) will be of 1 position shorter
         points_trimmed = np.array(self.segments[:, :, extension_idx]) - short_ext
         if self.debug:
             print('Data were trimmed in the extension segment of {} points'.format(difference))
...
...
@@ -122,7 +114,7 @@ class ARhdf5(Translator):
         h5_file = h5py.File(h5_path, 'w')
 
         # Create the measurement group
-        h5_meas_group = usid.hdf_utils.create_indexed_group(h5_file, 'Measurement')
+        h5_meas_group = create_indexed_group(h5_file, 'Measurement')
 
         # Create all channels and main datasets
         # at this point the main dataset are just function of time
...
...
@@ -131,15 +123,14 @@ class ARhdf5(Translator):
         y_dim = np.linspace(0, np.float(self.note_value('FastScanSize')), self.map_size['Y'])
         z_dim = np.arange(tot_length) / np.float(self.points_per_sec)
-        pos_dims = [usid.write_utils.Dimension('Cols', 'm', x_dim),
-                    usid.write_utils.Dimension('Rows', 'm', y_dim)]
-        spec_dims = [usid.write_utils.Dimension('Time', 's', z_dim)]
+        pos_dims = [Dimension('Cols', 'm', x_dim),
+                    Dimension('Rows', 'm', y_dim)]
+        spec_dims = [Dimension('Time', 's', z_dim)]
 
-        # This is quite time consuming, but on magnetic drive
-        # is limited from the disk, and therefore is not useful
+        # This is quite time consuming, but on magnetic drive is limited from the disk, and therefore is not useful
         # to parallelize these loops
         for index, channel in enumerate(self.channels_name):
-            cur_chan = usid.hdf_utils.create_indexed_group(h5_meas_group, 'Channel')
+            cur_chan = create_indexed_group(h5_meas_group, 'Channel')
             main_dset = np.empty((self.map_size['X'], self.map_size['Y'], tot_length))
             for column in np.arange(self.map_size['X']):
                 for row in np.arange(self.map_size['Y']):
...
...
@@ -152,7 +143,7 @@ class ARhdf5(Translator):
             if index == 0:
                 first_main_dset = cur_chan
                 quant_unit = self.get_def_unit(channel)
-                h5_raw = usid.hdf_utils.write_main_dataset(cur_chan,  # parent HDF5 group
+                h5_raw = write_main_dataset(cur_chan,  # parent HDF5 group
                                             main_dset,  # 2D array of raw data
                                             'Raw_' + channel,  # Name of main dset
                                             channel,  # Physical quantity
...
...
@@ -161,7 +152,7 @@ class ARhdf5(Translator):
                                             spec_dims,  #spectroscopy dimensions
                                             )
             else:
-                h5_raw = usid.hdf_utils.write_main_dataset(cur_chan,  # parent HDF5 group
+                h5_raw = write_main_dataset(cur_chan,  # parent HDF5 group
                                             main_dset,  # 2D array of raw data
                                             'Raw_' + channel,  # Name of main dset
                                             channel,  # Physical quantity
...
...
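For context, the write_main_dataset calls in this section pass the parent channel group, the data array, a dataset name, the physical quantity, and the position/spectroscopic Dimension lists. A hedged, self-contained sketch of that call pattern, with sizes, names, and units invented for illustration (argument order per pyUSID's write_main_dataset: group, data, name, quantity, units, position dimensions, spectroscopic dimensions):

    import numpy as np
    import h5py
    from pyUSID.io.hdf_utils import create_indexed_group, write_main_dataset
    from pyUSID.io.write_utils import Dimension

    n_cols, n_rows, n_steps = 4, 3, 8
    data_2d = np.random.rand(n_cols * n_rows, n_steps)  # positions x spectral points (made up)

    pos_dims = [Dimension('Cols', 'm', np.linspace(0, 1e-6, n_cols)),
                Dimension('Rows', 'm', np.linspace(0, 1e-6, n_rows))]
    spec_dims = [Dimension('Time', 's', np.arange(n_steps) / 1.0e3)]

    with h5py.File('demo_usid.h5', 'w') as h5_file:  # hypothetical scratch file
        h5_meas = create_indexed_group(h5_file, 'Measurement')
        h5_chan = create_indexed_group(h5_meas, 'Channel')
        h5_main = write_main_dataset(h5_chan,        # parent HDF5 group
                                     data_2d,        # 2D array: positions x spectral
                                     'Raw_Defl',     # name of the main dataset (made up)
                                     'Deflection',   # physical quantity (made up)
                                     'm',            # units (made up)
                                     pos_dims,       # position dimensions
                                     spec_dims)      # spectroscopic dimensions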
@@ -178,13 +169,13 @@ class ARhdf5(Translator):
         # Make Channels with IMAGES.
         # Position indices/values are the same of all other channels
         # Spectroscopic indices/valus are they are just one single dimension
-        img_spec_dims = [usid.write_utils.Dimension('arb', 'a.u.', [1])]
+        img_spec_dims = [Dimension('arb', 'a.u.', [1])]
         for index, image in enumerate(ARh5_file['Image'].keys()):
             main_dset = np.reshape(np.array(ARh5_file['Image'][image]), (-1, 1), order='F')
-            cur_chan = usid.hdf_utils.create_indexed_group(h5_meas_group, 'Channel')
+            cur_chan = create_indexed_group(h5_meas_group, 'Channel')
             if index == 0:
                 first_image_dset = cur_chan
-                h5_raw = usid.hdf_utils.write_main_dataset(cur_chan,  # parent HDF5 group
+                h5_raw = write_main_dataset(cur_chan,  # parent HDF5 group
                                             main_dset,  # 2D array of image (shape: P*Q x 1)
                                             'Img_' + image,  # Name of main dset
                                             image,  # Physical quantity
...
...
@@ -196,7 +187,7 @@ class ARhdf5(Translator):
                                             h5_pos_vals=first_main_dset['Position_Values'],
                                             )
             else:
-                h5_raw = usid.hdf_utils.write_main_dataset(cur_chan,  # parent HDF5 group
+                h5_raw = write_main_dataset(cur_chan,  # parent HDF5 group
                                             main_dset,  # 2D array of image (shape: P*Q x 1)
                                             'Img_' + image,  # Name of main dset
                                             image,  # Physical quantity
...
...
@@ -214,24 +205,24 @@ class ARhdf5(Translator):
         new_segments = {}
         for seg, name in enumerate(self.segments_name):
             new_segments.update({name: self.segments[0, 0, seg] - short_ext})
-        usid.hdf_utils.write_simple_attrs(h5_meas_group, {'Segments': new_segments,
+        write_simple_attrs(h5_meas_group, {'Segments': new_segments,
                                            'Points_trimmed': points_trimmed,
                                            'Notes': self.notes})
-        usid.hdf_utils.write_simple_attrs(h5_file,
+        write_simple_attrs(h5_file,
                            {'translator': 'ARhdf5',
                             'instrument': 'Asylum Research ' + self.note_value('MicroscopeModel'),
                             'AR sftware version': self.note_value('Version')})
         if self.debug:
-            print(usid.hdf_utils.print_tree(h5_file))
+            print(print_tree(h5_file))
             print('\n')
-            for key, val in usid.hdf_utils.get_attributes(h5_meas_group).items():
+            for key, val in get_attributes(h5_meas_group).items():
                 if key != 'Notes':
                     print('{} : {}'.format(key, val))
                 else:
                     print('{} : {}'.format(key, 'notes string too long to be written here.'))
 
-        #Clean up
+        # Clean up
         ARh5_file.close()
         h5_file.close()
         self.translated = True
...
...
@@ -254,7 +245,7 @@ class ARhdf5(Translator):
         try:
             match = re.search(r"^" + name + ":\s+(.+$)", self.notes, re.M)
             if not match:
-                raise
+                raise Exception
         except:
             match = re.search(r"^" + name + ":+(.+$)", self.notes, re.M)
         if (match):
...
...
@@ -284,7 +275,7 @@ class ARhdf5(Translator):
         """
         # Check if chan_name is string
-        if not isinstance(chan_name, basestring):
+        if not isinstance(chan_name, (str, unicode)):
             raise TypeError('The channel name must be of type string')
 
         # Find the default unit
...
...
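A hedged usage sketch of the translator defined in this file; the input file name is hypothetical, and the exact output-file naming is handled inside translate(), which the diff shows only in part:

    from pycroscopy.io.translators.AR_hdf5 import ARhdf5

    translator = ARhdf5()
    # Input: an Asylum Research force-map HDF5 file (hypothetical name).
    # With debug=True, translate() prints the resulting Measurement/Channel tree
    # and the attributes written to the measurement group (see the diff above).
    translator.translate('AR_force_map.h5', 'AR_force_map_usid', verbose=True, debug=True)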