Commit 26ed9bcb authored by Somnath, Suhas's avatar Somnath, Suhas Committed by unknown
Browse files

python3 compatibility fixes

parent 9b80cbfe
......@@ -605,7 +605,7 @@ def generate_guess(vdc, pr_vec, show_plots=False):
"""Find the coordinates of the points where the vertical line through the
centroid intersects with the convex hull"""
y_intersections = []
for pair in xrange(outline_1.shape[0]):
for pair in range(outline_1.shape[0]):
x_pt = find_intersection(outline_1[pair], outline_2[pair],
[geom_centroid[0], hull.min_bound[1]],
[geom_centroid[0], hull.max_bound[1]])
......@@ -617,7 +617,7 @@ def generate_guess(vdc, pr_vec, show_plots=False):
centroid intersects with the convex hull
'''
x_intersections = []
for pair in xrange(outline_1.shape[0]):
for pair in range(outline_1.shape[0]):
x_pt = find_intersection(outline_1[pair], outline_2[pair],
[hull.min_bound[0], geom_centroid[1]],
[hull.max_bound[0], geom_centroid[1]])
......
......@@ -275,12 +275,12 @@ def generateTestSpectroscopicData(num_bins=7, num_steps=3, num_pos=4):
Data organized as [steps x bins, positions]
"""
full_data = np.zeros((num_steps * num_bins, num_pos))
for pos in xrange(num_pos):
for pos in range(num_pos):
bin_count=0
for step in xrange(num_steps):
for bind in xrange(num_bins):
full_data[bin_count,pos] = (pos+1)*100 + (step+1)*10 + (bind+1)
bin_count+=1
for step in range(num_steps):
for bind in range(num_bins):
full_data[bin_count, pos] = (pos+1)*100 + (step+1)*10 + (bind+1)
bin_count += 1
return full_data
......
......@@ -848,7 +848,8 @@ def copyAttributes(source, dest, skip_refs=True):
"""
Copy attributes from one h5object to another
"""
for attr, atval in source.attrs.iteritems():
for attr in source.attrs.keys():
atval = source.attrs[attr]
"""
Don't copy references unless asked
"""
......
......@@ -93,7 +93,8 @@ def unnest_parm_dicts(image_parms, prefix=''):
"""
new_parms = dict()
for name, val in image_parms.iteritems():
for name in image_parms.keys():
val = image_parms[name]
# print 'name',name,'val',val
name = '-'.join([prefix]+name.split()).strip('-')
if isinstance(val, dict):
......
from __future__ import division, print_function, absolute_import, unicode_literals
import struct
import array
import logging
import warnings
import re
try:
import StringIO
......@@ -291,7 +291,7 @@ def get_structdmtypes_for_python_typeorobject(typeorobj):
return None, get_dmtype_for_name('struct')
elif comparer(structarray):
return None, get_dmtype_for_name('array')
logging.warn("No appropriate DMType found for %s, %s", typeorobj, type(typeorobj))
warnings.warn("No appropriate DMType found for %s, %s", typeorobj, type(typeorobj))
return None
......@@ -441,7 +441,7 @@ def dm_read_array(f, outdata=None):
write_array(f, outdata)
return array_header
else:
logging.warn("Unsupported type for conversion to array:%s", outdata)
warnings.warn("Unsupported type for conversion to array:%s", outdata)
else:
# supports arrays of structs and arrays of types,
......
......@@ -135,8 +135,8 @@ class GDMTranslator(Translator):
# Now read the raw data files:
pos_ind = 0
for row_ind in xrange(1,num_rows+1):
for col_ind in xrange(1,num_cols+1):
for row_ind in range(1,num_rows+1):
for col_ind in range(1,num_cols+1):
file_path = path.join(folder_path,'fSweep_r'+str(row_ind)+'_c'+str(col_ind)+'.mat')
print('Working on row {} col {}'.format(row_ind,col_ind))
if path.exists(file_path):
......
......@@ -193,7 +193,7 @@ class ImageTranslator(Translator):
old_parms = h5_meas.attrs
old_parms.pop('machine_id', None)
old_parms.pop('timestame', None)
test = [meas_grp.attrs[key] == old_parms[key] for key in old_parms.iterkeys()]
test = [meas_grp.attrs[key] == old_parms[key] for key in old_parms.keys()]
if all(test):
return h5_raw
# the clear (actually the repack) does not work on the ubuntu VM / Windows.
......
......@@ -490,8 +490,8 @@ class NDataTranslator(Translator):
# Create new measurement group for each set of parameters
meas_grp = MicroDataGroup('Measurement_')
# Write the parameters as attributes of the group
for key, val in meas_parms.iteritems():
meas_grp.attrs[key] = val
for key in meas_parms.keys():
meas_grp.attrs[key] = meas_parms[key]
chan_grp = MicroDataGroup('Channel_000')
meas_grp.addChildren([chan_grp])
......
......@@ -124,8 +124,8 @@ class SporcTranslator(Translator):
# Now read the raw data files:
pos_ind = 0
for row_ind in xrange(1,num_rows+1):
for col_ind in xrange(1,num_cols+1):
for row_ind in range(1, num_rows+1):
for col_ind in range(1, num_cols+1):
file_path = path.join(folder_path,'result_r'+str(row_ind)+'_c'+str(col_ind)+'.mat')
#print('Working on row {} col {}'.format(row_ind,col_ind))
if path.exists(file_path):
......
......@@ -247,8 +247,8 @@ class Cluster(object):
Get the parameters of the estimator used and write them
as attributes of the group
'''
for parm, val in self.estimator.get_params().iteritems():
cluster_grp.attrs[parm] = val
for parm in self.estimator.get_params().keys():
cluster_grp.attrs[parm] = self.estimator.get_params()[parm]
hdf = ioHDF5(self.h5_main.file)
h5_clust_refs = hdf.writeData(cluster_grp)
......
......@@ -156,8 +156,8 @@ class Decomposition(object):
Get the parameters of the estimator used and write them
as attributes of the group
'''
for parm, val in self.estimator.get_params().iteritems():
decomp_grp.attrs[parm] = val
for parm in self.estimator.get_params().keys():
decomp_grp.attrs[parm] = self.estimator.get_params()[parm]
hdf = ioHDF5(self.h5_main.file)
h5_decomp_refs = hdf.writeData(decomp_grp)
......
......@@ -1281,7 +1281,7 @@ class ImageWindow(object):
fimabs = np.abs(fim)
fimabs_max = np.zeros(r_n-1)
for k in xrange(r_n-1):
for k in range(r_n-1):
r1 = r_vec[k]
r2 = r_vec[k+1]
r_ind = np.where((r_mat >= r1) & (r_mat <= r2) == True)
......@@ -1294,7 +1294,7 @@ class ImageWindow(object):
'''
count = 0
local_max = []
for k in xrange(1, fimabs_max.size-1):
for k in range(1, fimabs_max.size-1):
if fimabs_max[k-1] < fimabs_max[k] and fimabs_max[k] > fimabs_max[k+1]:
count += 1
local_max.append(k)
......
......@@ -6,7 +6,6 @@ Created on Tue Oct 6 15:34:12 2015
"""
from __future__ import division, print_function, absolute_import
import math
from skimage.feature import match_descriptors, register_translation
from skimage.measure import ransac
from skimage.transform import warp, SimilarityTransform
......@@ -14,6 +13,7 @@ import warnings
import h5py
import numpy as np
import skimage.feature
import multiprocessing as mp
class ImageTransformation(object):
......@@ -160,7 +160,7 @@ class FeatureExtractorParallel(object):
# start pool of workers
print('launching %i kernels...' % (processes))
pool = multiProcess.Pool(processes)
pool = mp.Pool(processes)
tasks = [(imp) for imp in self.data]
chunk = int(self.data.shape[0] / processes)
jobs = pool.imap(detect, tasks, chunksize=chunk)
......@@ -351,9 +351,9 @@ def _center_and_normalize_points(points):
centroid = np.mean(points, axis=0)
rms = math.sqrt(np.sum((points - centroid) ** 2) / points.shape[0])
rms = np.sqrt(np.sum((points - centroid) ** 2) / points.shape[0])
norm_factor = math.sqrt(2) / rms
norm_factor = np.sqrt(2) / rms
matrix = np.array([[norm_factor, 0, -norm_factor * centroid[0]],
[0, norm_factor, -norm_factor * centroid[1]],
......@@ -546,8 +546,8 @@ class RigidTransform(object):
rotation = 0
self.params = np.array([
[math.cos(rotation), - math.sin(rotation), 0],
[math.sin(rotation), math.cos(rotation), 0],
[np.cos(rotation), - np.sin(rotation), 0],
[np.sin(rotation), np.cos(rotation), 0],
[ 0, 0, 1]
])
......@@ -708,7 +708,7 @@ class RigidTransform(object):
@property
def rotation(self):
return math.atan2(self.params[1, 0], self.params[1, 1])
return np.atan2(self.params[1, 0], self.params[1, 1])
@property
def translation(self):
......@@ -803,7 +803,7 @@ class geoTransformerParallel(object):
return matches
# start pool of workers
pool = multiprocess.Pool(processes)
pool = mp.Pool(processes)
print('launching %i kernels...'%(processes))
tasks = [ (desc1, desc2) for desc1, desc2 in zip(desc[:],desc[1:]) ]
......@@ -1125,6 +1125,7 @@ class geoTransformerSerial(object):
desc = self.features[-1]
keypts = self.features[0]
maxDis = kwargs.get('maximum_distance', np.infty)
processes = kwargs.get('processes', 2)
def match(desc):
......
......@@ -139,7 +139,7 @@ def doSVD(h5_main, num_comps=None):
# copy attributes
copy_main_attributes(h5_main, h5_V)
h5_V.attrs['units'] = ['a. u.']
h5_V.attrs['units'] = np.array(['a. u.'], dtype='S')
del ds_S, ds_V, ds_U, svd_grp
......@@ -165,11 +165,11 @@ def doSVD(h5_main, num_comps=None):
Check h5_main for plot group references.
Copy them into V if they exist
'''
for key, ref in h5_main.attrs.iteritems():
for key in h5_main.attrs.keys():
if '_Plot_Group' not in key:
continue
ref_inds = getH5RegRefIndices(ref, h5_main, return_method='corners')
ref_inds = getH5RegRefIndices(h5_main.attrs[key], h5_main, return_method='corners')
ref_inds = ref_inds.reshape([-1, 2, 2])
ref_inds[:, 1, 0] = h5_V.shape[0] - 1
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment