more functions: added 2d mask and filtering of 1d diffs...

Marco Cammarata 2017-01-10 00:28:29 +01:00
parent 434fd6a3b1
commit 852f883942
7 changed files with 373 additions and 102 deletions

xray/__init__.py (new file)

@ -0,0 +1,8 @@
from . import storage
from . import utils
from . import mask
try:
from . import azav
except ImportError:
print("Can't import azav ...")
from . import dataReduction

xray/azav.py

@ -11,6 +11,7 @@ import glob
import pathlib
from . import storage
from . import utils
import fabio
import pyFAI
try:
@ -18,15 +19,27 @@ try:
except ImportError:
log.warn("Can't import matplotlib !")
def pyFAIread(fname):
def _read(fname):
""" read data from file using fabio """
import fabio
f = fabio.open(fname)
data = f.data
del f
del f; # close file
return data
def pyFAI_dict(ai):
def read(fnames):
""" read data from file(s) using fabio """
if isinstance(fnames,str):
data = _read(fnames)
else:
# read one image to know img size
temp = _read(fnames[0])
shape = [len(fnames),]+list(temp.shape)
data = np.empty(shape)
data[0] = temp
for i in range(1,len(fnames)): data[i] = _read(fnames[i])
return data
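
A minimal usage sketch of the new read() helper (file names are hypothetical; fabio must be installed):

img = read("run1/img_0001.edf")               # single file -> 2D array
stack = read(["run1/img_0001.edf",
              "run1/img_0002.edf"])           # list of files -> 3D stack, first axis is the file index
print(img.shape, stack.shape)
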
def ai_as_dict(ai):
""" ai is a pyFAI azimuthal intagrator"""
methods = dir(ai)
methods = [m for m in methods if m.find("get_") == 0]
@ -36,11 +49,11 @@ def pyFAI_dict(ai):
ret["detector"] = ai.detector.get_name()
return ret
def pyFAI_info(ai):
def ai_as_str(ai):
""" ai is a pyFAI azimuthal intagrator"""
return "#" + str(ai).replace("\n","\n#")
def pyFAI1d(ai, imgs, mask = None, npt_radial = 600, method = 'csr',safe=True,dark=10., polCorr = 1):
def do1d(ai, imgs, mask = None, npt_radial = 600, method = 'csr',safe=True,dark=10., polCorr = 1):
""" ai is a pyFAI azimuthal intagrator
it can be defined with pyFAI.load(ponifile)
mask: True are points to be masked out """
@ -57,7 +70,7 @@ def pyFAI1d(ai, imgs, mask = None, npt_radial = 600, method = 'csr',safe=True,da
out_s[_i] = sig
return q,np.squeeze(out_i),np.squeeze(out_s)
def pyFAI2d(ai, imgs, mask = None, npt_radial = 600, npt_azim=360,method = 'csr',safe=True,dark=10., polCorr = 1):
def do2d(ai, imgs, mask = None, npt_radial = 600, npt_azim=360,method = 'csr',safe=True,dark=10., polCorr = 1):
""" ai is a pyFAI azimuthal intagrator
it can be defined with pyFAI.load(ponifile)
mask: True are points to be masked out """
@ -72,32 +85,46 @@ def pyFAI2d(ai, imgs, mask = None, npt_radial = 600, npt_azim=360,method = 'csr'
out[_i] = i2d
return q,azTheta,np.squeeze(out)
def _getAI(poni,folder):
def getAI(poni,folder=None):
""" get AzimuthalIntegrator instance:
if poni is a dictionary, it is used to define one;
if poni is a string, it is used as a filename to read.
In this case, if folder is given, it is used (together with all its
parent folders) as search path (along with ./ and the home folder)
"""
if isinstance(poni,pyFAI.azimuthalIntegrator.AzimuthalIntegrator):
ai = poni
elif isinstance(poni,dict):
ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**poni)
else:
if poni == 'auto':
temp = os.path.abspath(folder)
path = pathlib.Path(temp)
folders = [ str(path), ]
for p in path.parents: folders.append(str(p))
elif isinstance(poni,str):
# look if the file exists in cwd
if os.path.exists(poni):
ai = pyFAI.load(poni)
# if the file does not exist, look for one with that name in nearby folders
else:
# build search paths
folders = []
if folder is not None:
temp = os.path.abspath(folder)
path = pathlib.Path(temp)
folders = [ str(path), ]
for p in path.parents: folders.append(str(p))
folders.append( "./" )
folders.append( os.path.expanduser("~/") )
# look for file
for path in folders:
poni = path + "/" + "pyfai.poni"
if os.path.exists(poni):
log.info("Found pyfai.poni in %s",path)
fname = path + "/" + poni
if os.path.exists(fname):
log.info("Found poni file %s",fname)
break
else:
log.debug("Could not find pyfai.poni in %s",path)
ai = pyFAI.load(poni)
log.debug("Could not poni file %s",fname)
ai = pyFAI.load(fname)
return ai
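
A sketch of the three accepted forms of the poni argument (values and paths are hypothetical):

ai = getAI("pyfai.poni", folder="/data/sample/run1")   # filename, searched in folder, its parents, ./ and ~/
ai = getAI(dict(dist=0.1, pixel1=100e-6, pixel2=100e-6,
                wavelength=0.8e-10))                   # dict -> AzimuthalIntegrator(**poni)
ai = getAI(pyFAI.load("pyfai.poni"))                   # an existing integrator is passed through unchanged
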
def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,
saveChi=True,poni='auto',storageFile='auto',diagnostic=None):
saveChi=True,poni='pyfai.poni',storageFile='auto',diagnostic=None):
""" calc 1D curves from files in folder, returning a dictionary of stuff
nQ : number of Q-points (equispaced)
force : if True, redo from beginning even if previous data are found
@ -107,17 +134,16 @@ def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,
saveChi: self-explanatory
poni : could be:
an AzimuthalIntegrator instance
a filename
a dictionary (use to bootstrap an AzimuthalIntegrator using
AzimuthalIntegrator(**poni)
the string 'auto' that will look for the file 'pyfai.poni' in:
a filename that will be looked for in
1 'folder' first
2 in ../folder
3 in ../../folder
....
n-1 in pwd
n in homefolder
"""
a dictionary (used to bootstrap an AzimuthalIntegrator via
AzimuthalIntegrator(**poni))
"""
if storageFile == 'auto': storageFile = folder + "/" + "pyfai_1d.h5"
if os.path.exists(storageFile) and not force:
@ -126,7 +152,7 @@ def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,
saved = None
# which poni file to use:
ai = _getAI(poni,folder)
ai = getAI(poni,folder)
files = utils.getFiles(folder,files)
if saved is not None:
@ -137,18 +163,18 @@ def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,
if isinstance(mask,np.ndarray):
mask = mask.astype(bool)
elif mask is not None:
mask = pyFAIread(mask).astype(bool)
mask = read(mask).astype(bool)
data = np.empty( (len(files),nQ) )
err = np.empty( (len(files),nQ) )
for ifname,fname in enumerate(files):
img = pyFAIread(fname)
q,i,e = pyFAI1d(ai,img,mask=mask,npt_radial=nQ)
img = read(fname)
q,i,e = do1d(ai,img,mask=mask,npt_radial=nQ)
data[ifname] = i
err[ifname] = e
if saveChi:
chi_fname = utils.removeExt(fname) + ".chi"
utils.saveTxt(chi_fname,q,i,e,info=pyFAI_info(ai),overwrite=True)
utils.saveTxt(chi_fname,q,i,e,info=ai_as_str(ai),overwrite=True)
files = [ utils.getBasename(f) for f in files ]
files = np.asarray(files)
@ -157,7 +183,7 @@ def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,
data = np.concatenate( (saved["data"] ,data ) )
err = np.concatenate( (saved["err"] ,err ) )
ret = dict(q=q,folder=folder,files=files,data=data,err=err,
pyfai=pyFAI_dict(ai),pyfai_info=pyFAI_info(ai),mask=mask)
pyfai=ai_as_dict(ai),pyfai_info=ai_as_str(ai),mask=mask)
# add info from diagnostic if provided
if diagnostic is not None:
@ -191,7 +217,7 @@ def leastsq_circle(x,y):
residu = np.sum((Ri - R)**2)
return xc, yc, R
def pyFAI_find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
def find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
plt.ion()
kw = dict( pixel1 = psize, pixel2 = psize, dist = dist,wavelength=wavelength )
kw.update(kwargs)
@ -199,8 +225,9 @@ def pyFAI_find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
fig_img,ax_img = plt.subplots(1,1)
fig_pyfai,ax_pyfai = plt.subplots(1,1)
fig_pyfai = plt.figure(2)
ax_img.imshow(img)
temp= ax_img.imshow(img)
plt.sca(ax_img); # make this the current axes for mouse interaction
temp.set_clim( *np.percentile(img,(2,95) ) )
ans = ""
print("Enter 'end' when done")
while ans != "end":
@ -213,12 +240,13 @@ def pyFAI_find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
print("Selected center:",xc,yc)
ai.set_poni1(xc*psize)
ai.set_poni2(yc*psize)
q,az,i = pyFAI2d(ai,img)
ax_pyfai.pcolormesh(q,az,i)
q,az,i = do2d(ai,img)
mesh = ax_pyfai.pcolormesh(q,az,i)
mesh.set_clim( *np.percentile(i,(2,95) ) )
ax_pyfai.set_title(str( (xc,yc) ))
plt.pause(0.01)
plt.draw()
ans=input("Enter to continue with clinking or enter xc,yc values")
ans=input("Enter to continue with clinking or enter xc,yc values ")
print("Final values: (in pixels) %.3f %.3f"%(xc,yc))
return ai
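
A sketch of a typical call to doFolder with the new defaults (paths are hypothetical):

res = doFolder("/data/sample/run1", files="*.edf*", nQ=1500,
               poni="pyfai.poni", saveChi=True)
print(res["q"].shape, res["data"].shape, res["err"].shape)   # (nQ,), (nFiles, nQ), (nFiles, nQ)
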

xray/dataReduction.py

@ -13,7 +13,8 @@ def subtractReferences(i,idx_ref, useRatio = False):
""" given data in i (first index is shot num) and the indeces of the
references (idx_ref, array of integers) it interpolates the closest
reference data for each shot and subtracts it (or divides it, depending
on useRatio = [True|False]; """
on useRatio = [True|False];
Note: it works in place (i.e. it modifies i) """
iref=np.empty_like(i)
idx_ref = np.squeeze(idx_ref)
idx_ref = np.atleast_1d(idx_ref)
@ -47,7 +48,7 @@ def subtractReferences(i,idx_ref, useRatio = False):
return i
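
Since subtractReferences works in place, callers that still need the absolute data should pass a copy; a small sketch with made-up data:

import numpy as np
data = np.random.random((10, 600))        # 10 shots, 600 q-points
idx_ref = np.arange(0, 10, 5)             # e.g. every 5th shot is a reference
diffs = subtractReferences(data.copy(), idx_ref, useRatio=False)
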
def averageScanPoints(scan,data,isRef=None,lpower=None,useRatio=False,\
funcForEveraging=np.nanmean):
funcForAveraging=np.nanmean):
""" given scanpoints in 'scan' and corresponding data in 'data'
average all data corresponding to exactly the same scan point.
If the values in scan are coming from a readback, rounding might be
@ -58,51 +59,55 @@ def averageScanPoints(scan,data,isRef=None,lpower=None,useRatio=False,\
subtracted/divided by the interpolated reference
if lpower is provided the data is divided by it (how it is done depends
on whether the ratio is used or not)
funcForEveraging: is usually np.nanmean or np.nanmedian. it can be any
funcForAveraging: is usually np.nanmean or np.nanmedian. it can be any
function that supports axis=0 as a keyword argument
"""
data = data.astype(np.float)
avData = np.nanmedian( data , axis = 0 )
if isRef is None: isRef = np.zeros( data.shape[0], dtype=bool )
assert data.shape[0] == isRef.shape[0]
# subtract reference only if there is at least one
if isRef.sum()>0:
data = subtractReferences(data,np.argwhere(isRef), useRatio=useRatio)
# create a copy (subtractReferences works in place)
diff = subtractReferences(data.copy(),np.argwhere(isRef), useRatio=useRatio)
else:
data = data.copy(); # create local copy
diff = data
# normalize signal for laser intensity if provided
if lpower is not None:
lpower = utils.reshapeToBroadcast(lpower,data)
if useRatio is False:
data /= lpower
diff /= lpower
else:
data = (data-1)/lpower+1
diff = (data-1)/lpower+1
scan_pos = np.unique(scan)
shape_out = [len(scan_pos),] + list(data.shape[1:])
ret = np.empty(shape_out)
err = np.empty(shape_out)
dataInScanPoint = []
shape_out = [len(scan_pos),] + list(diff.shape[1:])
ret = np.empty(shape_out)
err = np.empty(shape_out)
data_abs = np.empty(shape_out)
diffsInScanPoint = []
chi2_0 = []
for i,t in enumerate(scan_pos):
shot_idx = (scan == t)
# select data for the scan point
data_for_scan = data[shot_idx]
dataInScanPoint.append( data_for_scan )
diff_for_scan = diff[shot_idx]
diffsInScanPoint.append( diff_for_scan )
# calculate average
ret[i] = funcForEveraging(data_for_scan,axis=0)
ret[i] = funcForAveraging(diff_for_scan,axis=0)
data_abs[i] = funcForAveraging(data[shot_idx],axis=0)
# calculate std
noise = np.nanstd(data[shot_idx], axis = 0)
noise = np.nanstd(diff[shot_idx], axis = 0)
# calculate chi2 of different repetitions
chi2 = np.power( (data_for_scan - ret[i])/noise,2)
chi2 = np.power( (diff_for_scan - ret[i])/noise,2)
# sum over all axis but first
for _ in range(data_for_scan.ndim-1):
for _ in range(diff_for_scan.ndim-1):
chi2 = np.nansum( chi2, axis=-1 )
# store chi2_0
@ -111,8 +116,9 @@ def averageScanPoints(scan,data,isRef=None,lpower=None,useRatio=False,\
# store error of mean
err[i] = noise/np.sqrt(shot_idx.sum())
ret = dict(scan=scan_pos,data=ret,err=err,chi2_0=chi2_0,
dataInScanPoint=dataInScanPoint,avData=avData)
ret = dict(scan=scan_pos,data=ret,dataUnmasked=ret.copy(),err=err,errUnmasked=err.copy(),
chi2_0=chi2_0,diffsInScanPoint=diffsInScanPoint,dataAbsAvAll=avData,dataAbsAvScanPoint=data_abs,
dataAbs=data.copy())
ret = storage.DataStorage(ret)
return ret
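
A sketch of the returned DataStorage fields, using made-up delays and data:

import numpy as np
scan  = np.array([-5e-9, 100e-12, 1e-9] * 2)   # 3 scan points, 2 repetitions each
data  = np.random.random((6, 600))
isRef = scan == -5e-9
res = averageScanPoints(scan, data, isRef=isRef)
print(res.scan)                       # unique scan positions
print(res.data.shape)                 # (3, 600): averaged diff per scan point
print(len(res.diffsInScanPoint))      # list with the individual diffs of each scan point
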
@ -148,51 +154,107 @@ def calcTimeResolvedSignal(scan,data,reference="min",monitor=None,q=None,
if q is not None: ret["q"] = q
return ret
def errorFilter(data,threshold=4):
""" Very simple but effective filter for zinger like noise
def errorMask(data,threshold=5):
""" Q-by-Q mask !
Very simple but effective mask for zinger like noise
The noise is in general lower when using nanmean instead than
nanmedian but nanmean does not filter out 'spikes'.
This filter mitigate this effect by using nanmedian for the q points
nanmedian but nanmean does not mask out 'spikes'.
This mask mitigate this effect by using nanmedian for the q points
that have an higher than usual error (proxy for spikes ...)
tested with mi1245/dec2016/tiox/tiox1/run3
"""
assert data.data.ndim == 2
for iscan in range(len(data.dataInScanPoint)):
median_err = np.nanmedian(data.err[iscan])
idx = data.err[iscan] > threshold*median_err
data.data[iscan][idx] = \
np.nanmedian( data.dataInScanPoint[iscan][:,idx],axis=0 )
data.err[iscan][idx] = median_err
idx_mask = []
for iscan in range(len(data.diffsInScanPoint)):
temp = data.diffsInScanPoint[iscan]
# sqrt(len(temp)) = sqrt(numOfDiffs); needed to scale err (error of the mean) to the error of a single diff
idx = np.abs(temp-np.median(temp,axis=0)) > threshold*data.err[iscan]*np.sqrt(len(temp))
idx_mask.append( idx )
if "masks" not in data: data['masks'] = dict()
data['masks']['error'] = idx_mask
return data
def chi2Mask(data,threshold=2):
"""
Repetition-by-repetition mask: repetitions whose chi2 with respect to
the average of their scan point exceeds the threshold are masked out.
tested with mi1245/dec2016/tiox/tiox1/run3
"""
idx_mask = []
for iscan in range(len(data.diffsInScanPoint)):
idx = data.chi2_0[iscan] > threshold
# expand along other axis (q ...)
idx = utils.reshapeToBroadcast(idx,data.diffsInScanPoint[iscan])
idx_mask.append(idx)
if "masks" not in data: data['masks'] = dict()
data['masks']['chi2'] = idx_mask
return data
def applyMasks(data,which='all',funcForAveraging=np.nanmean):
# don't do anything if no mask is defined
if 'masks' not in data: return data
if which == 'all': which = list(data['masks'].keys())
totmask = []
for iscan in range(len(data.diffsInScanPoint)):
mask = data['masks'][which[0]][iscan]
for w in which[1:]:
mask = np.logical_and(mask,data['masks'][w][iscan])
mask = np.squeeze(mask)
totmask.append(mask); # store for later
# check if it is a q-by-q mask
if mask.shape == data.diffsInScanPoint[iscan].shape:
temp = np.ma.MaskedArray(data=data.diffsInScanPoint[iscan],mask=mask)
data.data[iscan] = funcForAveraging( temp,axis=0 )
else:
data.data[iscan] = funcForAveraging( data.diffsInScanPoint[iscan][~mask],axis=0)
data['mask'] = totmask
return data
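
The masks are meant to be chained, as done in doFolder_dataRed further down; a sketch on the output of averageScanPoints (thresholds shown are the defaults):

diffs = errorMask(diffs, threshold=5)     # q-by-q mask of zinger-like outliers
diffs = chi2Mask(diffs, threshold=2)      # mask whole repetitions with too large chi2
diffs = applyMasks(diffs, which='all', funcForAveraging=np.nanmean)
# diffs.data now holds the masked averages; the originals remain in diffs.dataUnmasked
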
def saveTxt(folder,data,delayToStr=True,info="",**kw):
""" data must be a data_storage instance """
def saveTxt(folder,data,delayToStr=True,basename='auto',info="",**kw):
""" data must be a DataStorage instance """
# folder ends usually with sample/run so use the last two subfolders
if basename == 'auto':
basename = "_".join(folder.rstrip("/").split("/")[-2:]) + "_"
q = data.q if "q" in data else np.arange(data.data.shape[-1])
# save one file with all average diffs
fname = "%s/diff_av_matrix.txt" % (folder)
fname = "%s/%sdiff_av_matrix.txt" % (folder,basename)
utils.saveTxt(fname,q,data.data,headerv=data.scan,**kw)
# save error bars in the matrix form
fname = "%s/%sdiff_av_matrix_err.txt" % (folder,basename)
utils.saveTxt(fname,q,data.err,headerv=data.scan,**kw)
for iscan,scan in enumerate(data.scan):
scan = utils.timeToStr(scan) if delayToStr else "%+10.5e" % scan
# try retrieving info on chi2
try:
chi2_0 = data.chi2_0[iscan]
info_delay = [ "# rep_num : chi2_0 ", ]
info_delay = [ "# rep_num : chi2_0 , discarded by chi2masking ?", ]
for irep,value in enumerate(chi2_0):
info_delay.append( "# %d : %.3f" % (irep,value))
if 'chi2' in data.masks: info_delay[-1] += " %s"%str(data.masks['chi2'][iscan][irep])
info_delay = "\n".join(info_delay)
if info != '': info_delay = "%s\n%s" % (info,info_delay)
except AttributeError:
info_delay = info
# save one file per timedelay with average diff (and err)
fname = "%s/diff_av_%s.txt" % (folder,scan)
utils.saveTxt(fname,q,data.data[iscan],e=data.err[iscan],
info=info_delay,**kw)
fname = "%s/%sdiff_av_%s.txt" % (folder,basename,scan)
if 'mask' in data:
tosave = np.vstack( (data.data[iscan],data.err[iscan],
data.dataUnmasked[iscan],data.errUnmasked[iscan] ) )
columns = 'q diffmask errmask diffnomask errnomask'.split()
else:
tosave = np.vstack( (data.data[iscan],data.err[iscan] ) )
columns = 'q diff err'.split()
utils.saveTxt(fname,q,tosave,info=info_delay,columns=columns)
# save one file per timedelay with all diffs for given delay
fname = "%s/diffs_%s.txt" % (folder,scan)
utils.saveTxt(fname,q,data.dataInScanPoint[iscan],info=info_delay,**kw)
fname = "%s/%sdiffs_%s.txt" % (folder,basename,scan)
utils.saveTxt(fname,q,data.diffsInScanPoint[iscan],info=info_delay,**kw)
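
As a rough indication of the output (file names are indicative; the exact delay string comes from utils.timeToStr), a call like the following writes:

saveTxt("/data/sample/run3", diffs, delayToStr=True)
# /data/sample/run3/sample_run3_diff_av_matrix.txt       all averaged diffs, one column per delay
# /data/sample/run3/sample_run3_diff_av_matrix_err.txt   corresponding errors
# /data/sample/run3/sample_run3_diff_av_100ps.txt        per-delay average (masked and unmasked columns if masks were applied)
# /data/sample/run3/sample_run3_diffs_100ps.txt          all individual diffs for that delay
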
def read_diff_av(folder,plot2D=False,save=None):

(tiox beamtime analysis macro)

@ -8,28 +8,53 @@ id9 = xray.id9
# use npz files (they can handle more stuff (list of arrays,unicode) than h5py)
id9.default_extension = '.npz'
#id9.default_extension_extension = '.h5'
#id9.default_extension = '.h5'
def azav(folder,nQ=1500,force=False,saveChi=True,
poni='auto',storageFile='auto',mask=470):
def readCalc():
fold = "../tiox/calculated_patterns/"
names = "alpha beta lam".split()
fnames = "alpha500K beta290K lambda".split()
calc = dict()
for name,fname in zip(names,fnames):
q,i=np.loadtxt(fold + "%s.xye.q"%fname,unpack=True)
calc[name] = xray.storage.DataStorage( dict( q=q, i=i ) )
return xray.storage.DataStorage(calc)
calc = readCalc()
def azav(folder,nQ=1500,force=False,saveChi=True,mask=470):
if isinstance(mask,int):
files = xray.utils.getFiles(folder,"*.edf*")
img = xray.azav.pyFAIread(files[0])
img = xray.azav.read(files[0])
temp = np.ones_like(img,dtype=bool)
temp[:mask] = False
mask = temp
return id9.doFolder_azav(folder,nQ=nQ,force=force,mask=mask,saveChi=saveChi,
poni=poni,storageFile=storageFile)
return id9.doFolder_azav(folder,nQ=nQ,force=force,mask=mask,saveChi=saveChi)
def datared(folder,monitor=(1,5),showPlot=True,**kw):
data,diffs = id9.doFolder_dataRed(folder,monitor=monitor,**kw)
if showPlot: xray.utils.plotdiffs(diffs.q,diffs.data,t=diffs.scan)
if showPlot:
xray.utils.plotdiffs(diffs.q,diffs.data,t=diffs.scan,
absSignal=diffs.dataAbsAvAll,absSignalScale=30)
plt.plot(calc.lam.q,calc.lam.i/1000+0.1,label='calc')
plt.title(folder + " norm %s" % str(monitor))
return data,diffs
def doall(folder,force=False):
azav(folder,force=force)
return datared(folder)
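
A hypothetical session with this macro:

data, diffs = doall("../tiox/tiox1/run3", force=False)   # azimuthal average, data reduction and plot in one go
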
def anaAmplitue(run=6):
fname = "../tiox/tiox1/run%d/diffs.npz" % run
data = xray.storage.DataStorage(fname)
ranges = ( (1.75,1.85), (2.2,2.4), (3.25,3.4) )
nPlot = len(ranges)
fig,ax = plt.subplots(nPlot,1,sharex=True)
for r,a in zip(ranges,ax):
idx = (data.q>r[0]) & (data.q<r[1])
amplitude = np.abs(data.data[:,idx]).mean(axis=1)
a.plot(data.scan,amplitude,'-o')
a.set_title("Range %s"%(str(r)))
def plotCalc(scale=1):
fold = "../tiox/calculated_patterns/"

xray/id9.py

@ -36,7 +36,7 @@ def readDelayFromDiagnostic(fname):
def doFolder_azav(folder,nQ=1500,force=False,mask=None,saveChi=True,
poni='auto',storageFile='auto'):
poni='pyfai.poni',storageFile='auto'):
""" very small wrapper around azav.doFolder, essentially just reading
the diagnostics.log """
@ -46,21 +46,28 @@ def doFolder_azav(folder,nQ=1500,force=False,mask=None,saveChi=True,
saveChi=saveChi,poni=poni,storageFile=storageFile,diagnostic=diag)
def doFolder_dataRed(folder,storageFile='auto',monitor=None,
funcForEveraging=np.nanmean,errFilter=True):
funcForAveraging=np.nanmean,errMask=5,chi2Mask=2,qlims=None):
if storageFile == 'auto' : storageFile = folder + "/" + "pyfai_1d" + default_extension
# read azimuthal averaged curves
data = storage.DataStorage(storageFile)
if qlims is not None:
idx = (data.q>qlims[0]) & (data.q<qlims[1])
data.data = data.data[:,idx]
data.q = data.q[idx]
# calculate differences
diffs = dataReduction.calcTimeResolvedSignal(data.delays,data.data,q=data.q,\
reference="min",monitor=monitor,funcForEveraging=funcForEveraging)
reference="min",monitor=monitor,funcForAveraging=funcForAveraging)
# filter if asked so
if funcForEveraging == np.nanmean and errFilter:
diffs = dataReduction.errorFilter(diffs)
# mask if requested
if errMask>0:
diffs = dataReduction.errorMask(diffs,threshold=errMask)
if chi2Mask>0:
diffs = dataReduction.chi2Mask(diffs,threshold=chi2Mask)
diffs = dataReduction.applyMasks(diffs)
# save txt and npz file
dataReduction.saveTxt(folder,diffs,info=data.pyfai_info)
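
A sketch of the extended reduction call (q-range and thresholds are hypothetical choices); as in the macro above, it returns the azimuthally averaged data and the reduced diffs:

data, diffs = doFolder_dataRed("/data/sample/run3", monitor=(1, 5),
                               qlims=(0.5, 8.0), errMask=5, chi2Mask=2,
                               funcForAveraging=np.nanmean)
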

xray/mask.py (new file)

@ -0,0 +1,138 @@
from __future__ import print_function
import sys
if sys.version_info.major == 2: input=raw_input
import logging as log
log.basicConfig(level=log.INFO)
import os
import numpy as np
import matplotlib.pyplot as plt
class MyMask(object):
def __init__(self,img=None):
self.comp = []
self.img = img
self.mask = None
def addCircle(self,xcen,ycen,radius):
self.comp.append( ["add","circle", [xcen,ycen,radius] ] )
def subtractCircle(self,xcen,ycen,radius):
self.comp.append( ["subtract","circle", [xcen,ycen,radius] ] )
def addRectangle(self,x1,y1,x2,y2):
if x1>x2: x1,x2=x2,x1
if y1>y2: y1,y2=y2,y1
self.comp.append( ["add","rectangle", [x1,y1,x2,y2] ])
def subtractRectangle(self,x1,y1,x2,y2):
if x1>x2: x1,x2=x2,x1
if y1>y2: y1,y2=y2,y1
self.comp.append( ["subtract","rectangle", [x1,y1,x2,y2] ])
def getMask(self,shape=None):
if shape is None: shape = self.img.shape
m = []
X,Y = np.meshgrid ( range(shape[0]),range(shape[1]) )
for o in self.comp:
whattodo = o[0]
kind=o[1]
pars=o[2]
if kind == "circle":
(xc,yc,r) = pars
d = np.sqrt((X-xc)**2+(Y-yc)**2)
#plt.imshow(d<r)
#raw_input()
m.append( d<r )
if kind == "rectangle":
(x1,y1,x2,y2) = pars
temp = (X>x1) & (X<x2) & ( Y>y1) & (Y<y2)
m.append( temp )
mask = np.zeros(shape,dtype=np.bool)
for i in range(len(m)):
whattodo = self.comp[i][0]
if (whattodo == "add"):
mask[m[i]] = True
else:
mask[m[i]] = False
self.mask = mask
return mask
def getMatplotlibMask(self,shape=None):
mask = self.getMask(shape=shape)
# convert
mpl_mask = np.zeros( (mask.shape[0],mask.shape[1],4) )
mpl_mask[:,:,:3] = 0.5; # gray color
mpl_mask[:,:,3] = mask/2; # give some transparency
return mpl_mask
def save(self,fname,inverted=False):
import fabio
mask = self.mask
if (inverted): mask = ~mask
i=fabio.edfimage.edfimage(mask.astype(np.uint8)); # edf does not support bool
i.save(fname)
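
A sketch of programmatic use of MyMask (shape and regions are hypothetical; True marks masked-out pixels):

import numpy as np
m = MyMask()
m.addRectangle(0, 0, 1024, 60)       # mask a band along one detector edge
m.addCircle(512, 512, 20)            # mask a small region around the beamstop
mask = m.getMask(shape=(1024, 1024))
np.save("mask.npy", mask)            # or m.save("mask.edf") to write an edf mask
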
def test():
mask = MyMask()
mask.addCircle(400,300,250)
mask.subtractCircle(400,300,150)
mask.addRectangle(350,250,1500,700)
mask.show()
return mask
def snap(point,shape,snapRange=20):
snapped = list(point)
if snapped[0] < snapRange: snapped[0] = 0
if snapped[0] > shape[0]-snapRange: snapped[0] = shape[0]
if snapped[1] < snapRange: snapped[1] = 0
if snapped[1] > shape[1]-snapRange: snapped[1] = shape[1]
return snapped
def getPoint(shape,snapRange):
c = plt.ginput()[0]
c = snap(c,shape,snapRange=snapRange)
return c
def makeMaskGui(img,snapRange=60):
""" snapRange controls border snapping (in pixels, use <= 0 to disable """
mask = MyMask(img)
ans='ok'
while (ans != 'done'):
plt.imshow(img)
plt.clim(np.percentile(img,(2,98)))
plt.imshow(mask.getMatplotlibMask())
plt.pause(0.01)
ans = input("What's next c/r/done? ")
if ans == "c":
print("Adding circle, click on center")
c = getPoint(img.shape,snapRange)
print("Adding circle, click on another point to define radius")
p = getPoint(img.shape,snapRange)
r = np.sqrt( (p[0]-c[0])**2 + (p[1]-c[1])**2 )
mask.addCircle(c[0],c[1],r)
if ans == "r":
print("Adding rectangle, click on one corner")
c1 = getPoint(img.shape,snapRange)
print("Adding rectangle, click on opposite corner")
c2 = getPoint(img.shape,snapRange)
mask.addRectangle(c1[0],c1[1],c2[0],c2[1])
plt.imshow(mask.getMatplotlibMask())
plt.pause(0.01)
fname = input("Enter a valid filename (ext .edf or .npy) to save the mask")
try:
if fname != '':
ext = os.path.splitext(fname)[1]
if ext == '.edf':
mask.save(fname)
elif ext == '.npy':
np.save(fname,mask.getMask())
except Exception as e:
log.error("Error in saving mask")
log.error(e)
finally:
return mask
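
A hypothetical interactive session:

import numpy as np
img = np.load("detector_image.npy")       # any 2D detector image (file name is made up)
mask = makeMaskGui(img, snapRange=60)     # answer 'c'/'r' to add shapes, 'done' to finish
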
if __name__ == "__main__":
test()
plt.show()
ans=input("Enter to finish")

xray/utils.py

@ -19,13 +19,14 @@ except ImportError:
_time_regex = re.compile( "(-?\d+\.?\d*(?:ps|ns|us|ms)?)")
_timeInStr_regex = re.compile("_(-?\d+\.?\d*(?:ps|ns|us|ms)?)")
def getFiles(folder,basename="*.edf*"):
def getFiles(folder,basename="*.edf*",nFiles=None):
files = glob.glob(folder + "/" + basename)
files.sort()
if nFiles is not None: files = files[:nFiles]
return files
def getEdfFiles(folder):
return getFiles(folder,basename="*.edf*")
def getEdfFiles(folder,**kw):
return getFiles(folder,basename="*.edf*",**kw)
def getDelayFromString(string) :
match = _timeInStr_regex.search(string)
@ -71,9 +72,9 @@ def removeExt(fname):
def getBasename(fname):
return removeExt(os.path.basename(fname));
def plotdata(q,data,t=None,plot=True,showTrend=True,title=None,clim='auto'):
def plotdata(q,data,x=None,plot=True,showTrend=True,title=None,clim='auto'):
if not (plot or showTrend): return
if t is None: t = np.arange(data.shape[0])
if x is None: x = np.arange(data.shape[0])
if clim == 'auto': clim = np.nanpercentile(data,(1.5,98.5))
one_plot = showTrend or plot
two_plot = showTrend and plot
@ -84,7 +85,7 @@ def plotdata(q,data,t=None,plot=True,showTrend=True,title=None,clim='auto'):
ax = np.atleast_1d(ax)
if showTrend:
plt.sca(ax[0])
plt.pcolormesh(t,q,data.T)
plt.pcolormesh(x,q,data.T)
plt.xlabel("image number, 0 being older")
plt.ylabel(r"q ($\AA^{-1}$)")
plt.clim( *clim )
@ -150,12 +151,11 @@ def plotdiffs(q,diffs,t,select=None,err=None,absSignal=None,absSignalScale=10,
fig.canvas.mpl_connect('pick_event', onpick)
def saveTxt(fname,q,i,e=None,headerv=None,info=None,overwrite=True):
def saveTxt(fname,q,data,headerv=None,info=None,overwrite=True,columns=''):
""" Write data to file 'fname' in text format.
Inputs:
q = x vector
i = 1D or 2D
e = 1D (discarded when i is 2D)
data = 1D or 2D array (one curve per row, sampled on q)
info = dictionary (saved as '# key : value') or string
headerv = vector to be used as header or string
"""
@ -170,13 +170,16 @@ def saveTxt(fname,q,i,e=None,headerv=None,info=None,overwrite=True):
else:
header = ""
if isinstance(headerv,str): header += "\n%s" % headerv
if i.ndim == 1:
x = np.vstack( (q,i,e) ) if e is not None else np.vstack( (q,i) )
if i.ndim == 2:
x = np.vstack( (q,i,) )
if data.ndim == 1:
x = np.vstack( (q,data) )
elif data.ndim == 2:
x = np.vstack( (q,data) )
if headerv is not None:
headerv = np.concatenate(( (i.shape[1],),headerv))
headerv = np.concatenate(( (data.shape[1],),headerv))
x = np.hstack( (headerv[:,np.newaxis],x) )
if columns != '':
s = "#" + " ".join( [str(c).center(12) for c in columns] )
header = header + "\n" + s if header != '' else s
np.savetxt(fname,x.T,fmt="%+10.5e",header=header,comments='')
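
A sketch of the new columns option (values are made up):

import numpy as np
q = np.linspace(0.3, 8, 600)
diff, err = np.random.random(600), np.full(600, 0.01)
saveTxt("diff_av_100ps.txt", q, np.vstack((diff, err)),
        columns="q diff err".split())
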
def reshapeToBroadcast(what,ref):