2017-01-05 19:22:37 +01:00
|
|
|
from __future__ import print_function,division
|
|
|
|
|
2017-01-10 22:43:22 +01:00
|
|
|
import logging
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2017-01-05 19:22:37 +01:00
|
|
|
|
|
|
|
import numpy as np
|
|
|
|
np.seterr(all='ignore')
|
|
|
|
import os
|
|
|
|
import collections
|
|
|
|
import glob
|
|
|
|
import pathlib
|
2017-01-06 15:40:26 +01:00
|
|
|
from . import storage
|
|
|
|
from . import utils
|
2017-02-09 17:02:51 +01:00
|
|
|
from . import filters
|
2017-01-20 10:44:56 +01:00
|
|
|
import re
|
2017-01-10 00:28:29 +01:00
|
|
|
import fabio
|
2017-01-05 19:22:37 +01:00
|
|
|
import pyFAI
|
|
|
|
|
|
|
|
# matplotlib is optional: it is only required by the plotting helpers
# (find_center, chiPlot, chiAverage, ...); import failures are tolerated.
try:
    import matplotlib.pyplot as plt
except ImportError:
    # Logger.warn is deprecated; warning is the supported spelling
    log.warning("Can't import matplotlib !")
|
|
|
|
|
2017-01-10 00:28:29 +01:00
|
|
|
def _read(fname):
    """ read data from file using fabio """
    img = fabio.open(fname)
    data = img.data
    # drop the fabio image object so the underlying file gets closed
    del img
    return data
|
|
|
|
|
2017-01-10 00:28:29 +01:00
|
|
|
def read(fnames):
    """ read data from file(s) using fabio """
    if isinstance(fnames, str):
        return _read(fnames)
    # read the first image to learn the image size
    first = _read(fnames[0])
    stack = np.empty([len(fnames)] + list(first.shape))
    stack[0] = first
    for pos, fname in enumerate(fnames[1:], start=1):
        stack[pos] = _read(fname)
    return stack
|
|
|
|
|
|
|
|
def ai_as_dict(ai):
    """ ai is a pyFAI azimuthal intagrator"""
    # collect every get_* accessor and store its value under the
    # attribute name (with the "get_" prefix stripped)
    getters = [m for m in dir(ai) if m.startswith("get_")]
    ret = {name[4:]: getattr(ai, name)() for name in getters}
    ret["detector"] = ai.detector.get_name()
    return ret
|
|
|
|
|
2017-01-10 00:28:29 +01:00
|
|
|
def ai_as_str(ai):
    """Return a human-readable multi-line summary of a pyFAI azimuthal
    integrator geometry (detector, pixel size, distance, center, ...)."""
    out = []
    out.append("# Detector : %s" % ai.detector.name)
    out.append("# Pixel [um] : %.2fx%.2f" % (ai.pixel1*1e6,ai.pixel2*1e6))
    out.append("# Distance [mm] : %.3f" % (ai.dist*1e3))
    out.append("# Center [mm] : %.3f,%.3f" % (ai.poni1*1e3,ai.poni2*1e3))
    out.append("# Center [px] : %.3f,%.3f" % (ai.poni1/ai.pixel1,ai.poni2/ai.pixel2))
    out.append("# Wavelength [A] : %.5f" % (ai.wavelength*1e10))
    out.append("# rot[1,2,3] [rad]: %.3f,%.3f,%.3f" % (ai.rot1,ai.rot2,ai.rot3))
    return "\n".join(out)
|
2017-01-06 15:40:26 +01:00
|
|
|
|
2017-03-15 18:23:20 +01:00
|
|
|
def do1d(ai, imgs, mask = None, npt_radial = 600, method = 'csr',safe=True,dark=10., polCorr = 1,dezinger=None):
    """Azimuthally integrate one or more images into 1D curves.

    ai is a pyFAI azimuthal integrator (it can be defined with
    pyFAI.load(ponifile));
    dezinger: None or float (used as percentile of ai.separate);
    mask: True are points to be masked out.
    Returns (q, intensities, sigmas); intensity/sigma arrays are squeezed.
    """
    # force float to be sure of type casting for img
    if isinstance(dark, int):
        dark = float(dark)
    if imgs.ndim == 2:
        imgs = (imgs,)
    n_img = len(imgs)
    out_i = np.empty((n_img, npt_radial))
    out_s = np.empty((n_img, npt_radial))
    for pos, img in enumerate(imgs):
        if dezinger is not None and dezinger > 0:
            # remove outliers (zingers) before integrating
            _, img = ai.separate(img, npt_rad=npt_radial, npt_azim=512,
                                 unit='q_A^-1', method=method, mask=mask,
                                 percentile=dezinger)
        q, i, sig = ai.integrate1d(img - dark, npt_radial, mask=mask,
                                   safe=safe, unit="q_A^-1", method=method,
                                   error_model="poisson",
                                   polarization_factor=polCorr)
        out_i[pos] = i
        out_s[pos] = sig
    return q, np.squeeze(out_i), np.squeeze(out_s)
|
|
|
|
|
2017-01-10 00:28:29 +01:00
|
|
|
def do2d(ai, imgs, mask = None, npt_radial = 600, npt_azim=360,method = 'csr',safe=True,dark=10., polCorr = 1):
    """Azimuthally integrate one or more images into 2D (q, azimuth) maps.

    ai is a pyFAI azimuthal integrator (it can be defined with
    pyFAI.load(ponifile));
    mask: True are points to be masked out.
    Returns (q, azimuthal_angles, intensity); intensity is squeezed.
    """
    # force float to be sure of type casting for img
    if isinstance(dark, int):
        dark = float(dark)
    if imgs.ndim == 2:
        imgs = (imgs,)
    out = np.empty((len(imgs), npt_azim, npt_radial))
    for pos, img in enumerate(imgs):
        i2d, q, azTheta = ai.integrate2d(img - dark, npt_radial,
                                         npt_azim=npt_azim, mask=mask,
                                         safe=safe, unit="q_A^-1",
                                         method=method,
                                         polarization_factor=polCorr)
        out[pos] = i2d
    return q, azTheta, np.squeeze(out)
|
|
|
|
|
2017-03-03 23:09:34 +01:00
|
|
|
def getAI(poni=None,folder=None,**kwargs):
    """ get AzimuthalIntegrator instance:
        → if poni is already an AzimuthalIntegrator instance it is used as is
        → if poni is a string, it is used as filename to read.
          in this case if folder is given it is used (together with all its
          subfolder) as search path (along with ./ and home folder)
        → kwargs if present can be used to define (or override) parameters from files,
          dist,xcen,ycen,poni1,poni2,rot1,rot2,rot3,pixel1,pixel2,splineFile,
          detector,wavelength
    """
    if isinstance(poni,pyFAI.azimuthalIntegrator.AzimuthalIntegrator):
        ai = poni
    elif isinstance(poni,str):
        # look if file exists in cwd
        if os.path.isfile(poni):
            fname = poni
        # if file does not exist look for one with that name around
        else:
            # build search paths: 'folder' and all of its parents, then
            # the current directory and the home folder
            folders = []
            if folder is not None:
                temp = os.path.abspath(folder)
                path = pathlib.Path(temp)
                folders = [ str(path), ]
                for p in path.parents: folders.append(str(p))
            folders.append( "./" )
            folders.append( os.path.expanduser("~/") )
            # look for file; if nothing matches, fname keeps the last
            # candidate and pyFAI.load below will raise
            for path in folders:
                fname = path + "/" + poni
                if os.path.isfile(fname):
                    log.info("Found poni file %s",fname)
                    break
                else:
                    # bug fix: message used to read "Could not poni file"
                    log.debug("Could not find poni file %s",fname)
        ai = pyFAI.load(fname)
    else:
        ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator()
    # kwargs can define parameters or override the ones read from file
    for par,value in kwargs.items(): setattr(ai,par,value)
    # provide xcen and ycen for convenience (note: xcen changes poni2
    # and ycen changes poni1)
    if 'xcen' in kwargs: ai.poni2 = kwargs['xcen'] * ai.pixel2
    if 'ycen' in kwargs: ai.poni1 = kwargs['ycen'] * ai.pixel1
    ai.reset(); # needed in case of overridden parameters
    return ai
|
|
|
|
|
2017-01-20 10:44:56 +01:00
|
|
|
# regexp parsing simple mask strings like "x>10" or "y < 500";
# raw string avoids the invalid escape sequences \w and \d
g_mask_str = re.compile(r"(\w)\s*(<|>)\s*(\d+)")
|
|
|
|
|
2017-02-09 17:02:51 +01:00
|
|
|
def _interpretMask(mask,shape=None):
|
2017-01-20 10:44:56 +01:00
|
|
|
"""
|
|
|
|
if mask is an existing filename, returns it
|
|
|
|
if mask is a string like [x|y] [<|>] int;
|
|
|
|
for example y>500 will dis-regard out for y>500
|
|
|
|
"""
|
|
|
|
maskout = None
|
|
|
|
if isinstance(mask,str) and os.path.isfile(mask):
|
|
|
|
maskout = read(mask).astype(np.bool)
|
|
|
|
elif isinstance(mask,str) and not os.path.isfile(mask):
|
|
|
|
err_msg = ValueError("The string '%s' could not be interpreted as simple\
|
|
|
|
mask; it should be something like x>10"%mask)
|
|
|
|
assert shape is not None
|
|
|
|
# interpret string
|
|
|
|
maskout = np.zeros(shape,dtype=bool)
|
|
|
|
match = g_mask_str.match(mask)
|
|
|
|
if match is None: raise err_msg
|
|
|
|
(axis,sign,lim) = match.groups()
|
|
|
|
if axis not in ("x","y"): raise err_msg
|
|
|
|
if sign not in (">","<"): raise err_msg
|
|
|
|
lim = int(lim)
|
|
|
|
idx = slice(lim,None) if sign == ">" else slice(None,lim)
|
|
|
|
if axis == 'y':
|
|
|
|
maskout[idx,:] = True
|
|
|
|
else:
|
|
|
|
maskout[:,idx] = True
|
|
|
|
elif isinstance(mask,np.ndarray):
|
|
|
|
maskout = mask.astype(np.bool)
|
|
|
|
elif mask is None:
|
|
|
|
assert shape is not None
|
|
|
|
maskout = np.zeros(shape,dtype=bool)
|
|
|
|
else:
|
|
|
|
maskout = None
|
|
|
|
raise ValueError("Could not interpret %s as mask input"%mask)
|
|
|
|
|
|
|
|
if shape is not None and maskout.shape != shape:
|
|
|
|
raise ValueError("The mask shape %s does not match the shape given as\
|
|
|
|
argument %s"%(maskout.shape,shape))
|
|
|
|
return maskout
|
|
|
|
|
2017-01-05 19:22:37 +01:00
|
|
|
|
2017-02-09 17:02:51 +01:00
|
|
|
def interpretMask(masks,shape=None):
    """
    Combine one or more mask definitions into a single boolean mask.

    masks can be a single mask or a list of masks; each one can be:
      * an existing filename
      * a string like [x|y] [<|>] int;
      * a (2D) boolean numpy array
    The individual masks are OR-ed together (True = masked out).
    """
    if isinstance(masks, np.ndarray) and masks.ndim == 2:
        # bug fix: a single 2D array used to fall into the "sequence of
        # masks" case below and was iterated row by row
        masks = (masks,)
    elif not isinstance( masks, (list,tuple,np.ndarray) ):
        masks = (masks,)
    masks = [_interpretMask(mask,shape) for mask in masks]
    # put them all together
    mask = masks[0]
    for m in masks[1:]:
        mask = np.logical_or(mask,m)
    return mask
|
|
|
|
|
|
|
|
def removeBackground(data,qlims=(0,10),max_iter=30,background_regions=[],force=False,
                     storageFile=None,save=True,**removeBkg):
    """ similar function to the zray.utils one, this works on dataset created by
        doFolder

        qlims : q-range to work on
        force : if True redo every curve, else only the ones not done yet
        storageFile : where to save; if None data.filename is used

        NOTE(review): a second function with this same name is defined later
        in this module and shadows this one at import time - confirm which
        of the two is intended to be the public one.
    """
    idx = utils.findSlice(data.orig.q,qlims)
    # see if there are some to do ...
    if force:
        idx_start = 0
    else:
        # curves already processed are assumed to be the first len(data.data)
        idx_start = len(data.data)
    if idx_start < len(data.orig.data):
        # NOTE(review): the whole data.orig.data[:,idx] is passed, not just
        # the rows from idx_start on - confirm already-done curves are not
        # re-processed and duplicated by the concatenate below
        _q,_data = utils.removeBackground(data.orig.q[idx],data.orig.data[:,idx],
                       max_iter=max_iter,background_regions=background_regions,**removeBkg)
        data.q = _q
        data.data = np.concatenate( (data.data,_data ) )
        # NOTE(review): data.err[idx_start,idx] selects a single row - looks
        # like a slice (e.g. [idx_start:,idx]) may have been intended; verify
        data.err  = np.concatenate( (data.err ,data.err[idx_start,idx] ) )
    if save: data.save(storageFile); # if None uses .filename
    return data
|
|
|
|
|
|
|
|
|
2017-03-15 18:23:20 +01:00
|
|
|
def doFolder(folder,files='*.edf*',nQ = 1500,force=False,mask=None,dark=10,
             norm='auto',save_pyfai=False,saveChi=True,poni='pyfai.poni',
             storageFile='auto',save=True,logDict=None,dezinger=None,skip_first=0,last=None):
    """ calc 1D curves from files in folder, returning a dictionary of stuff
        nQ    : number of Q-points (equispaced)
        force : if True, redo from beginning even if previous data are found
                if False, do only new files
        mask  : can be a list of [filenames|array of booleans|mask string]
                pixels that are True are dis-regarded
        saveChi: self-explanatory
        dezinger: None or 0 to disable; good value is ~50. Needs good center and mask
        logDict: dictionary(-like) structure. has to have 'file' key
        save_pyfai: store all pyfai's internal arrays (~110 MB)
        poni  : could be:
            → an AzimuthalIntegrator instance
            → a filename that will be look for in
                 1 'folder' first
                 2 in ../folder
                 3 in ../../folder
                 ....
                 n-1 in pwd
                 n   in homefolder
            → a dictionary (use to bootstrap an AzimuthalIntegrator using
              AzimuthalIntegrator(**poni)
    """
    if storageFile == 'auto': storageFile = folder + "/" + "pyfai_1d.h5"

    # reuse previously integrated curves unless force is asked
    if os.path.isfile(storageFile) and not force:
        saved = storage.DataStorage(storageFile)
        log.info("Found %d images in storage file"%saved.data.shape[0])
    else:
        saved = None

    files = utils.getFiles(folder,files)
    if logDict is not None:
        # keep only the images that appear in the experiment log
        files = [f for f in files if utils.getBasename(f) in logDict['file'] ]
    files = files[skip_first:last]

    if saved is not None:
        # do only the files not present in the storage
        # NOTE(review): 'files' holds full paths here while saved["files"]
        # is filled with basenames below - confirm this comparison works
        files = [f for f in files if f not in saved["files"]]
    log.info("Will do azimuthal integration for %d files"%(len(files)))

    files = np.asarray(files)
    basenames = np.asarray( [ utils.getBasename(file) for file in files] )

    if len(files) > 0:
        # which poni file to use:
        ai = getAI(poni,folder)

        # read one image to know the shape, needed to interpret the mask
        shape = read(files[0]).shape
        mask = interpretMask(mask,shape)

        data = np.empty( (len(files),nQ) )
        err  = np.empty( (len(files),nQ) )
        for ifname,fname in enumerate(files):
            img = read(fname)
            q,i,e = do1d(ai,img,mask=mask,npt_radial=nQ,dark=dark,dezinger=dezinger)
            data[ifname] = i
            err[ifname]  = e
            if saveChi:
                # save one text file per image next to the original one
                chi_fname = utils.removeExt(fname) + ".chi"
                utils.saveTxt(chi_fname,q,np.vstack((i,e)),info=ai_as_str(ai),overwrite=True)

        if saved is not None:
            # merge with previously saved curves
            files = np.concatenate( (saved["files"] ,basenames ) )
            data  = np.concatenate( (saved["data"] ,data ) )
            err   = np.concatenate( (saved["err"] ,err ) )
        theta_rad = utils.qToTheta(q,wavelength=ai.wavelength)
        theta_deg = utils.qToTheta(q,wavelength=ai.wavelength,asDeg=True)
        # keep a pristine copy for later (re)processing, e.g. removeBackground
        orig = dict(data=data.copy(),err=err.copy(),q=q.copy())
        ret = dict(q=q,folder=folder,files=files,data=data,err=err,
              orig = orig, theta_rad = theta_rad, theta_deg=theta_deg,
              pyfai=ai_as_dict(ai),pyfai_info=ai_as_str(ai),mask=mask)
        if not save_pyfai:
            # drop pyfai's big internal arrays to keep the storage file small
            ret['pyfai']['chia'] = None
            ret['pyfai']['dssa'] = None
            ret['pyfai']['q'] = None
            ret['pyfai']['ttha'] = None

        ret = storage.DataStorage(ret)

        # add info from logDict if provided
        if logDict is not None:
            ret['log']=logDict
        # sometime saving is not necessary (if one has to do it after subtracting background
        if storageFile is not None and save: ret.save(storageFile)
    else:
        ret = saved
    return ret
|
2017-01-05 19:22:37 +01:00
|
|
|
|
|
|
|
|
2017-02-09 17:02:51 +01:00
|
|
|
def removeBackground(data,qlims=(0,10),max_iter=30,background_regions=[],
                     storageFile=None,save=True,**removeBkg):
    """ similar function to the zray.utils one, this works on dataset created by
        doFolder

        qlims : q-range to work on
        storageFile : where to save; if None data.filename is used
        NOTE: this definition shadows an earlier function of the same name
        in this module.
    """
    idx = utils.findSlice(data.q,qlims)
    data.q,data.data = utils.removeBackground(data.q[idx],data.data[:,idx],
                       max_iter=max_iter,background_regions=background_regions,**removeBkg)
    # data.err has shape (n_images, n_q): slice the q axis, consistently
    # with data.data above (bug fix: the previous data.err[idx] sliced
    # the image axis instead)
    data.err = data.err[:,idx]
    if save: data.save(storageFile); # if None uses .filename
    return data
|
|
|
|
|
2017-01-05 19:22:37 +01:00
|
|
|
def _calc_R(x,y, xc, yc):
|
|
|
|
""" calculate the distance of each 2D points from the center (xc, yc) """
|
|
|
|
return np.sqrt((x-xc)**2 + (y-yc)**2)
|
|
|
|
|
|
|
|
def _chi2(c, x, y):
    """ calculate the algebraic distance between the data points and the mean
        circle centered at c=(xc, yc) """
    radii = _calc_R(x, y, *c)
    # deviation of each point's radius from the average radius
    return radii - radii.mean()
|
|
|
|
|
|
|
|
def leastsq_circle(x,y):
    """Fit a circle to the 2D points (x, y) by least squares.

    Returns (xc, yc, R): the fitted center coordinates and radius.
    """
    from scipy import optimize
    # coordinates of the barycenter, used as initial guess for the center
    center_estimate = np.nanmean(x), np.nanmean(y)
    center, ier = optimize.leastsq(_chi2, center_estimate, args=(x,y))
    xc, yc = center
    Ri = _calc_R(x, y, *center)
    R = Ri.mean()
    # removed unused 'residu' computation (its value was never returned)
    return xc, yc, R
|
|
|
|
|
2017-01-13 14:49:48 +01:00
|
|
|
def find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,center=None,reference=None,**kwargs):
    """ Interactively find the beam center of img.

        center is the initial centr (can be None)
        reference is a reference position to be plot in 2D plots
        Returns the AzimuthalIntegrator with the last selected center.
    """
    plt.ion()
    kw = dict( pixel1 = psize, pixel2 = psize, dist = dist,wavelength=wavelength )
    kw.update(kwargs)
    ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**kw)
    fig_img,ax_img = plt.subplots(1,1)
    fig_pyfai,ax_pyfai = plt.subplots(1,1)
    # NOTE(review): this rebinds fig_pyfai to figure 2 - confirm intended
    fig_pyfai = plt.figure(2)
    temp= ax_img.imshow(img)
    plt.sca(ax_img); # set figure to use for mouse interaction
    temp.set_clim( *np.percentile(img,(2,95) ) )
    ans = ""
    last_center = None
    print("Enter 'end' when done")
    while ans != "end":
        if center is None:
            print("Click on beam center:")
            plt.sca(ax_img); # set figure to use for mouse interaction
            center = plt.ginput()[0]
        print("Selected center:",center)
        ai.set_poni1(center[1]*psize)
        ai.set_poni2(center[0]*psize)
        q,az,i = do2d(ai,img)
        mesh = ax_pyfai.pcolormesh(q,az,i)
        mesh.set_clim( *np.percentile(i,(2,95) ) )
        ax_pyfai.set_title(str(center))
        if reference is not None: ax_pyfai.axvline(reference)
        plt.pause(0.01)
        plt.draw()
        plt.draw_all()
        # remember the last used center: 'center' is reset below and would
        # be None after typing 'end'
        last_center = center
        ans=input("Enter to continue with clinking or enter xc,yc values ")
        if ans == '':
            center = None
        else:
            try:
                center = list(map(float,ans.split(",")))
            except Exception as e:
                center = None
            if center == []: center = None
    # bug fix: the original printed center[0] here, crashing with TypeError
    # because typing 'end' always leaves center == None
    if last_center is not None:
        print("Final values: (in pixels) %.3f %.3f"%(last_center[0],last_center[1]))
    return ai
|
|
|
|
|
2017-01-06 15:40:26 +01:00
|
|
|
def average(fileOrFolder,delays=slice(None),scale=1,norm=None,returnAll=False,plot=False,
            showTrend=False):
    """Average curves (selected by delay) from a doFolder storage file.

    delays : slice, single delay value, or anything else for delays < 0
    norm   : (qmin,qmax) tuple/list to normalize on that q-range, or an
             array of per-curve normalization factors
    Returns (q, averaged*scale) or (q, averaged*scale, all_curves) if
    returnAll; None if no curve matches the filter.
    """
    data = storage.DataStorage(fileOrFolder)
    if isinstance(delays,slice):
        idx = np.arange(data.delays.shape[0])[delays]
    elif isinstance(delays,(int,float)):
        idx = data.delays == float(delays)
    else:
        idx = data.delays < 0
    # NOTE(review): for the slice case idx holds indices, so idx.sum() == 0
    # also triggers when the only selected index is 0 - confirm intent
    if idx.sum() == 0:
        print("No data with the current filter")
        return None
    i = data.data[idx]
    q = data.q
    if isinstance(norm,(tuple,list)):
        idx = ( q>norm[0] ) & (q<norm[1])
        norm = np.nanmean(i[:,idx],axis=1)
        i = i/norm[:,np.newaxis]
    elif isinstance(norm,np.ndarray):
        # 'elif' is essential (bug fix): when norm was a (qmin,qmax) range it
        # has just been converted to an ndarray above and the data already
        # normalized; the original 'if' divided a second time. chiAverage
        # below uses the same elif structure.
        i = i/norm[:,np.newaxis]
    title = "%s %s" % (fileOrFolder,str(delays))
    utils.plotdata(q,i*scale,showTrend=showTrend,plot=plot,title=title)
    if returnAll:
        return q,i.mean(axis=0)*scale,i
    else:
        return q,i.mean(axis=0)*scale
|
2017-01-05 19:22:37 +01:00
|
|
|
|
|
|
|
#### Utilities for chi files ####
|
|
|
|
def chiRead(fname,scale=1):
    """ read the q and intensity columns of a .chi file; the intensity is
        multiplied by scale """
    columns = np.loadtxt(fname,unpack=True,usecols=(0,1))
    q, intensity = columns
    return q, intensity*scale
|
|
|
|
|
|
|
|
def chiPlot(fname,useTheta=False,E=12.4):
    """ plot a chi file, either vs q or vs 2-theta (E is the photon energy
        in keV, used for the q -> angle conversion) """
    q,i = chiRead(fname)
    lam = 12.4/E
    # 2-theta in degrees via Bragg's law (3.14 is used as pi, matching the
    # rest of this module)
    theta = 2*180/3.14*np.arcsin(q*lam/4/3.14)
    xaxis = theta if useTheta else q
    plt.plot(xaxis,i,label=fname)
|
|
|
|
|
2017-01-06 15:40:26 +01:00
|
|
|
|
|
|
|
|
|
|
|
def chiAverage(folder,basename="",scale=1,norm=None,returnAll=False,plot=False,showTrend=False,clim='auto'):
    """Average all '<basename>*chi' files found in folder.

    norm : (qmin,qmax) tuple/list to normalize on that q-range, or an array
           of per-curve normalization factors
    Returns (q, averaged*scale) or (q, averaged*scale, all_curves) when
    returnAll; None if no file matches.
    """
    files = glob.glob("%s/%s*chi"%(folder,basename))
    files.sort()
    print(files)
    if len(files) == 0:
        print("No file found (basename %s)" % basename)
        return None
    # read the first file to get the q axis, then stack all intensities
    q,_ = chiRead(files[0])
    i   = np.asarray( [ chiRead(f)[1] for f in files ] )
    if isinstance(norm,(tuple,list)):
        # normalize each curve on its mean value in the (qmin,qmax) range
        idx = ( q>norm[0] ) & (q<norm[1])
        norm = np.nanmean(i[:,idx],axis=1)
        i = i/norm[:,np.newaxis]
    elif isinstance(norm,np.ndarray):
        i = i/norm[:,np.newaxis]
    title = "%s %s" % (folder,basename)
    # NOTE(review): scale is applied only to the returned/plotted mean below,
    # not to the data passed to plotdata - confirm this asymmetry is intended
    utils.plotdata(q,i,plot=plot,showTrend=showTrend,title=title,clim=clim)
    if (showTrend and plot): plt.subplot(1,2,1)
    if showTrend:
        plt.pcolormesh(np.arange(i.shape[0]),q,i.T)
        plt.xlabel("image number, 0 being older")
        plt.ylabel(r"q ($\AA^{-1}$)")
    if (showTrend and plot): plt.subplot(1,2,2)
    if plot:
        plt.plot(q,i.mean(axis=0)*scale)
    if (plot or showTrend):
        plt.title(folder+"/"+basename)
    if returnAll:
        return q,i.mean(axis=0)*scale,i
    else:
        return q,i.mean(axis=0)*scale
|