from __future__ import print_function, division

import logging
log = logging.getLogger(__name__)

import numpy as np
np.seterr(all='ignore')

import os
import collections
import glob
import pathlib

from . import storage
from . import utils

import re
import fabio
import pyFAI

try:
    import matplotlib.pyplot as plt
except ImportError:
    log.warning("Can't import matplotlib !")

def _read(fname):
    """ read data from file using fabio """
    f = fabio.open(fname)
    data = f.data
    del f  # close file
    return data

def read(fnames):
    """ read data from file(s) using fabio """
    if isinstance(fnames, str):
        data = _read(fnames)
    else:
        # read one image to know the image size
        temp = _read(fnames[0])
        shape = [len(fnames), ] + list(temp.shape)
        data = np.empty(shape)
        data[0] = temp
        for i in range(1, len(fnames)): data[i] = _read(fnames[i])
    return data

def ai_as_dict(ai):
    """ ai is a pyFAI azimuthal integrator """
    methods = dir(ai)
    methods = [m for m in methods if m.find("get_") == 0]
    names = [m[4:] for m in methods]
    values = [getattr(ai, m)() for m in methods]
    ret = dict(zip(names, values))
    ret["detector"] = ai.detector.get_name()
    return ret

def ai_as_str(ai):
    """ ai is a pyFAI azimuthal integrator """
    s = ["# Detector        : %s" % ai.detector.name,
         "# Pixel      [um] : %.2fx%.2f" % (ai.pixel1*1e6, ai.pixel2*1e6),
         "# Distance   [mm] : %.3f" % (ai.dist*1e3),
         "# Center     [mm] : %.3f,%.3f" % (ai.poni1*1e3, ai.poni2*1e3),
         "# Center     [px] : %.3f,%.3f" % (ai.poni1/ai.pixel1, ai.poni2/ai.pixel2),
         "# Wavelength [A]  : %.5f" % (ai.wavelength*1e10),
         "# rot[1,2,3] [rad]: %.3f,%.3f,%.3f" % (ai.rot1, ai.rot2, ai.rot3)]
    return "\n".join(s)

def do1d(ai, imgs, mask=None, npt_radial=600, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator;
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark, int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out_i = np.empty((len(imgs), npt_radial))
    out_s = np.empty((len(imgs), npt_radial))
    for _i, img in enumerate(imgs):
        q, i, sig = ai.integrate1d(img-dark, npt_radial, mask=mask, safe=safe,
                                   unit="q_A^-1", method=method, error_model="poisson",
                                   polarization_factor=polCorr)
        out_i[_i] = i
        out_s[_i] = sig
    return q, np.squeeze(out_i), np.squeeze(out_s)

def do2d(ai, imgs, mask=None, npt_radial=600, npt_azim=360, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator;
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark, int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out = np.empty((len(imgs), npt_azim, npt_radial))
    for _i, img in enumerate(imgs):
        i2d, q, azTheta = ai.integrate2d(img-dark, npt_radial, npt_azim=npt_azim,
                                         mask=mask, safe=safe, unit="q_A^-1", method=method,
                                         polarization_factor=polCorr)
        out[_i] = i2d
    return q, azTheta, np.squeeze(out)
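
# Illustrative usage of the two integration helpers above; this is a sketch,
# not executed code, and the .poni / .edf file names are placeholders:
#
#   ai  = pyFAI.load("pyfai.poni")          # or getAI(...), defined below
#   img = read("sample_0001.edf")
#   q, i, err  = do1d(ai, img, npt_radial=600)
#   q, az, i2d = do2d(ai, img, npt_radial=600, npt_azim=360)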

def getAI(poni, folder=None):
    """ get AzimuthalIntegrator instance:
        → if poni is an AzimuthalIntegrator instance, it is returned as is
        → if poni is a dictionary, it is used to define one
        → if poni is a string, it is used as a filename to read;
          in this case, if folder is given it is used (together with all of its
          parent folders) as search path (along with ./ and the home folder)
    """
    if isinstance(poni, pyFAI.azimuthalIntegrator.AzimuthalIntegrator):
        ai = poni
    elif isinstance(poni, dict):
        ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**poni)
    elif isinstance(poni, str):
        # check if the file exists as given (e.g. in the current folder)
        if os.path.isfile(poni):
            ai = pyFAI.load(poni)
        # if the file does not exist, look for one with that name around
        else:
            # build search paths
            folders = []
            if folder is not None:
                temp = os.path.abspath(folder)
                path = pathlib.Path(temp)
                folders = [str(path), ]
                for p in path.parents: folders.append(str(p))
            folders.append("./")
            folders.append(os.path.expanduser("~/"))
            # look for the file
            for path in folders:
                fname = path + "/" + poni
                if os.path.isfile(fname):
                    log.info("Found poni file %s", fname)
                    break
                else:
                    log.debug("Could not find poni file %s", fname)
            ai = pyFAI.load(fname)
    return ai
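
# Illustrative sketch of getAI; the file and folder names are hypothetical:
#
#   ai = getAI("exp.poni", folder="/data/run12/scan3")
#   # searches scan3, run12, /data, ..., then ./ and ~/ for exp.poni
#
#   # or bootstrap the integrator directly from a dictionary:
#   ai = getAI(dict(dist=0.1, pixel1=100e-6, pixel2=100e-6,
#                   poni1=0.1024, poni2=0.1024, wavelength=1e-10))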

g_mask_str = re.compile(r"(\w)\s*(<|>)\s*(\d+)")

def interpretMask(mask, shape=None):
    """
      if mask is an existing filename, it is read and converted to a boolean array
      if mask is a string like '[x|y] [<|>] int' (for example 'y>500'),
      the corresponding region (y>500 in the example) is masked out
    """
    maskout = None
    if isinstance(mask, str) and os.path.isfile(mask):
        maskout = read(mask).astype(bool)
    elif isinstance(mask, str) and not os.path.isfile(mask):
        err_msg = ValueError("The string '%s' could not be interpreted as a "
                             "simple mask; it should be something like x>10" % mask)
        assert shape is not None
        # interpret string
        maskout = np.zeros(shape, dtype=bool)
        match = g_mask_str.match(mask)
        if match is None: raise err_msg
        (axis, sign, lim) = match.groups()
        if axis not in ("x", "y"): raise err_msg
        if sign not in (">", "<"): raise err_msg
        lim = int(lim)
        idx = slice(lim, None) if sign == ">" else slice(None, lim)
        if axis == 'y':
            maskout[idx, :] = True
        else:
            maskout[:, idx] = True
    elif isinstance(mask, np.ndarray):
        maskout = mask.astype(bool)
    elif mask is None:
        assert shape is not None
        maskout = np.zeros(shape, dtype=bool)
    else:
        raise ValueError("Could not interpret %s as mask input" % mask)

    if shape is not None and maskout.shape != shape:
        raise ValueError("The mask shape %s does not match the shape given as "
                         "argument %s" % (maskout.shape, shape))
    return maskout
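
# Illustrative sketch of interpretMask; shape and threshold are arbitrary:
#
#   m = interpretMask("y>500", shape=(1024, 1024))  # rows 500 and above -> True (masked)
#   m = interpretMask("mask.edf")                   # read from file, if it exists
#   m = interpretMask(None, shape=(1024, 1024))     # nothing masked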

def doFolder(folder, files='*.edf*', nQ=1500, force=False, mask=None,
             saveChi=True, poni='pyfai.poni', storageFile='auto', diagnostic=None):
    """ calc 1D curves from files in folder, returning a dictionary of stuff
        nQ      : number of Q-points (equispaced)
        force   : if True, redo from beginning even if previous data are found;
                  if False, do only new files
        mask    : can be a filename or an array of booleans; pixels that are True
                  are disregarded
        saveChi : self-explanatory
        poni    : could be:
                  → an AzimuthalIntegrator instance
                  → a filename that will be looked for in
                       1   'folder' first
                       2   in ../folder
                       3   in ../../folder
                       ....
                       n-1 in pwd
                       n   in homefolder
                  → a dictionary (used to bootstrap an AzimuthalIntegrator via
                    AzimuthalIntegrator(**poni))
    """
    if storageFile == 'auto': storageFile = folder + "/" + "pyfai_1d.h5"

    if storageFile is not None and os.path.isfile(storageFile) and not force:
        saved = storage.DataStorage(storageFile)
    else:
        saved = None

    # which poni file to use:
    ai = getAI(poni, folder)

    files = utils.getFiles(folder, files)
    if saved is not None:
        files = [f for f in files if utils.getBasename(f) not in saved["files"]]

    if len(files) > 0:
        shape = read(files[0]).shape
        mask = interpretMask(mask, shape)

        data = np.empty((len(files), nQ))
        err = np.empty((len(files), nQ))
        for ifname, fname in enumerate(files):
            img = read(fname)
            q, i, e = do1d(ai, img, mask=mask, npt_radial=nQ)
            data[ifname] = i
            err[ifname] = e
            if saveChi:
                chi_fname = utils.removeExt(fname) + ".chi"
                utils.saveTxt(chi_fname, q, i, e, info=ai_as_str(ai), overwrite=True)

        files = [utils.getBasename(f) for f in files]
        files = np.asarray(files)
        if saved is not None:
            files = np.concatenate((saved["files"], files))
            data = np.concatenate((saved["data"], data))
            err = np.concatenate((saved["err"], err))
        ret = dict(q=q, folder=folder, files=files, data=data, err=err,
                   pyfai=ai_as_dict(ai), pyfai_info=ai_as_str(ai), mask=mask)

        # add info from diagnostic if provided
        if diagnostic is not None:
            for k in diagnostic:
                ret[k] = np.asarray([diagnostic[k][f] for f in ret['files']])
        ret = storage.DataStorage(ret)
        if storageFile is not None: ret.save(storageFile)
    else:
        ret = saved
    return ret
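
# Illustrative sketch of doFolder; paths and the mask string are placeholders.
# Results are also cached in <folder>/pyfai_1d.h5 unless storageFile=None:
#
#   res = doFolder("/data/run12", files="*.edf*", nQ=1500,
#                  poni="pyfai.poni", mask="y>1100")
#   # res["q"], res["data"], res["err"], res["files"] are then available
#   # (DataStorage also allows attribute access, e.g. res.q, as used elsewhere
#   # in this module)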

def _calc_R(x, y, xc, yc):
    """ calculate the distance of each 2D point from the center (xc, yc) """
    return np.sqrt((x-xc)**2 + (y-yc)**2)

def _chi2(c, x, y):
    """ calculate the algebraic distance between the data points and the mean
        circle centered at c=(xc, yc) """
    Ri = _calc_R(x, y, *c)
    return Ri - Ri.mean()

def leastsq_circle(x, y):
    """ fit a circle to the points (x, y) by least squares; returns xc, yc, R """
    from scipy import optimize
    # use the barycenter as initial guess for the center
    center_estimate = np.nanmean(x), np.nanmean(y)
    center, ier = optimize.leastsq(_chi2, center_estimate, args=(x, y))
    xc, yc = center
    Ri = _calc_R(x, y, *center)
    R = Ri.mean()
    residu = np.sum((Ri - R)**2)
    return xc, yc, R
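
# Quick sanity check of the circle fit on synthetic data (illustrative only):
#
#   t = np.linspace(0, 2*np.pi, 50)
#   x = 10 + 3*np.cos(t) + np.random.normal(scale=0.05, size=t.size)
#   y = -4 + 3*np.sin(t) + np.random.normal(scale=0.05, size=t.size)
#   xc, yc, R = leastsq_circle(x, y)   # expected close to (10, -4, 3)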

def find_center(img, psize=100e-6, dist=0.1, wavelength=0.8e-10, center=None, reference=None, **kwargs):
    """ center is the initial center guess, as (x, y) in pixels (can be None);
        reference is a reference q value drawn as a vertical line in the 2D plot """
    plt.ion()
    kw = dict(pixel1=psize, pixel2=psize, dist=dist, wavelength=wavelength)
    kw.update(kwargs)
    ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**kw)
    fig_img, ax_img = plt.subplots(1, 1)
    fig_pyfai, ax_pyfai = plt.subplots(1, 1)
    temp = ax_img.imshow(img)
    plt.sca(ax_img)  # set figure to use for mouse interaction
    temp.set_clim(*np.percentile(img, (2, 95)))
    ans = ""
    print("Enter 'end' when done")
    while ans != "end":
        if center is None:
            print("Click on beam center:")
            plt.sca(ax_img)  # set figure to use for mouse interaction
            center = plt.ginput()[0]
        print("Selected center:", center)
        ai.set_poni1(center[1]*psize)
        ai.set_poni2(center[0]*psize)
        q, az, i = do2d(ai, img)
        mesh = ax_pyfai.pcolormesh(q, az, i)
        mesh.set_clim(*np.percentile(i, (2, 95)))
        ax_pyfai.set_title(str(center))
        if reference is not None: ax_pyfai.axvline(reference)
        plt.pause(0.01)
        plt.draw()
        last_center = center  # remember the last center actually used for the final report
        ans = input("Enter to continue with clicking or enter xc,yc values ")
        if ans == '':
            center = None
        else:
            try:
                center = list(map(float, ans.split(",")))
            except Exception:
                center = None
            if center == []: center = None
    print("Final values: (in pixels) %.3f %.3f" % (last_center[0], last_center[1]))
    return ai

def average(fileOrFolder, delays=slice(None), scale=1, norm=None, returnAll=False, plot=False,
            showTrend=False):
    """ average 1D curves from a DataStorage file/folder, optionally filtering
        by delay and normalizing over a (qmin, qmax) range """
    data = storage.DataStorage(fileOrFolder)
    if isinstance(delays, slice):
        idx = np.arange(data.delays.shape[0])[delays]
    elif isinstance(delays, (int, float)):
        idx = data.delays == float(delays)
    else:
        # default: take negative delays
        idx = data.delays < 0
    if idx.sum() == 0:
        print("No data with the current filter")
        return None
    i = data.data[idx]
    q = data.q
    if isinstance(norm, (tuple, list)):
        idx = (q > norm[0]) & (q < norm[1])
        norm = np.nanmean(i[:, idx], axis=1)
        i = i/norm[:, np.newaxis]
    elif isinstance(norm, np.ndarray):
        # elif: avoid normalizing twice when norm was given as a (qmin, qmax) range
        i = i/norm[:, np.newaxis]
    title = "%s %s" % (fileOrFolder, str(delays))
    utils.plotdata(q, i*scale, showTrend=showTrend, plot=plot, title=title)
    if returnAll:
        return q, i.mean(axis=0)*scale, i
    else:
        return q, i.mean(axis=0)*scale
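
# Illustrative sketch of average; the h5 file is a doFolder output and is
# assumed to also contain a 'delays' array (e.g. added through the 'diagnostic'
# argument of doFolder). Values below are arbitrary examples:
#
#   q, iavg = average("/data/run12/pyfai_1d.h5", delays=slice(0, 10),
#                     norm=(2.0, 2.5), plot=True)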

#### Utilities for chi files ####

def chiRead(fname, scale=1):
    q, i = np.loadtxt(fname, unpack=True, usecols=(0, 1))
    return q, i*scale

def chiPlot(fname, useTheta=False, E=12.4):
    q, i = chiRead(fname)
    lam = 12.4/E  # wavelength [A] from energy [keV] (hc ~ 12.4 keV*A)
    theta = 2*180/np.pi*np.arcsin(q*lam/4/np.pi)
    x = theta if useTheta else q
    plt.plot(x, i, label=fname)

def chiAverage(folder, basename="", scale=1, norm=None, returnAll=False, plot=False, showTrend=False, clim='auto'):
    """ average the *.chi files matching folder/basename*; see `average` for norm/scale """
    files = glob.glob("%s/%s*chi" % (folder, basename))
    files.sort()
    print(files)
    if len(files) == 0:
        print("No file found (basename %s)" % basename)
        return None
    q, _ = chiRead(files[0])
    i = np.asarray([chiRead(f)[1] for f in files])
    if isinstance(norm, (tuple, list)):
        idx = (q > norm[0]) & (q < norm[1])
        norm = np.nanmean(i[:, idx], axis=1)
        i = i/norm[:, np.newaxis]
    elif isinstance(norm, np.ndarray):
        # elif: avoid normalizing twice when norm was given as a (qmin, qmax) range
        i = i/norm[:, np.newaxis]
    title = "%s %s" % (folder, basename)
    utils.plotdata(q, i, plot=plot, showTrend=showTrend, title=title, clim=clim)
    if (showTrend and plot): plt.subplot(1, 2, 1)
    if showTrend:
        plt.pcolormesh(np.arange(i.shape[0]), q, i.T)
        plt.xlabel("image number, 0 being older")
        plt.ylabel(r"q ($\AA^{-1}$)")
    if (showTrend and plot): plt.subplot(1, 2, 2)
    if plot:
        plt.plot(q, i.mean(axis=0)*scale)
    if (plot or showTrend):
        plt.title(folder + "/" + basename)
    if returnAll:
        return q, i.mean(axis=0)*scale, i
    else:
        return q, i.mean(axis=0)*scale