lots of changes: id9 and pyFAI routines now have their own submodule; worked on the storage system (HDF5- or npz-based)
This commit is contained in:
parent
0a7b628ac1
commit
abf786ee62
@@ -0,0 +1,59 @@
import logging as log
log.basicConfig(level=log.INFO)

import os
import collections
import glob
import numpy as np

from .xray import azav


def _conv(x):
    try:
        x = float(x)
    except (ValueError, TypeError):
        x = np.nan
    return x


def getFiles(folder,basename="*.edf*"):
    files = glob.glob(folder + "/" + basename)
    files.sort()
    return files


def getEdfFiles(folder):
    return getFiles(folder,basename="*.edf*")


def removeExt(fname):
    """ special remove-extension meant to work with compressed files, e.g. .edf and .edf.gz """
    if fname[-3:] == ".gz": fname = fname[:-3]
    return os.path.splitext(fname)[0]


def readDelayFromDiagnostic(fname):
    """ return an ordered dict of filenames; each key is associated with a
        rounded value of the delay """
    if os.path.isdir(fname): fname += "/diagnostics.log"
    data = np.genfromtxt(fname,usecols=(2,3),
        dtype=None,converters={3: lambda x: _conv(x)},
        names=['fname','delay'])
    files  = data['fname'].astype(str)  # to avoid encoding problems
    delays = data['delay']
    # skip lines that cannot be interpreted as float (like 'done', etc.)
    idx_ok = np.isfinite( delays )
    files  = files[idx_ok]
    delays = delays[idx_ok]
    delays = np.round(delays.astype(float),12)
    return collections.OrderedDict( zip(files,delays) )


def doFolder_azav(folder,nQ=1500,force=False,mask=None,saveChi=True,
    poni='auto',h5File='auto'):
    """ very small wrapper around azav.doFolder, essentially just reading
        the diagnostics.log """

    if h5File == 'auto': h5File = folder + "/" + "pyfai_1d.h5"

    diag = dict( delays = readDelayFromDiagnostic(folder) )

    return azav.doFolder(folder,files="*.edf*",nQ=nQ,force=force,mask=mask,
        saveChi=saveChi,poni=poni,h5File=h5File,diagnostic=diag)
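For orientation, a minimal usage sketch of the wrapper above (the folder path is hypothetical; it assumes a diagnostics.log inside the folder and a pyfai.poni discoverable as described in azav.doFolder):

    res = doFolder_azav("/data/run42")
    res["q"]        # (1500,) momentum-transfer axis
    res["data"]     # (n_images, 1500) azimuthally averaged curves
    res["delays"]   # per-image delays read from diagnostics.log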
151
mcutils.py
@@ -65,53 +65,6 @@ def noaxis(ax=None):
    ax.spines['left'].set_visible(False)
    ax.spines['bottom'].set_visible(False)

def xrayAttLenght(*args,**kw):
    from periodictable import xsf
    n = xsf.index_of_refraction(*args,**kw)
    if "wavelength" not in kw:
        wavelength = 12.398/np.asarray(kw["energy"])
    else:
        wavelength = np.asarray(kw["wavelength"])
    attenuation_length = np.abs( (wavelength*1e-10)/(4*np.pi*np.imag(n)) )
    return attenuation_length


def xrayFluo(atom,density,energy=7.,length=30.,I0=1e10,\
        det_radius=1.,det_dist=10.,det_material="Si",det_thick=300,verbose=True):
    """ compound: anything periodictable would understand
        density: in mM
        length: sample length in um
        energy: in keV, could be an array
    """
    import periodictable
    from periodictable import xsf
    wavelength = 12.398/energy
    atom = periodictable.__dict__[atom]
    # 1e3 -> from mM to M
    # 1e3 -> from L to cm3
    # so 1 mM = 1e-3 M/L = 1e-6 M/cm3
    density_g_cm3 = density*1e-6*atom.mass
    n = xsf.index_of_refraction(atom,density=density_g_cm3,wavelength=wavelength)
    attenuation_length = xrayAttLenght( atom,density=density_g_cm3,energy=energy )
    # um -> m: 1e-6
    fraction_absorbed = 1.-np.exp(-length*1e-6/attenuation_length)
    if verbose:
        print("Fraction of x-ray photons absorbed by the sample:",fraction_absorbed)
    ## detector ##
    det_area = np.pi*det_radius**2
    det_solid_angle = det_area/(4*np.pi*det_dist**2)
    if verbose:
        print("Detector fraction of solid angle:",det_solid_angle)
    det_att_len = xrayAttLenght(det_material,wavelength=atom.K_alpha)
    det_abs = 1-np.exp(-det_thick*1e-6/det_att_len)
    if verbose:
        print("Detector efficiency (assuming E_fluo = E_K_alpha):",det_abs)
    eff = fraction_absorbed*det_solid_angle*det_abs
    if verbose:
        print("Overall intensity (as ratio to the incoming one):",eff)
    return eff


def pulseDuration(t0,L,GVD):
    return t0*np.sqrt( 1.+(L/(t0**2/2/np.abs(GVD))))

@@ -1220,110 +1173,6 @@ def insertInSortedArray(a,v):
    a[idx]=v
    return a

##### X-ray images #############

def pyFAIread(fname):
    import fabio
    f = fabio.open(fname)
    data = f.data
    del f
    return data


def pyFAI_dict(ai):
    """ ai is a pyFAI azimuthal integrator """
    methods = dir(ai)
    methods = [m for m in methods if m.find("get_") == 0]
    names = [m[4:] for m in methods]
    values = [getattr(ai,m)() for m in methods]
    ret = dict( zip(names,values) )
    return ret


def pyFAI1d(ai, imgs, mask=None, npt_radial=600, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark,int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out_i = np.empty( (len(imgs), npt_radial) )
    out_s = np.empty( (len(imgs), npt_radial) )
    for _i,img in enumerate(imgs):
        q,i,sig = ai.integrate1d(img-dark, npt_radial, mask=mask, safe=safe,
            unit="q_A^-1", method=method, error_model="poisson",
            polarization_factor=polCorr)
        out_i[_i] = i
        out_s[_i] = sig
    return q,np.squeeze(out_i),np.squeeze(out_s)


def pyFAI2d(ai, imgs, mask=None, npt_radial=600, npt_azim=360, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark,int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out = np.empty( (len(imgs), npt_azim, npt_radial) )
    for _i,img in enumerate(imgs):
        i2d,q,azTheta = ai.integrate2d(img-dark, npt_radial, npt_azim=npt_azim,
            mask=mask, safe=safe, unit="q_A^-1", method=method,
            polarization_factor=polCorr )
        out[_i] = i2d
    return q,azTheta,np.squeeze(out)


def _calc_R(x,y, xc, yc):
    """ calculate the distance of each 2D point from the center (xc, yc) """
    return np.sqrt((x-xc)**2 + (y-yc)**2)


def _chi2(c, x, y):
    """ calculate the algebraic distance between the data points and the mean
        circle centered at c=(xc, yc) """
    Ri = _calc_R(x, y, *c)
    return Ri - Ri.mean()


def leastsq_circle(x,y):
    from scipy import optimize
    # coordinates of the barycenter
    center_estimate = np.nanmean(x), np.nanmean(y)
    center, ier = optimize.leastsq(_chi2, center_estimate, args=(x,y))
    xc, yc = center
    Ri = _calc_R(x, y, *center)
    R = Ri.mean()
    residu = np.sum((Ri - R)**2)
    return xc, yc, R


def pyFAI_find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
    import pyFAI
    plt.ion()
    kw = dict( pixel1=psize, pixel2=psize, dist=dist, wavelength=wavelength )
    kw.update(kwargs)
    ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**kw)
    fig_img,ax_img = plt.subplots(1,1)
    fig_pyfai,ax_pyfai = plt.subplots(1,1)
    fig_pyfai = plt.figure(2)
    ax_img.imshow(img)
    plt.sca(ax_img)  # set the figure to use for mouse interaction
    ans = ""
    print("Enter 'end' when done")
    while ans != "end":
        if ans == "":
            print("Click on beam center:")
            plt.sca(ax_img)  # set the figure to use for mouse interaction
            xc,yc = plt.ginput()[0]
        else:
            xc,yc = map(float,ans.split(","))
        print("Selected center:",xc,yc)
        ai.set_poni1(xc*psize)
        ai.set_poni2(yc*psize)
        q,az,i = pyFAI2d(ai,img)
        ax_pyfai.pcolormesh(q,az,i)
        ax_pyfai.set_title(str( (xc,yc) ))
        plt.pause(0.01)
        plt.draw()
        ans = input("Press Enter to continue clicking, or type xc,yc values: ")
    print("Final values: (in pixels) %.3f %.3f"%(xc,yc))
    return ai


### Objects ###

def objToDict(o,recursive=True):
@@ -0,0 +1,324 @@
from __future__ import print_function,division

import logging as log
log.basicConfig(level=log.INFO)

import numpy as np
np.seterr(all='ignore')
import os
import collections
import glob
import pathlib
from . import storage as storage
import pyFAI

try:
    import matplotlib.pyplot as plt
except ImportError:
    log.warning("Can't import matplotlib !")


def removeExt(fname):
    """ special remove-extension meant to work with compressed files, e.g. .edf and .edf.gz """
    if fname[-3:] == ".gz": fname = fname[:-3]
    return os.path.splitext(fname)[0]


def getBasename(fname):
    return removeExt(os.path.basename(fname))


def pyFAIread(fname):
    """ read data from file using fabio """
    import fabio
    f = fabio.open(fname)
    data = f.data
    del f
    return data


def pyFAI_dict(ai):
    """ ai is a pyFAI azimuthal integrator """
    methods = dir(ai)
    methods = [m for m in methods if m.find("get_") == 0]
    names  = [m[4:] for m in methods]
    values = [getattr(ai,m)() for m in methods]
    ret = dict( zip(names,values) )
    ret["detector"] = ai.detector.get_name()
    return ret

def pyFAI1d(ai, imgs, mask=None, npt_radial=600, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark,int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out_i = np.empty( (len(imgs), npt_radial) )
    out_s = np.empty( (len(imgs), npt_radial) )
    for _i,img in enumerate(imgs):
        q,i,sig = ai.integrate1d(img-dark, npt_radial, mask=mask, safe=safe,
            unit="q_A^-1", method=method, error_model="poisson",
            polarization_factor=polCorr)
        out_i[_i] = i
        out_s[_i] = sig
    return q,np.squeeze(out_i),np.squeeze(out_s)

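A minimal sketch of how the 1D integration is meant to be called (file names are hypothetical):

    ai = pyFAI.load("pyfai.poni")        # azimuthal integrator from a poni file
    img = pyFAIread("image_0001.edf")    # 2D detector image read via fabio
    q, i, err = pyFAI1d(ai, img, npt_radial=600)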
def pyFAI2d(ai, imgs, mask=None, npt_radial=600, npt_azim=360, method='csr', safe=True, dark=10., polCorr=1):
    """ ai is a pyFAI azimuthal integrator
        it can be defined with pyFAI.load(ponifile)
        mask: True are points to be masked out """
    # force float to be sure of type casting for img
    if isinstance(dark,int): dark = float(dark)
    if imgs.ndim == 2: imgs = (imgs,)
    out = np.empty( (len(imgs), npt_azim, npt_radial) )
    for _i,img in enumerate(imgs):
        i2d,q,azTheta = ai.integrate2d(img-dark, npt_radial, npt_azim=npt_azim,
            mask=mask, safe=safe, unit="q_A^-1", method=method,
            polarization_factor=polCorr )
        out[_i] = i2d
    return q,azTheta,np.squeeze(out)

def pyFAI_saveChi(fname,q,i,e=None,ai=None,overwrite=False):
    if os.path.exists(fname) and not overwrite:
        log.warning("File %s exists, returning",fname)
        return
    if ai is not None:
        if not isinstance(ai,dict): ai = pyFAI_dict(ai)
        header = [ "# %s : %s" %(k,v) for (k,v) in zip(ai.keys(),ai.values()) ]
        header = "\n".join(header)[1:]  # skip first '#', will be added by np.savetxt
    else:
        header = ""
    x = np.stack( (q,i,e) ) if e is not None else np.stack( (q,i) )
    np.savetxt(fname,x.T,fmt="%+10.5e",header=header)

class pyFAI_storage(dict):
    """ Storage for pyfai integrated info """
    def __init__(self,fileOrDict):
        if isinstance(fileOrDict,dict):
            self.filename = None
            d = fileOrDict
        else:
            assert isinstance(fileOrDict,str)
            self.filename = fileOrDict
            d = storage.read(fileOrDict)

        # allow accessing with .data, .delays, etc.
        for k,v in d.items(): setattr(self,k,v)

        # allow accessing as proper dict
        self.update( **dict(d) )

    def __setitem__(self, key, value):
        setattr(self,key,value)
        super().__setitem__(key, value)

    def __delitem__(self, key):
        delattr(self,key)
        super().__delitem__(key)

    def save(self,fname=None):
        if fname is None: fname = self.filename
        assert fname is not None
        storage.save(fname,dict(self))

    #def asdict(self): return dict(self)


def readNpzFile(h5File):
    if os.path.isdir(h5File): h5File = "%s/pyfai_1d.h5" % h5File
    return pyFAI_storage(h5File)

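A quick sketch of the dual dict/attribute access the class above provides (file name hypothetical):

    s = pyFAI_storage(dict(q=np.arange(10), data=np.zeros((2, 10))))
    s.q is s["q"]       # True: the same array is reachable as attribute or key
    s["scale"] = 2.     # __setitem__ keeps s.scale in sync
    s.save("test.npz")  # dispatches on the extension via storage.save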
def _getAI(poni,folder):
    if isinstance(poni,pyFAI.azimuthalIntegrator.AzimuthalIntegrator):
        ai = poni
    elif isinstance(poni,dict):
        ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**poni)
    else:
        if poni == 'auto':
            temp = os.path.abspath(folder)
            path = pathlib.Path(temp)
            folders = [ str(path), ]
            for p in path.parents: folders.append(str(p))
            folders.append( "./" )
            folders.append( os.path.expanduser("~/") )
            for path in folders:
                poni = path + "/" + "pyfai.poni"
                if os.path.exists(poni):
                    log.info("Found pyfai.poni in %s",path)
                    break
                else:
                    log.debug("Could not find pyfai.poni in %s",path)
        ai = pyFAI.load(poni)
    return ai

def doFolder(folder,files='*.edf*',nQ=1500,force=False,mask=None,
    saveChi=True,poni='auto',h5File='auto',diagnostic=None):
    """ calc 1D curves from files in folder, returning a dictionary of results

        nQ    : number of Q-points (equispaced)
        force : if True, redo from the beginning even if previous data are found;
                if False, process only new files
        mask  : can be a filename or an array of booleans; pixels that are True
                are disregarded
        saveChi: self-explanatory
        poni  : could be:
            → an AzimuthalIntegrator instance
            → a filename
            → a dictionary (used to bootstrap an AzimuthalIntegrator via
              AzimuthalIntegrator(**poni))
            → the string 'auto' that will look for the file 'pyfai.poni' in:
              1   'folder' first
              2   in ../folder
              3   in ../../folder
              ....
              n-1 in pwd
              n   in the home folder
    """

    if h5File == 'auto': h5File = folder + "/" + "pyfai_1d.h5"

    if os.path.exists(h5File) and not force:
        print("Loading")
        saved = readNpzFile(h5File)
        print("done")
    else:
        saved = None

    # which poni file to use:
    ai = _getAI(poni,folder)

    files = glob.glob("%s/%s"%(folder,files))
    files.sort()
    if saved is not None:
        files = [f for f in files if getBasename(f) not in saved["files"]]

    if len(files) > 0:
        # work out mask to use
        if isinstance(mask,np.ndarray):
            mask = mask.astype(bool)
        elif mask is not None:
            mask = pyFAIread(mask).astype(bool)

        data = np.empty( (len(files),nQ),dtype=np.float32 )
        err  = np.empty( (len(files),nQ),dtype=np.float32 )
        for ifname,fname in enumerate(files):
            img = pyFAIread(fname)
            q,i,e = pyFAI1d(ai,img,mask=mask,npt_radial=nQ)
            data[ifname] = i
            err[ifname]  = e
            if saveChi:
                chi_fname = removeExt(fname) + ".chi"
                pyFAI_saveChi(chi_fname,q,i,e,ai=ai,overwrite=True)

        files = [ getBasename(f) for f in files ]
        files = np.asarray(files)
        if saved is not None:
            files = np.concatenate( (saved["files"],files) )
            data  = np.concatenate( (saved["data"], data) )
            err   = np.concatenate( (saved["err"],  err) )
        ret = dict(q=q,folder=folder,files=files,data=data,err=err,
            pyfai=pyFAI_dict(ai),mask=mask)

        # add info from diagnostic if provided
        if diagnostic is not None:
            for k in diagnostic:
                ret[k] = np.asarray( [diagnostic[k][f] for f in ret['files']] )

        # save through the storage module so the .h5/.npz dispatch matches read()
        if h5File is not None: storage.save(h5File,ret)
    else:
        ret = saved
    return pyFAI_storage(ret)


def _calc_R(x,y, xc, yc):
    """ calculate the distance of each 2D point from the center (xc, yc) """
    return np.sqrt((x-xc)**2 + (y-yc)**2)


def _chi2(c, x, y):
    """ calculate the algebraic distance between the data points and the mean
        circle centered at c=(xc, yc) """
    Ri = _calc_R(x, y, *c)
    return Ri - Ri.mean()


def leastsq_circle(x,y):
    from scipy import optimize
    # use the barycenter as starting estimate for the center
    center_estimate = np.nanmean(x), np.nanmean(y)
    center, ier = optimize.leastsq(_chi2, center_estimate, args=(x,y))
    xc, yc = center
    Ri = _calc_R(x, y, *center)
    R = Ri.mean()
    residu = np.sum((Ri - R)**2)
    return xc, yc, R

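A self-contained check of the circle fit on synthetic noisy points (true center (3, -2), radius 10):

    t = np.linspace(0, 2*np.pi, 50)
    x = 3. + 10.*np.cos(t) + np.random.normal(scale=0.1, size=t.size)
    y = -2. + 10.*np.sin(t) + np.random.normal(scale=0.1, size=t.size)
    xc, yc, R = leastsq_circle(x, y)    # -> xc≈3, yc≈-2, R≈10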
def pyFAI_find_center(img,psize=100e-6,dist=0.1,wavelength=0.8e-10,**kwargs):
    plt.ion()
    kw = dict( pixel1=psize, pixel2=psize, dist=dist, wavelength=wavelength )
    kw.update(kwargs)
    ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(**kw)
    fig_img,ax_img = plt.subplots(1,1)
    fig_pyfai,ax_pyfai = plt.subplots(1,1)
    ax_img.imshow(img)
    plt.sca(ax_img)  # set the figure to use for mouse interaction
    ans = ""
    print("Enter 'end' when done")
    while ans != "end":
        if ans == "":
            print("Click on beam center:")
            plt.sca(ax_img)  # set the figure to use for mouse interaction
            xc,yc = plt.ginput()[0]
        else:
            xc,yc = map(float,ans.split(","))
        print("Selected center:",xc,yc)
        ai.set_poni1(xc*psize)
        ai.set_poni2(yc*psize)
        q,az,i = pyFAI2d(ai,img)
        ax_pyfai.pcolormesh(q,az,i)
        ax_pyfai.set_title(str( (xc,yc) ))
        plt.pause(0.01)
        plt.draw()
        ans = input("Press Enter to continue clicking, or type xc,yc values: ")
    print("Final values: (in pixels) %.3f %.3f"%(xc,yc))
    return ai


#### Utilities for chi files ####

def chiRead(fname,scale=1):
    q,i = np.loadtxt(fname,unpack=True,usecols=(0,1))
    return q,i*scale


def chiPlot(fname,useTheta=False,E=12.4):
    q,i = chiRead(fname)
    lam = 12.4/E
    # q = 4*pi*sin(theta)/lambda  ->  2*theta in degrees
    theta = 2*180/np.pi*np.arcsin(q*lam/4/np.pi)
    if useTheta:
        x = theta
    else:
        x = q
    plt.plot(x,i,label=fname)


def chiAverage(folder,basename="",scale=1,returnAll=False,plot=False,showTrend=False,norm=None):
    files = glob.glob("%s/%s*chi"%(folder,basename))
    files.sort()
    print(files)
    if len(files) == 0:
        print("No file found (basename %s)" % basename)
        return None
    q,_ = chiRead(files[0])
    i = np.asarray( [ chiRead(f)[1] for f in files ] )
    if norm is not None:
        idx = (q > norm[0]) & (q < norm[1])
        norm = np.nanmean(i[:,idx],axis=1)
        i = i/norm[:,np.newaxis]
    if (showTrend and plot): plt.subplot(1,2,1)
    if showTrend:
        plt.pcolormesh(np.arange(i.shape[0]),q,i.T)
        plt.xlabel("image number, 0 being the oldest")
        plt.ylabel(r"q ($\AA^{-1}$)")
    if (showTrend and plot): plt.subplot(1,2,2)
    if plot:
        plt.plot(q,i.mean(axis=0)*scale)
    if (plot or showTrend):
        plt.title(folder+"/"+basename)
    if returnAll:
        return q,i.mean(axis=0)*scale,i
    else:
        return q,i.mean(axis=0)*scale
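A minimal sketch of averaging the .chi files written by doFolder (folder and basename are hypothetical):

    q, i_av = chiAverage("/data/run42", basename="sample", norm=(2.0, 2.5))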
@@ -0,0 +1,154 @@
from __future__ import print_function,division

import logging as log
log.basicConfig(level=log.INFO)

import numpy as np
np.seterr(all='ignore')

import os
import glob
import collections

try:
    import matplotlib.pyplot as plt
except ImportError:
    log.warning("Can't import matplotlib !")


def subtractReferences(i,idx_ref,useRatio=False):
    """ given data in i (first index is shot num) and the indices of the
        references (idx_ref, array of integers) it interpolates the closest
        reference data for each shot and subtracts it (or divides it,
        depending on useRatio = [True|False]) """
    iref = np.empty_like(i)
    idx_ref = np.squeeze(idx_ref)
    idx_ref = np.atleast_1d(idx_ref)
    if idx_ref.shape[0] == 1:
        return i - i[idx_ref]
    # shots before the first reference use the first reference
    iref[:idx_ref[0]] = i[idx_ref[0]]
    # shots after the last reference use the last reference
    iref[idx_ref[-1]:] = i[idx_ref[-1]]
    _ref = 0
    for _i in range(idx_ref[0],idx_ref[-1]):
        idx_ref_before = idx_ref[_ref]
        idx_ref_after  = idx_ref[_ref+1]
        ref_before = i[idx_ref_before]
        ref_after  = i[idx_ref_after]
        # fraction of the way from the previous reference to the next one
        frac = float(_i-idx_ref_before)/(idx_ref_after-idx_ref_before)
        if frac == 0:
            iref[_i] = ref_before
        elif frac == 1:
            iref[_i] = ref_after
        else:
            # linear interpolation: the closer reference gets the larger weight
            iref[_i] = (1-frac)*ref_before + frac*ref_after
        if _i >= idx_ref_after: _ref += 1
    if useRatio:
        i /= iref
    else:
        i -= iref
    return i

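A tiny numeric check of the reference interpolation: with references at shots 0 and 4 and a signal that drifts linearly with shot number, the interpolated reference matches every shot, so the subtraction removes the drift entirely:

    i = 10. + np.arange(5, dtype=float).reshape(5, 1)   # 5 shots, 1 'pixel'
    out = subtractReferences(i, np.array([0, 4]))       # -> all zeros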
def averageScanPoints(scan,data,isRef=None,lpower=None,useRatio=False,\
        funcForEveraging=np.nanmean):
    """ given scan points in 'scan' and corresponding data in 'data',
        average all data corresponding to exactly the same scan point.
        If the values in scan come from a readback, rounding might be
        necessary.
        No normalization is done inside this function.
        if isRef is provided it must be a boolean array of the same shape as
        'scan'; if there is at least one scan point marked as True, the data
        are subtracted/divided by the interpolated reference
        if lpower is provided the data are divided by it (how this is done
        depends on whether the ratio is used or not)
        funcForEveraging: usually np.nanmean or np.nanmedian; it can be any
        function that supports axis=0 as keyword argument
    """

    if isRef is None: isRef = np.zeros( data.shape[0], dtype=bool )
    assert data.shape[0] == isRef.shape[0]
    # subtract reference only if there is at least one
    if isRef.sum() > 0:
        data = subtractReferences(data,np.argwhere(isRef), useRatio=useRatio)
    else:
        data = data.copy()  # create local copy

    # normalize signal by laser intensity if provided
    if lpower is not None:
        assert lpower.shape[0] == data.shape[0]
        # expand lpower to allow broadcasting
        shape = [data.shape[0],] + [1,]*(data.ndim-1)
        lpower = lpower.reshape(shape)
        if useRatio is False:
            data /= lpower
        else:
            data = (data-1)/lpower+1

    scan_pos = np.unique(scan)
    shape_out = [len(scan_pos),] + list(data.shape[1:])
    ret = np.empty(shape_out)
    err = np.empty(shape_out)
    dataInScanPoint = []
    for i,t in enumerate(scan_pos):
        shot_idx = (scan == t)
        #if shot_idx.sum() > 0:
        ret[i] = funcForEveraging(data[shot_idx],axis=0)
        dataInScanPoint.append( data[shot_idx] )
        err[i] = np.std(data[shot_idx], axis=0)/np.sqrt(shot_idx.sum())
    return dict(scan=scan_pos,data=ret,err=err,dataInScanPoint=dataInScanPoint)

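A short usage sketch with synthetic data (shots at delay -5 are the references):

    scan = np.array([-5., 1., 1., -5., 1.])
    data = np.random.normal(size=(5, 100))
    res = averageScanPoints(scan, data, isRef=(scan == -5.))
    res["scan"]         # array([-5., 1.])
    res["data"].shape   # (2, 100): one averaged curve per scan point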
def calcTimeResolvedSignal(scan,data,reference="min",monitor=None,q=None,**kw):
    """
        reference: can be 'min', 'max', a float|integer or an array of booleans
        q        : is needed if monitor is a tuple|list (it is then interpreted
                   as a q-range normalization)
        other keywords are passed to averageScanPoints
    """
    if reference == "min":
        isRef = (scan == scan.min())
    elif reference == "max":
        isRef = (scan == scan.max())
    elif isinstance(reference,(float,int)):
        isRef = (scan == reference)
    else:
        isRef = reference

    # normalize if needed
    if monitor is not None:
        if isinstance(monitor,(tuple,list)):
            assert q is not None
            assert data.ndim == 2
            idx = (q >= monitor[0]) & (q <= monitor[1])
            monitor = np.nanmedian(data[:,idx],axis=1)
        # reshape so that one monitor value per shot broadcasts over the data
        monitor = np.asarray(monitor).reshape( [data.shape[0],] + [1,]*(data.ndim-1) )
        data = data/monitor
    return averageScanPoints(scan,data,isRef=isRef,**kw)

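Putting the pieces together (the inputs 'delays', 'curves' and 'q' are hypothetical, e.g. coming from azav.doFolder): normalize each curve by its median in a q-range, take the earliest scan point as reference, then average per delay:

    res = calcTimeResolvedSignal(delays, curves, reference="min",
                                 monitor=(1.5, 2.5), q=q)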
def read_diff_av(folder,plot2D=False,save=None):
    print("Never tested !!!")
    basename = folder+"/"+"diff_av*"
    files = glob.glob(basename)
    files.sort()
    if len(files) == 0:
        print("No file found (basename %s)" % basename)
        return None
    temp = [os.path.basename(f[:-4]) for f in files]
    delays = [f.split("_")[-1] for f in temp]
    diffav = collections.OrderedDict()
    diffs = collections.OrderedDict()
    for d,f in zip(delays,files):
        data = np.loadtxt(f)
        diffav[d] = data[:,1]
        diffs[d] = np.loadtxt(folder+"/diffs_%s.dat"%d)[:,1:]
        q = data[:,0]
    # NOTE: strToTime comes from mcutils; 'mc' is not imported in this module
    t = np.asarray( [mc.strToTime(ti) for ti in delays] )
    if plot2D:
        idx = t > 0
        i = np.asarray( list(diffav.values()) )
        plt.pcolor(np.log10(t[idx]),q,i[idx].T)
        plt.xlabel(r"$\log_{10}(t)$")
        plt.ylabel(r"q ($\AA^{-1}$)")
    it = np.asarray( list(diffav.values()) )
    if save:
        tosave = np.vstack( (q,it) )
        header = np.hstack( (len(it),t) )
        tosave = np.vstack( (header,tosave.T) )
        np.savetxt(folder + "/all_diffs_av_matrix.txt",tosave)
    return q,it,diffs,t

@@ -0,0 +1,76 @@
""" hdf5 file based storage; this module adds the possibility to dump a dict
    as an hdf5 file """
import numpy as np
import os
import h5py
import collections


def dictToH5Group(d,group):
    for key,value in d.items():
        if not isinstance(value,(dict,collections.OrderedDict)):
            # hacks for special cases:
            # h5py can't handle numpy unicode arrays
            if isinstance(value,np.ndarray) and value.dtype.char == "U":
                value = np.asarray([vv.encode('ascii') for vv in value])
            # h5py can't save None
            if value is None: value = "NONE_PYTHON_OBJECT"
            group[key] = value
        else:
            group.create_group(key)
            dictToH5Group(value,group[key])


def dictToH5(h5,d):
    h5 = h5py.File(h5,mode="w")
    # group = h5.create_group("/")
    dictToH5Group(d,h5["/"])
    h5.close()


def h5dataToDict(h5):
    if isinstance(h5,h5py.Dataset):
        temp = h5[...]
        # hacks for special cases:
        # unwrap 0d arrays
        if isinstance(temp,np.ndarray) and temp.ndim == 0:
            temp = temp.item()
        # None was stored as a placeholder string (h5py can't handle None)
        if temp == "NONE_PYTHON_OBJECT": temp = None
        return temp
    else:
        ret = dict()
        for k,v in h5.items(): ret[k] = h5dataToDict(v)
        return ret


def h5ToDict(h5):
    with h5py.File(h5,"r") as h:
        ret = h5dataToDict( h["/"] )
    return ret


def npzToDict(npzFile):
    with np.load(npzFile) as npz: d = dict(npz)
    # unwrap 0d arrays
    for key,value in d.items():
        if isinstance(value,np.ndarray) and value.ndim == 0: d[key] = value.item()
    return d


def dictToNpz(npzFile,d): np.savez(npzFile,**d)


def read(fname):
    extension = os.path.splitext(fname)[1]
    if extension == ".npz":
        return npzToDict(fname)
    elif extension == ".h5":
        return h5ToDict(fname)
    else:
        raise ValueError("Extension must be h5 or npz, it was %s"%extension)


def save(fname,d):
    extension = os.path.splitext(fname)[1]
    if extension == ".npz":
        return dictToNpz(fname,d)
    elif extension == ".h5":
        return dictToH5(fname,d)
    else:
        raise ValueError("Extension must be h5 or npz")
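A round-trip sketch of the storage module (file names hypothetical):

    d = dict(q=np.linspace(0, 4, 100), info=dict(sample="water", t0=None))
    save("test.h5", d)    # nested dicts become hdf5 groups; None is encoded
    d2 = read("test.h5")  # -> plain dict, with None restored
    save("test.npz", dict(q=d["q"]))  # the npz path expects flat dicts of arrays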
@@ -0,0 +1,49 @@
from __future__ import print_function,division
import numpy as np
import os


def xrayAttLenght(*args,**kw):
    from periodictable import xsf
    n = xsf.index_of_refraction(*args,**kw)
    if "wavelength" not in kw:
        wavelength = 12.398/np.asarray(kw["energy"])
    else:
        wavelength = np.asarray(kw["wavelength"])
    attenuation_length = np.abs( (wavelength*1e-10)/(4*np.pi*np.imag(n)) )
    return attenuation_length


def xrayFluo(atom,density,energy=7.,length=30.,I0=1e10,\
        det_radius=1.,det_dist=10.,det_material="Si",det_thick=300,verbose=True):
    """ compound: anything periodictable would understand
        density: in mM
        length: sample length in um
        energy: in keV, could be an array
    """
    import periodictable
    from periodictable import xsf
    wavelength = 12.398/energy
    atom = periodictable.__dict__[atom]
    # 1e3 -> from mM to M
    # 1e3 -> from L to cm3
    # so 1 mM = 1e-3 M/L = 1e-6 M/cm3
    density_g_cm3 = density*1e-6*atom.mass
    n = xsf.index_of_refraction(atom,density=density_g_cm3,wavelength=wavelength)
    attenuation_length = xrayAttLenght( atom,density=density_g_cm3,energy=energy )
    # um -> m: 1e-6
    fraction_absorbed = 1.-np.exp(-length*1e-6/attenuation_length)
    if verbose:
        print("Fraction of x-ray photons absorbed by the sample:",fraction_absorbed)
    ## detector ##
    det_area = np.pi*det_radius**2
    det_solid_angle = det_area/(4*np.pi*det_dist**2)
    if verbose:
        print("Detector fraction of solid angle:",det_solid_angle)
    det_att_len = xrayAttLenght(det_material,wavelength=atom.K_alpha)
    det_abs = 1-np.exp(-det_thick*1e-6/det_att_len)
    if verbose:
        print("Detector efficiency (assuming E_fluo = E_K_alpha):",det_abs)
    eff = fraction_absorbed*det_solid_angle*det_abs
    if verbose:
        print("Overall intensity (as ratio to the incoming one):",eff)
    return eff
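A minimal call sketch (requires the periodictable package; the numbers are indicative only):

    att = xrayAttLenght("Si", density=2.33, energy=10.)  # attenuation length in m
    eff = xrayFluo("Fe", density=1., energy=7.2)         # fluorescence detection efficiency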