2017-01-10 22:43:22 +01:00
|
|
|
import logging
|
|
|
|
log = logging.getLogger(__name__)
|
2017-01-05 19:22:37 +01:00
|
|
|
|
|
|
|
import os
|
|
|
|
import collections
|
|
|
|
import numpy as np
|
2017-01-06 18:06:34 +01:00
|
|
|
from . import azav
|
|
|
|
from . import dataReduction
|
|
|
|
from . import utils
|
2017-01-07 23:53:12 +01:00
|
|
|
from . import storage
|
2017-02-09 17:17:22 +01:00
|
|
|
from . import filters
|
2017-01-06 15:40:26 +01:00
|
|
|
|
2017-03-16 23:16:53 +01:00
|
|
|
default_extension = ".h5"
|
2017-01-05 19:22:37 +01:00
|
|
|
|
|
|
|
def _conv(x):
|
|
|
|
try:
|
|
|
|
x = float(x)
|
|
|
|
except:
|
|
|
|
x = np.nan
|
|
|
|
return x
|
|
|
|
|
2017-02-09 17:17:22 +01:00
|
|
|
def _readDiagnostic(fname, retry=3):
    """Read a diagnostics.log file, retrying up to *retry* times.

    Parameters
    ----------
    fname : str
        Path to the diagnostics file.
    retry : int
        Number of read attempts before giving up.

    Returns
    -------
    numpy structured array with fields 'fname' (column 2) and 'delay'
    (column 3, converted to float or NaN via ``_conv``).

    Raises
    ------
    ValueError if the file could not be read after *retry* attempts.
    """
    ntry = 0
    while ntry < retry:
        try:
            # columns 2 and 3 hold the image filename and the delay;
            # non-numeric delays (e.g. "done") become NaN via _conv
            data = np.genfromtxt(fname, usecols=(2, 3),
                                 dtype=None, converters={3: lambda x: _conv(x)},
                                 names=['fname', 'delay'])
            return data
        except Exception as e:
            # log.warn is deprecated; use log.warning
            log.warning("Could not read diagnostic file, retrying soon,error was %s" % e)
            ntry += 1
    # it should not arrive here
    raise ValueError("Could not read diagnostic file after %d attempts" % retry)
|
|
|
|
|
2017-03-15 18:23:20 +01:00
|
|
|
def readDiagnostic(fname):
    """Read a diagnostics.log file (or the one inside folder *fname*).

    Returns a dict with keys:
      file : array of basenames of the image files
      scan : array of delays (rounded to 12 decimals), one per file

    Lines whose delay cannot be parsed as float (e.g. "done") are skipped.
    """
    # accept either the file itself or its containing folder
    if os.path.isdir(fname):
        fname = os.path.join(fname, "diagnostics.log")
    # try to read diagnostic couple of times
    data = _readDiagnostic(fname, retry=4)
    files = data['fname'].astype(str)
    delays = data['delay']
    # skip lines that cannot be interpreted as float (like done, etc)
    idx_ok = np.isfinite(delays)
    files = files[idx_ok]
    files = np.asarray([utils.getBasename(f) for f in files])
    delays = delays[idx_ok]
    delays = np.round(delays.astype(float), 12)
    return dict(file=files, scan=delays)
|
2017-01-05 19:22:37 +01:00
|
|
|
|
2017-03-09 23:20:57 +01:00
|
|
|
def _findDark(line):
|
|
|
|
_,value = line.split(":")
|
|
|
|
return float(value)
|
|
|
|
|
|
|
|
def _delayToNum(delay):
    """Convert a bytes delay field to a number; the marker 'off' maps to -10."""
    # genfromtxt hands the raw bytes; decode only for the comparison
    if delay.decode('ascii') == 'off':
        return -10
    return utils.strToTime(delay)
|
|
|
|
|
|
|
|
def findLogFile(folder):
    """Return the id9 logfile in *folder*: the first *.log that is not
    diagnostics.log.

    Logs a warning (and still returns the first match) if more than one
    candidate is found.
    """
    files = utils.getFiles(folder, basename='*.log')
    # diagnostics.log is handled separately by readDiagnostic
    files.remove(os.path.join(folder, "diagnostics.log"))
    logfile = files[0]
    if len(files) > 1:
        # log.warn is deprecated; use log.warning
        log.warning("Found more than one *.log file that is not diagnostics.log: %s" % files)
    return logfile
|
|
|
|
|
2017-03-11 11:28:10 +01:00
|
|
|
def readLogFile(fnameOrFolder, subtractDark=False, skip_first=0, asDataStorage=True, last=None):
    """Read an id9-style logfile.

    Parameters
    ----------
    fnameOrFolder : str
        Logfile path, or a folder in which the logfile is searched
        (via ``findLogFile``).
    subtractDark : bool
        If True, subtract the per-second dark values (parsed from the
        header) times the integration time from the pd1ic..pd4ic diodes.
    skip_first, last : int or None
        Slice applied to the data rows (``data[skip_first:last]``).
    asDataStorage : bool
        If True, return a ``storage.DataStorage``; otherwise the raw
        numpy structured array.
    """
    if os.path.isdir(fnameOrFolder):
        fname = findLogFile(fnameOrFolder)
    else:
        fname = fnameOrFolder
    # context manager guarantees the file is closed even if reading fails
    with open(fname, "r") as f:
        lines = f.readlines()
    lines = [line.strip() for line in lines]

    # parse the per-diode dark rates from the header, e.g. "# pd1 dark/sec: 0.1"
    darks = {}
    for line in lines:
        for n in (1, 2, 3, 4):
            if line.find("pd%d dark/sec" % n) >= 0:
                darks['pd%dic' % n] = _findDark(line)

    # find the first non-comment line; [:1] (instead of [0]) avoids an
    # IndexError on an empty line
    for iline, line in enumerate(lines):
        if line.lstrip()[:1] != "#":
            break
    # skip_header=iline-1 keeps the last '#' header line so genfromtxt
    # can use it for the column names
    data = np.genfromtxt(fname, skip_header=iline - 1, names=True, comments="%",
                         dtype=None, converters={'delay': lambda s: _delayToNum(s)})
    if subtractDark:
        for diode in ['pd1ic', 'pd2ic', 'pd3ic', 'pd4ic']:
            if diode in darks:
                data[diode] = data[diode] - darks[diode] * data['timeic']
    data = data[skip_first:last]
    if asDataStorage:
        # rstrip("_") is used to clean up last _ that appera for some reason in file_
        data = storage.DataStorage(dict((name.rstrip("_"), data[name]) for name in data.dtype.names))
        data.file = data.file.astype(str)
    return data
|
|
|
|
|
|
|
|
|
2017-01-05 19:22:37 +01:00
|
|
|
|
2017-02-09 17:17:22 +01:00
|
|
|
def doFolder_azav(folder, nQ=1500, files='*.edf*', force=False, mask=None,
                  saveChi=True, poni='pyfai.poni', storageFile='auto', dark=9.9, dezinger=None,
                  qlims=(0, 10), removeBack=False, removeBack_kw=None, skip_first=0,
                  last=None):
    """ very small wrapper around azav.doFolder, essentially just reading
        the id9 logfile or diagnostics.log

        Note: qlims/removeBack/removeBack_kw are currently unused (the
        background-removal step is disabled); they are kept for interface
        compatibility. removeBack_kw default changed from a shared mutable
        dict() to None (same effective behavior).
    """
    if removeBack_kw is None:
        removeBack_kw = dict()
    try:
        loginfo = readLogFile(folder, skip_first=skip_first, last=last)
    except Exception:
        # fall back on the diagnostics file when no regular logfile is readable
        log.warning("Could not read log file, trying to read diagnostics.log")
        loginfo = readDiagnostic(folder)
    if storageFile == 'auto':
        storageFile = folder + "/" + "pyfai_1d" + default_extension

    data = azav.doFolder(folder, files=files, nQ=nQ, force=force, mask=mask,
                         saveChi=saveChi, poni=poni, storageFile=storageFile,
                         logDict=loginfo, dark=dark, save=False, dezinger=dezinger)
    data.save(storageFile)
    return data
|
|
|
|
|
|
|
|
|
2017-01-06 15:40:26 +01:00
|
|
|
|
2017-01-13 14:49:48 +01:00
|
|
|
def doFolder_dataRed(azavStorage, monitor=None, funcForAveraging=np.nanmean,
                     qlims=None, outStorageFile='auto', reference='min'):
    """ azavStorage is a DataStorage instance or the filename to read
        monitor : normalization vector that can be given as
                  1. numpy array
                  2. a list (interpreted as q-range normalization)
                  3. a string to look for as key in the log, e.g.
                     monitor="pd2ic" would result in using
                     azavStorage.log.pd2ic
    """
    # work out where the azimuthally averaged data comes from
    if isinstance(azavStorage, storage.DataStorage):
        data = azavStorage
        folder = azavStorage.folder
    elif os.path.isfile(azavStorage):
        folder = os.path.dirname(azavStorage)
        data = storage.DataStorage(azavStorage)
    else:
        # assume is just a folder name
        folder = azavStorage
        azavStorage = folder + "/pyfai_1d" + default_extension
        data = storage.DataStorage(azavStorage)

    # optionally restrict everything to a q-range
    if qlims is not None:
        keep = (data.q > qlims[0]) & (data.q < qlims[1])
        data.data = data.data[:, keep]
        data.err = data.err[:, keep]
        data.q = data.q[keep]

    # a string monitor names a column of the logfile
    if isinstance(monitor, str):
        monitor = data['log'][monitor]

    # calculate differences
    diffs = dataReduction.calcTimeResolvedSignal(
        data.log.delay, data.data,
        err=data.err, q=data.q, reference=reference, monitor=monitor,
        funcForAveraging=funcForAveraging)

    # save txt and npz file
    dataReduction.saveTxt(folder, diffs, info=data.pyfai_info)
    if outStorageFile == 'auto':
        outStorageFile = folder + "/diffs" + default_extension
    diffs.save(outStorageFile)

    return data, diffs
|
2017-02-09 17:17:22 +01:00
|
|
|
|
2017-03-15 18:23:20 +01:00
|
|
|
def doFolder(folder, azav_kw=None, datared_kw=None, online=True, retryMax=20, force=False):
    """Run azimuthal averaging + data reduction on *folder*, optionally in a
    live loop that replots as new images arrive.

    Parameters
    ----------
    azav_kw, datared_kw : dict or None
        Extra keyword arguments for doFolder_azav / doFolder_dataRed.
        (Defaults changed from shared mutable dict() to None; same behavior.)
    online : bool
        If True, keep polling (every 30 s) until Ctrl+C or *retryMax*
        consecutive polls without new data.
    force : bool
        Forwarded to doFolder_azav on the first pass only, then reset.
        NOTE(review): the original accepted `force` but never passed it on,
        while still resetting it after the first iteration — forwarding it
        appears to be the intent; an explicit 'force' in azav_kw still wins.

    Returns
    -------
    (data, diffs) from the last successful pass, or (None, None) if
    interrupted before any pass completed (previously this raised NameError).
    """
    import matplotlib.pyplot as plt
    azav_kw = dict() if azav_kw is None else azav_kw
    datared_kw = dict() if datared_kw is None else datared_kw
    if folder == "./":
        folder = os.path.abspath(folder)
    fig = plt.figure()
    lastNum = None
    keepGoing = True
    lines = None
    retryNum = 0
    data = None
    diffs = None
    if online:
        print("Press Ctrl+C to stop")
    while keepGoing and retryNum < retryMax:
        try:
            call_kw = dict(azav_kw)
            call_kw.setdefault('force', force)
            data = doFolder_azav(folder, **call_kw)
            # check if there are new data
            if lastNum is None or lastNum < data.data.shape[0]:
                data, diffs = doFolder_dataRed(data, **datared_kw)
                if lines is None or len(lines) != diffs.data.shape[0]:
                    lines, _ = utils.plotdiffs(diffs, fig=fig, title=folder)
                else:
                    utils.updateLines(lines, diffs.data)
                plt.draw()
                lastNum = data.data.shape[0]
                retryNum = 0
            else:
                retryNum += 1
            plt.pause(30)
            if force:
                force = False  # does not make sense to have always True ...
        except KeyboardInterrupt:
            keepGoing = False
        if not online:
            keepGoing = False
    return data, diffs
|