import logging
log = logging.getLogger(__name__)

import os
import collections

import numpy as np

from . import azav
from . import dataReduction
from . import utils
from . import storage
from . import filters

default_extension = ".npz"

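# Overview of the processing chain defined in this module:
#   doFolder_azav    : azimuthal average of every image in a folder (via azav.doFolder)
#   doFolder_dataRed : time-resolved differences from the azimuthally averaged curves
#   doFolder         : runs both, optionally looping ("online" mode) to pick up new images
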
def _conv(x):
    """ convert a field to float, returning NaN if the conversion fails """
    try:
        x = float(x)
    except (TypeError, ValueError):
        x = np.nan
    return x

def _readDiagnostic(fname, retry=3):
    """ read the diagnostics file with np.genfromtxt, retrying up to `retry`
        times in case of transient read errors """
    ntry = 0
    while ntry < retry:
        try:
            data = np.genfromtxt(fname, usecols=(2, 3),
                                 dtype=None, converters={3: lambda x: _conv(x)},
                                 names=['fname', 'delay'])
            return data
        except Exception as e:
            log.warning("Could not read diagnostic file, retrying soon, error was %s" % e)
            ntry += 1
    # it should only get here if every attempt failed
    raise ValueError("Could not read diagnostic file after %d attempts" % retry)

def readDelayFromDiagnostic(fname):
    """ return an ordered dict mapping each filename to its (rounded) delay,
        as read from diagnostics.log """
    if os.path.isdir(fname): fname += "/diagnostics.log"
    # try to read the diagnostics file a few times
    data = _readDiagnostic(fname, retry=4)
    files = data['fname'].astype(str)
    delays = data['delay']
    # skip lines whose delay cannot be interpreted as a float (like 'done', etc.)
    idx_ok = np.isfinite(delays)
    files = files[idx_ok]
    delays = delays[idx_ok]
    delays = np.round(delays.astype(float), 12)
    return collections.OrderedDict(zip(files, delays))

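# Usage sketch (hypothetical folder and filenames; following the column choice
# above, column 2 of diagnostics.log is the image filename and column 3 the delay):
#
#   delays = readDelayFromDiagnostic("/data/run01")
#   # OrderedDict([('img_0001.edf', -5e-09), ('img_0002.edf', 1e-10), ...])
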
def _findDark(line):
    """ extract the dark value from a 'pdN dark/sec : value' logfile line """
    _, value = line.split(":")
    return float(value)


def _delayToNum(delay):
    """ convert a logfile delay field to a number; the string 'off' maps to -10 """
    if delay.decode('ascii') == 'off':
        delay = -10
    else:
        delay = utils.strToTime(delay)
    return delay

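# Examples of what these helpers handle (the exact header/delay string format is
# an assumption inferred from the string matching in readLogFile below):
#
#   _findDark("# pd1 dark/sec : 0.15")   # -> 0.15
#   _delayToNum(b"off")                  # -> -10
#   _delayToNum(b"100ps")                # -> whatever utils.strToTime returns for it
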
def findLogFile(folder):
    """ find the id9-style logfile in `folder`, i.e. the *.log file that is not
        diagnostics.log """
    files = utils.getFiles(folder, basename='*.log')
    files.remove(os.path.join(folder, "diagnostics.log"))
    logfile = files[0]
    if len(files) > 1:
        log.warning("Found more than one *.log file that is not diagnostics.log: %s" % files)
    return logfile

def readLogFile(fnameOrFolder, subtractDark=False, skip_first=0, asDataStorage=True, last=None):
    """ read an id9-style logfile; fnameOrFolder can be the logfile itself or the
        folder that contains it """
    if os.path.isdir(fnameOrFolder):
        fname = findLogFile(fnameOrFolder)
    else:
        fname = fnameOrFolder
    with open(fname, "r") as f:
        lines = f.readlines()
    lines = [line.strip() for line in lines]
    # collect the dark/sec values quoted in the header (if any)
    darks = {}
    for line in lines:
        if line.find("pd1 dark/sec") >= 0: darks['pd1ic'] = _findDark(line)
        if line.find("pd2 dark/sec") >= 0: darks['pd2ic'] = _findDark(line)
        if line.find("pd3 dark/sec") >= 0: darks['pd3ic'] = _findDark(line)
        if line.find("pd4 dark/sec") >= 0: darks['pd4ic'] = _findDark(line)
    # find the first non-header line; the last header line holds the column names,
    # and comments="%" keeps genfromtxt from discarding it as a '#' comment
    for iline, line in enumerate(lines):
        if not line.startswith("#"): break
    data = np.genfromtxt(fname, skip_header=iline - 1, names=True, comments="%",
                         dtype=None, converters={'delay': lambda s: _delayToNum(s)})
    if subtractDark:
        for diode in ['pd1ic', 'pd2ic', 'pd3ic', 'pd4ic']:
            if diode in darks: data[diode] = data[diode] - darks[diode] * data['timeic']
    data = data[skip_first:last]
    if asDataStorage:
        # rstrip("_") cleans up the trailing _ that appears for some reason in 'file_'
        data = storage.DataStorage(dict((name.rstrip("_"), data[name]) for name in data.dtype.names))
        data.file = data.file.astype(str)
    return data

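# Usage sketch (hypothetical path; column names such as 'delay', 'file' and the
# pdNic/timeic fields are assumed to be present in the logfile header):
#
#   log_data = readLogFile("/data/run01", subtractDark=True)
#   log_data.delay, log_data.file   # per-image delays and filenames as DataStorage fields
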
def doFolder_azav(folder, nQ=1500, files='*.edf*', force=False, mask=None,
                  saveChi=True, poni='pyfai.poni', storageFile='auto', dark=9.9,
                  zingerFilter=30, qlims=(0, 10), removeBack=False, removeBack_kw=dict()):
    """ very small wrapper around azav.doFolder, essentially just reading the
        delays from diagnostics.log and passing them along """
    diag = dict(delays=readDelayFromDiagnostic(folder))
    if storageFile == 'auto': storageFile = folder + "/" + "pyfai_1d" + default_extension

    data = azav.doFolder(folder, files=files, nQ=nQ, force=force, mask=mask,
                         saveChi=saveChi, poni=poni, storageFile=storageFile,
                         diagnostic=diag, dark=dark, save=False)
    # background removal, currently disabled:
    #try:
    #    if removeBack is not None:
    #        _, data.data = azav.removeBackground(data, qlims=qlims, **removeBack_kw)
    #except Exception as e:
    #    log.error("Could not remove background, error was %s" % (str(e)))

    if zingerFilter > 0:
        data.data = filters.removeZingers(data.data, threshold=zingerFilter)

    # data.save(storageFile); it does not save err?
    # normalization over a q-range, also currently disabled:
    # idx = utils.findSlice(data.q, qlims)
    # n = np.nanmean(data.data[:, idx], axis=1)
    # data.norm_range = qlims
    # data.norm = n
    # n = utils.reshapeToBroadcast(n, data.data)
    # data.data_norm = data.data / n

    data.save(storageFile)
    return data

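# Usage sketch (hypothetical folder; azimuthal integration needs the calibration
# file given by the `poni` keyword, 'pyfai.poni' by default, and images matching
# `files` inside the folder):
#
#   data = doFolder_azav("/data/run01", nQ=1000)
#   data.q, data.data   # q axis and one azimuthally averaged curve per image
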
def doFolder_dataRed(azavStorage, monitor=None, funcForAveraging=np.nanmean,
                     qlims=None, outStorageFile='auto', reference='min'):
    """ azavStorage is a DataStorage instance, the filename of a saved one,
        or the folder that contains it """
    if isinstance(azavStorage, storage.DataStorage):
        data = azavStorage
        folder = azavStorage.folder
    elif os.path.isfile(azavStorage):
        folder = os.path.dirname(azavStorage)
        data = storage.DataStorage(azavStorage)
    else:
        # assume it is just a folder name
        folder = azavStorage
        azavStorage = folder + "/pyfai_1d" + default_extension
        data = storage.DataStorage(azavStorage)

    #assert data.q.shape[0] == data.data.shape[1] == data.err.shape[1]

    # restrict the q-range if asked to
    if qlims is not None:
        idx = (data.q > qlims[0]) & (data.q < qlims[1])
        data.data = data.data[:, idx]
        data.err = data.err[:, idx]
        data.q = data.q[idx]

    # calculate differences
    diffs = dataReduction.calcTimeResolvedSignal(data.delays, data.data, err=data.err,
                                                 q=data.q, reference=reference, monitor=monitor,
                                                 funcForAveraging=funcForAveraging)

    # save txt and npz file
    dataReduction.saveTxt(folder, diffs, info=data.pyfai_info)
    if outStorageFile == 'auto':
        outStorageFile = folder + "/diffs" + default_extension
    diffs.save(outStorageFile)

    return data, diffs

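# Usage sketch (continuing from the doFolder_azav example; results are also
# written to txt files and to 'diffs.npz' inside the folder):
#
#   data, diffs = doFolder_dataRed("/data/run01", qlims=(1, 8))
#   diffs.data   # time-resolved difference curves
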
def doFolder(folder, azav_kw=dict(), datared_kw=dict(), online=True, retryMax=20):
    """ run doFolder_azav + doFolder_dataRed on `folder`; in online mode it keeps
        looping, re-processing and updating the plot whenever new images show up """
    import matplotlib.pyplot as plt
    if folder == "./": folder = os.path.abspath(folder)
    fig = plt.figure()
    lastNum = None
    keepGoing = True
    lines = None
    retryNum = 0
    if online: print("Press Ctrl+C to stop")
    while keepGoing and retryNum < retryMax:
        try:
            data = doFolder_azav(folder, **azav_kw)
            # check if there are new data
            if lastNum is None or lastNum < data.data.shape[0]:
                data, diffs = doFolder_dataRed(data, **datared_kw)
                if lines is None or len(lines) != diffs.data.shape[0]:
                    lines, _ = utils.plotdiffs(diffs, fig=fig, title=folder)
                else:
                    utils.updateLines(lines, diffs.data)
                plt.draw()
                lastNum = data.data.shape[0]
                retryNum = 0
            else:
                retryNum += 1
            plt.pause(30)
        except KeyboardInterrupt:
            keepGoing = False
        if not online: keepGoing = False
    return data, diffs
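# Usage sketch (online monitoring of a folder while data are being acquired;
# the path and keyword values are hypothetical):
#
#   data, diffs = doFolder("/data/run01",
#                          azav_kw=dict(nQ=1000),
#                          datared_kw=dict(qlims=(1, 8)),
#                          online=True)
#   # re-checks the folder roughly every 30 s; stop with Ctrl+C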