added Oleg Petrov's *write_to_tecmag* result function

Markus Rosenstihl 2015-06-10 07:03:32 +00:00
parent 4f6ae7f1db
commit eed1d10f63


@@ -13,12 +13,18 @@ from Errorable import Errorable
 from Drawable import Drawable
 from DamarisFFT import DamarisFFT
 from Signalpath import Signalpath
 #from DataPool import DataPool
 import sys
 import threading
 import types
 import tables
 import numpy
-import datetime
+import datetime # added by Oleg Petrov
+import ctypes # added by Oleg Petrov
+import struct # added by Oleg Petrov
+import os # added by Oleg Petrov
 
 class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
     def __init__(self, x = None, y = None, y_2 = None, n = None, index = None, sampl_freq = None, error = False):
@@ -35,6 +41,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
         self.common_descriptions=None
         self.time_period=[]
+        self.job_id = None # added by Oleg Petrov
 
         self.use_error = error
@@ -192,7 +199,8 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
         return max(tmp_max)
 
     def get_job_id(self):
-        return None
+        # return None
+        return self.job_id # modified by Oleg Petrov
 
     def write_to_csv(self, destination=sys.stdout, delimiter=" "):
         """
@@ -236,7 +244,8 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
         finally:
             self.lock.release()
 
-    def write_to_simpson(self, destination=sys.stdout, delimiter=" "):
+    # ------------- added by Oleg Petrov, 14 Feb 2012 ----------------------
+    def write_to_simpson(self, destination=sys.stdout, delimiter=" ", frequency=100e6):
         """
         writes the data to a text file or sys.stdout in Simpson format,
         for further processing with the NMRnotebook software;
@@ -253,6 +262,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             the_destination.write("SIMP\n")
             the_destination.write("%s%i%s"%("NP=", len(xdata), "\n"))
             the_destination.write("%s%i%s"%("SW=", self.get_sampling_rate(), "\n"))
+            the_destination.write("%s%i%s"%("REF=", frequency, "\n"))
             the_destination.write("TYPE=FID\n")
             the_destination.write("DATA\n")
             ch_no=self.get_number_of_channels()
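
The new frequency keyword ends up in the Simpson header as the REF= entry (written with %i, i.e. converted to integer Hz). A minimal call from a result script could look like the sketch below; the variable name accu and the output file name are placeholders, not taken from this commit:

    # sketch only: accu is assumed to be a completed Accumulation object
    accu.write_to_simpson("fid.simp", delimiter=" ", frequency=300.13e6)
    # the header then reads NP=<points>, SW=<sampling rate>, REF=300130000, TYPE=FID
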
@@ -267,6 +277,127 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
         finally:
             self.lock.release()
 
+    # ------------- added by Oleg Petrov, 10 Sep 2013 -----------------------
+    def write_to_tecmag(self, destination=sys.stdout, nrecords=1,\
+                        frequency=100e6,\
+                        last_delay = 1.,\
+                        receiver_phase=0.,\
+                        nucleus='1H'):
+        """
+        writes the data to a binary file in TecMag format;
+        destination can be a file object or a filename;
+        nrecords determines the 2nd dimension of data;
+        """
+        if self.job_id == None or self.n == 0:
+            raise ValueError("write_to_tecmag: cannot get a record number")
+        else:
+            record = self.job_id/self.n + 1
+
+        the_destination=destination
+        if type(destination) in types.StringTypes:
+            if record == 1 and os.path.exists(destination):
+                os.rename(destination, os.path.dirname(destination)+'/~'+os.path.basename(destination))
+            the_destination=file(destination, "ab")
+
+        self.lock.acquire()
+        try:
+            npts = [len(self), nrecords, 1, 1]
+            dwell = 1./self.get_sampling_rate()
+            sw = 0.5/dwell
+            base_freq = [frequency, 100., 0., 0.]
+            offset_freq = [0., 0., 0., 0.]
+            ob_freq = [sum(x) for x in zip(base_freq, offset_freq)]
+            date = self.time_period[0].strftime("%Y/%m/%d %H:%M:%S")
+
+            if record == 1:
+                buff = ctypes.create_string_buffer(1056)
+
+                struct.pack_into('8s', buff, 0, 'TNT1.005') # 'TNT1.000' version ID
+                struct.pack_into('4s', buff, 8, 'TMAG') # 'TMAG' tag
+                struct.pack_into('?', buff, 12, True) # BOOLean value
+                struct.pack_into('i', buff, 16, 1024) # length of Tecmag struct
+
+                #Initialize TECMAG structure:
+                struct.pack_into('4i', buff, 20, *npts) # npts[4]
+                struct.pack_into('4i', buff, 36, *npts) # actual_npts[4]
+                struct.pack_into('i', buff, 52, npts[0]) # acq_points
+                struct.pack_into('4i', buff, 56, 1, 1, 1, 1) # npts_start[4]
+                struct.pack_into('i', buff, 72, self.n) # scans
+                struct.pack_into('i', buff, 76, self.n) # actual_scans
+                struct.pack_into('i', buff, 88, 1) # sadimension
+                struct.pack_into('4d', buff, 104, *ob_freq) # ob_freq[4]
+                struct.pack_into('4d', buff, 136, *base_freq) # base_freq[4]
+                struct.pack_into('4d', buff, 168, *offset_freq) # offset_freq[4]
+                struct.pack_into('d', buff, 200, 0.0) # ref_freq
+                struct.pack_into('h', buff, 216, 1) # obs_channel
+                struct.pack_into('42s', buff, 218, 42*'2') # space2[42]
+                struct.pack_into('4d', buff, 260, sw, 0., 0., 0.) # sw[4], sw = 0.5/dwell
+                struct.pack_into('4d', buff, 292, dwell, 0., 0., 0.) # dwell[4]
+                struct.pack_into('d', buff, 324, sw) # filter, = 0.5/dwell
+                struct.pack_into('d', buff, 340, (npts[0]*dwell)) # acq_time
+                struct.pack_into('d', buff, 348, 1.) # last_delay (5*T1 minus sequence length)
+                struct.pack_into('h', buff, 356, 1) # spectrum_direction
+                struct.pack_into('16s', buff, 372, 16*'2') # space3[16]
+                struct.pack_into('d', buff, 396, receiver_phase) # receiver_phase
+                struct.pack_into('4s', buff, 404, 4*'2') # space4[4]
+                struct.pack_into('16s', buff, 444, 16*'2') # space5[16]
+                struct.pack_into('264s', buff, 608, 264*'2') # space6[264]
+                struct.pack_into('32s', buff, 884, date) # date[32]
+                struct.pack_into('16s', buff, 916, nucleus) # nucleus[16]
+                # TECMAG Structure total => 1024
+
+                struct.pack_into('4s', buff, 1044, 'DATA') # 'DATA' tag
+                struct.pack_into('?', buff, 1048, True) # BOOLean
+                struct.pack_into('i', buff, 1052, 2*4*npts[0]*npts[1]) # length of data (2 = [Re, Im]; 4 = len(c_float))
+
+                the_destination.write(buff)
+
+            # append data to the file as they appear in an experiment (RIRIRIRI blocks in linear order):
+            ch_no=self.get_number_of_channels()
+            ydata = map(self.get_ydata, xrange(ch_no))
+            if ch_no == 1:
+                ydata = [ydata, numpy.zeros(len(ydata))]
+
+            data = numpy.append([ydata[0]], [ydata[1]], axis=0)
+            data = data.T
+            data = data.flatten()
+
+            buff = ctypes.create_string_buffer(4*len(data))
+            struct.pack_into('%sf' % len(data), buff, 0, *data)
+            the_destination.write(buff)
+
+            if record == nrecords:
+                buff = ctypes.create_string_buffer(2068)
+
+                struct.pack_into('4s', buff, 0, 'TMG2') # 'TMG2' tag
+                struct.pack_into('?', buff, 4, True) # BOOLean
+                struct.pack_into('i', buff, 8, 2048) # length of Tecmag2 struct
+
+                # Leave TECMAG2 structure empty:
+                struct.pack_into('52s', buff, 372, 52*'2') # space[52]
+                struct.pack_into('866s', buff, 1194, 866*'2') # space[610]+names+strings
+                # TECMAG2 Structure total => 2048
+
+                struct.pack_into('4s', buff, 2060, 'PSEQ') # 'PSEQ' tag 658476
+                struct.pack_into('?', buff, 2064, False) # BOOLean 658480
+
+                the_destination.write(buff)
+
+            the_destination = None
+            ydata=None
+
+        finally:
+            self.lock.release()
+    # -----------------------------------------------------------------------
+
     def write_to_hdf(self, hdffile, where, name, title, complib=None, complevel=None):
         accu_group=hdffile.createGroup(where=where,name=name,title=title)
         accu_group._v_attrs.damaris_type="Accumulation"
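
write_to_tecmag appends one record per call: the TNT header (TMAG/DATA sections) is written when the computed record number is 1, each call then appends its FID as interleaved Re/Im float32 values, and the trailing TMG2/PSEQ sections are written once the last of nrecords records has arrived. A rough usage sketch, assuming a result script that receives one finished Accumulation per job; the variable names and the .tnt file name are placeholders, not from this commit:

    # sketch only: write a 16-record pseudo-2D TNT file, one record per accumulation
    for accu in finished_accumulations:        # hypothetical iterable of Accumulation objects
        accu.write_to_tecmag("t1_series.tnt",
                             nrecords=16,          # 2nd dimension of the data set
                             frequency=300.13e6,   # goes into base_freq/ob_freq
                             nucleus='1H')
    # the record index is derived from job_id and the number of scans, so the
    # accumulations must arrive in job order for the data blocks to line up
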
@@ -458,6 +589,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             else:
                 r = Accumulation(x = numpy.array(other.x, dtype="Float64"), y = tmp_y, index = other.index, sampl_freq = other.sampling_rate, n = 1, error = False)
             r.time_period=[other.job_date,other.job_date]
+            r.job_id = other.job_id # added by Oleg Petrov
             r.common_descriptions=other.description.copy()
             self.lock.release()
             return r
@@ -485,6 +617,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             r = Accumulation(x = numpy.array(self.x, dtype="Float64"), y = tmp_y, n = self.n + 1, index = self.index, sampl_freq = self.sampling_rate, error = False)
             r.time_period=[min(self.time_period[0],other.job_date),
                            max(self.time_period[1],other.job_date)]
+            r.job_id = other.job_id # added by Oleg Petrov
             if self.common_descriptions is not None:
                 r.common_descriptions={}
                 for key in self.common_descriptions.keys():
@@ -516,6 +649,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                 tmp_y.append(other.y[i])
                 tmp_ysquare.append(other.y_square[i])
             r.time_period=other.time_period[:]
+            r.job_id = other.job_id # added by Oleg Petrov
             if other.common_descriptions is not None:
                 r.common_descriptions=othter.common_descriptions.copy()
             else:
@@ -549,6 +683,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             r.time_period=[min(self.time_period[0],other.time_period[0]),
                            max(self.time_period[1],other.time_period[1])]
+            r.job_id = other.job_id # added by Oleg Petrov
             r.common_descriptions={}
             if self.common_descriptions is not None and other.common_descriptions is not None:
                 for key in self.common_descriptions.keys():
@@ -614,6 +749,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                 self.lock.release()
 
             self.time_period=[other.job_date,other.job_date]
+            self.job_id = other.job_id # added by Oleg Petrov
             self.common_descriptions=other.description.copy()
 
             return self
@@ -636,6 +772,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             self.n += 1
             self.time_period=[min(self.time_period[0],other.job_date),
                               max(self.time_period[1],other.job_date)]
+            self.job_id = other.job_id # added by Oleg Petrov
             if self.common_descriptions is not None:
                 for key in self.common_descriptions.keys():
                     if not (key in other.description and self.common_descriptions[key]==other.description[key]):
@@ -670,6 +807,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                 self.set_title(self.__title_pattern % self.n)
             self.common_descriptions=other.common_desriptions.copy()
             self.time_period=other.time_period[:]
+            self.job_id = other.job_id # added by Oleg Petrov
             self.lock.release()
 
             return self
@@ -691,6 +829,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             self.n += other.n
             self.time_period=[min(self.time_period[0],other.time_period[0]),
                               max(self.time_period[1],other.time_period[1])]
+            self.job_id = other.job_id # added by Oleg Petrov
             if self.common_descriptions is not None and other.common_descriptions is not None:
                 for key in self.common_descriptions.keys():
                     if not (key in other.description and
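
The remaining hunks all apply the same fix: whenever an Accumulation is rebuilt or updated from another result in the arithmetic methods, the job_id of the latest contribution is carried along, so get_job_id() (and hence write_to_tecmag) has a value to work with. Assuming jobs are numbered consecutively from 0 and each record is accumulated from n scans, the record index used when appending to the TNT file is simply:

    # illustrative only; mirrors the expression in write_to_tecmag (Python 2 integer division)
    record = accu.get_job_id() / accu.n + 1   # 1-based position along the nrecords dimension
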