pytables v2 to v3 conversion done

commit 91ce1971c6
parent 00fe373108
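PyTables 3.0 renamed its camelCase methods to PEP 8 snake_case equivalents (openFile -> open_file, createGroup -> create_group, and so on); this commit applies those renames across the DAMARIS sources. A minimal sketch of the pattern, with a hypothetical file name:

    import tables

    # PyTables 3.x snake_case API; the v2 spellings are shown in the comments
    h5 = tables.open_file("demo.h5", mode="w")           # v2: tables.openFile(...)
    grp = h5.create_group("/", "results", "demo group")  # v2: h5.createGroup(...)
    h5.close()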
@@ -214,7 +214,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
             self.lock.release()
 
     def write_to_hdf(self, hdffile, where, name, title, complib=None, complevel=None):
-        accu_group=hdffile.createGroup(where=where,name=name,title=title)
+        accu_group=hdffile.create_group(where=where,name=name,title=title)
         accu_group._v_attrs.damaris_type="ADC_Result"
         if self.contains_data():
             self.lock.acquire()
@@ -240,7 +240,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                     complevel=9
                 filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
 
-                index_table=hdffile.createTable(where=accu_group,
+                index_table=hdffile.create_table(where=accu_group,
                                                 name="indices",
                                                 description={"start": tables.UInt64Col(),
                                                              "length": tables.UInt64Col(),
@@ -279,7 +279,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                     else:
                         chunkshape = (min(chunkshape[0],1024*8), chunkshape[1])
                     if tables.__version__[0]=="1":
-                        time_slice_data=hdffile.createCArray(accu_group,
+                        time_slice_data=hdffile.create_carray(accu_group,
                                                              name="adc_data",
                                                              shape=timedata.shape,
                                                              atom=tables.Int32Atom(shape=chunkshape,
@@ -287,7 +287,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                                                              filters=filter,
                                                              title="adc data")
                     else:
-                        time_slice_data=hdffile.createCArray(accu_group,
+                        time_slice_data=hdffile.create_carray(accu_group,
                                                              name="adc_data",
                                                              shape=timedata.shape,
                                                              chunkshape=chunkshape,
@@ -296,9 +296,9 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                                                              title="adc data")
                     time_slice_data[:]=timedata
                 else:
-                    time_slice_data=hdffile.createArray(accu_group,
+                    time_slice_data=hdffile.create_array(accu_group,
                                                         name="adc_data",
-                                                        object=timedata,
+                                                        obj=timedata,
                                                         title="adc data")
 
             finally:
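Note in the hunk above that create_array also renamed its object keyword to obj in PyTables 3. A minimal sketch, with hypothetical file and array names:

    import numpy
    import tables

    h5 = tables.open_file("demo.h5", mode="w")
    data = numpy.zeros((1024,), dtype=numpy.int32)
    # v2: hdffile.createArray(where, name, object=data, title=...)
    h5.create_array("/", "adc_data", obj=data, title="adc data")
    h5.close()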
@@ -404,7 +404,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
     # -----------------------------------------------------------------------
 
     def write_to_hdf(self, hdffile, where, name, title, complib=None, complevel=None):
-        accu_group=hdffile.createGroup(where=where,name=name,title=title)
+        accu_group=hdffile.create_group(where=where,name=name,title=title)
         accu_group._v_attrs.damaris_type="Accumulation"
         if self.contains_data():
             self.lock.acquire()
@@ -437,7 +437,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                 filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
 
                 # tried compression filter, but no effect...
-                index_table=hdffile.createTable(where=accu_group,
+                index_table=hdffile.create_table(where=accu_group,
                                                 name="indices",
                                                 description={"start": tables.UInt64Col(),
                                                              "length": tables.UInt64Col(),
@@ -481,7 +481,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                     else:
                         chunkshape = (min(chunkshape[0],1024*8), chunkshape[1])
                     if tables.__version__[0]=="1":
-                        time_slice_data=hdffile.createCArray(accu_group,
+                        time_slice_data=hdffile.create_carray(accu_group,
                                                              name="accu_data",
                                                              shape=timedata.shape,
                                                              atom=tables.Float64Atom(shape=chunkshape,
@@ -489,7 +489,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                                                              filters=filter,
                                                              title="accu data")
                     else:
-                        time_slice_data=hdffile.createCArray(accu_group,
+                        time_slice_data=hdffile.create_carray(accu_group,
                                                              name="accu_data",
                                                              shape=timedata.shape,
                                                              chunkshape=chunkshape,
@@ -499,9 +499,9 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
 
                     time_slice_data[:]=timedata
                 else:
-                    time_slice_data=hdffile.createArray(accu_group,
+                    time_slice_data=hdffile.create_array(accu_group,
                                                         name="accu_data",
-                                                        object=timedata,
+                                                        obj=timedata,
                                                         title="accu data")
 
 
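Accumulation.write_to_hdf gets the same treatment as ADC_Result.write_to_hdf above. For reference, a minimal v3 sketch of the chunked, compressed array creation both methods use (file name, shape, and chunkshape hypothetical):

    import numpy
    import tables

    h5 = tables.open_file("demo.h5", mode="w")
    timedata = numpy.zeros((16384, 2))
    filt = tables.Filters(complevel=9, complib="zlib", shuffle=True)
    carray = h5.create_carray("/", "accu_data",
                              atom=tables.Float64Atom(),
                              shape=timedata.shape,
                              chunkshape=(8192, 2),
                              filters=filt,
                              title="accu data")
    carray[:] = timedata  # fill the on-disk array in one slice assignment
    h5.close()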
@@ -98,13 +98,13 @@ class DataPool(UserDict.DictMixin):
 
     def write_hdf5(self,hdffile,where="/",name="data_pool", complib=None, complevel=None):
         if type(hdffile) is types.StringType:
-            dump_file=tables.openFile(hdffile, mode="a")
+            dump_file=tables.open_file(hdffile, mode="a")
         elif isinstance(hdffile,tables.File):
             dump_file=hdffile
         else:
             raise Exception("expecting hdffile or string")
 
-        dump_group=dump_file.createGroup(where, name, "DAMARIS data pool")
+        dump_group=dump_file.create_group(where, name, "DAMARIS data pool")
         self.__dictlock.acquire()
         dict_keys=self.__mydict.keys()
         self.__dictlock.release()
@@ -117,7 +117,7 @@ class DataPool(UserDict.DictMixin):
             for part in namelist[:-1]:
                 dir_part="dir_"+str(part).translate(DataPool.translation_table)
                 if not dir_part in dump_dir:
-                    dump_dir=dump_file.createGroup(dump_dir,name=dir_part,title=part)
+                    dump_dir=dump_file.create_group(dump_dir,name=dir_part,title=part)
                 else:
                     if dump_dir._v_children[dir_part]._v_title==part:
                         dump_dir=dump_dir._v_children[dir_part]
@@ -125,7 +125,7 @@ class DataPool(UserDict.DictMixin):
                         extension_count=0
                         while dir_part+"_%03d"%extension_count in dump_dir:
                             extension_count+=1
-                        dump_dir=dump_file.createGroup(dump_dir,
+                        dump_dir=dump_file.create_group(dump_dir,
                                                        name=dir_part+"_%03d"%extension_count,
                                                        title=part)
 
@@ -240,7 +240,7 @@ class MeasurementResult(Drawable.Drawable, UserDict.UserDict):
            complevel=9
        filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
 
-        mr_table=hdffile.createTable(where=where,name=name,
+        mr_table=hdffile.create_table(where=where,name=name,
                                      description=h5_table_format,
                                      title=title,
                                      filters=filter,
@@ -250,7 +250,6 @@ class MeasurementResult(Drawable.Drawable, UserDict.UserDict):
        self.lock.acquire()
        try:
            mr_table.attrs.quantity_name=self.quantity_name
-
            row=mr_table.row
            xdata=self.get_xdata()
            if xdata.shape[0]!=0:
@@ -284,7 +283,6 @@ def read_from_hdf(hdf_node):
        return None
 
    mr=MeasurementResult(hdf_node._v_attrs.quantity_name)
-
    for r in hdf_node.iterrows():
        mr[r["x"]]=AccumulatedValue(r["y"],r["y_err"],r["n"])
 
@@ -13,7 +13,6 @@ class Persistance :
        if self.counter < 1:
            for i,ch in enumerate(self.accu.y):
                ch += res.y[i]
-
        elif len(self.result_list) == self.shots:
            self.counter = len(self.result_list)
            old_result = self.result_list.pop(0)
@@ -25,5 +24,5 @@ class Persistance :
            for i,ch in enumerate(self.accu.y):
                ch *= self.counter-1
                ch += res.y[i]
        self.accu /= self.counter
        return self.accu
@@ -18,7 +18,6 @@ class Temp_Result(Resultable, Drawable):
    def __init__(self, x = None, y = None, desc = None, job_id = None, job_date = None):
        Resultable.__init__(self)
        Drawable.__init__(self)
 
-
        if (x is None) and (y is None) and (desc is None) and (job_id is None) and (job_date is None):
            pass
@@ -712,49 +712,49 @@ class DamarisGUI:
            return True
 
        # create new dump file
-        dump_file = tables.openFile( self.dump_filename, mode="w", title="DAMARIS experiment data" )
+        dump_file = tables.open_file( self.dump_filename, mode="w", title="DAMARIS experiment data" )
        # write scripts and other useful information
-        scriptgroup = dump_file.createGroup( "/", "scripts", "Used Scripts" )
+        scriptgroup = dump_file.create_group( "/", "scripts", "Used Scripts" )
        exp_text, res_text = self.sw.get_scripts( )
        if self.si.exp_script:
-            dump_file.createArray( scriptgroup, "experiment_script", exp_text )
+            dump_file.create_array( scriptgroup, "experiment_script", exp_text )
        if self.si.res_script:
-            dump_file.createArray( scriptgroup, "result_script", res_text )
+            dump_file.create_array( scriptgroup, "result_script", res_text )
        if self.si.backend_executable:
-            dump_file.createArray( scriptgroup, "backend_executable", self.si.backend_executable )
+            dump_file.create_array( scriptgroup, "backend_executable", self.si.backend_executable )
        if self.spool_dir:
-            dump_file.createArray( scriptgroup, "spool_directory", self.spool_dir )
+            dump_file.create_array( scriptgroup, "spool_directory", self.spool_dir )
        timeline_tablecols = numpy.recarray( 0, dtype=([ ("time", "S17"),
                                                         ("experiments", "int64"),
                                                         ("results", "int64") ]) )
-        timeline_table = dump_file.createTable( "/", "timeline", timeline_tablecols,
+        timeline_table = dump_file.create_table( "/", "timeline", timeline_tablecols,
                                                title="Timeline of Experiment" )
        if tables.__version__[ 0 ] == "1":
-            logarray = dump_file.createVLArray( where=dump_file.root,
+            logarray = dump_file.create_vlarray( where=dump_file.root,
                                                name="log",
                                                atom=tables.StringAtom( length=120 ),
                                                title="log messages",
                                                filters=tables.Filters( complevel=9, complib='zlib' ) )
        else:
-            logarray = dump_file.createEArray( where=dump_file.root,
+            logarray = dump_file.create_earray( where=dump_file.root,
                                               name="log",
                                               atom=tables.StringAtom( itemsize=120 ),
                                               shape=(0,),
                                               title="log messages",
                                               filters=tables.Filters( complevel=9, complib='zlib' ) )
 
-        if dump_file is None and os.path.isfile( self.dump_filename ) and tables.isPyTablesFile( self.dump_filename ):
+        if dump_file is None and os.path.isfile( self.dump_filename ) and tables.is_pytables_file( self.dump_filename ):
            # take some data from dump file and repack
            os.rename( self.dump_filename, self.dump_filename + ".bak" )
-            old_dump_file = tables.openFile( self.dump_filename + ".bak", mode="r+" )
+            old_dump_file = tables.open_file( self.dump_filename + ".bak", mode="r+" )
            if "data_pool" in old_dump_file.root:
-                old_dump_file.removeNode( where="/", name="data_pool", recursive=True )
-            old_dump_file.copyFile( self.dump_filename )
+                old_dump_file.remove_node( where="/", name="data_pool", recursive=True )
+            old_dump_file.copy_file( self.dump_filename )
            old_dump_file.close( )
            del old_dump_file
            os.remove( self.dump_filename + ".bak" )
            # prepare for update
-            dump_file = tables.openFile( self.dump_filename, mode="r+" )
+            dump_file = tables.open_file( self.dump_filename, mode="r+" )
 
        if dump_file is None:
            # exit!
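The log array branch above keeps the PyTables 1.x VLArray fallback but uses the v3 names otherwise. A minimal sketch of the v3 EArray path for appendable log messages (file name and message hypothetical):

    import tables

    h5 = tables.open_file("demo.h5", mode="w")
    logarray = h5.create_earray(where=h5.root,
                                name="log",
                                atom=tables.StringAtom(itemsize=120),
                                shape=(0,),
                                title="log messages",
                                filters=tables.Filters(complevel=9, complib="zlib"))
    logarray.append(["backend started"])  # appends one 120-byte string row
    h5.close()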
@@ -764,8 +764,8 @@ class DamarisGUI:
            return True
 
        # no undo please!
-        if dump_file.isUndoEnabled( ):
-            dump_file.disableUndo( )
+        if dump_file.is_undo_enabled( ):
+            dump_file.disable_undo( )
 
        # save the data!
        self.data.write_hdf5( dump_file, where="/", name="data_pool",
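The undo-machinery renames follow the same pattern; a minimal sketch, assuming an open tables.File handle (file name hypothetical):

    import tables

    h5 = tables.open_file("demo.h5", mode="a")
    if h5.is_undo_enabled():   # v2: isUndoEnabled()
        h5.disable_undo()      # v2: disableUndo()
    h5.close()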
@@ -1944,7 +1944,7 @@ pygobject version %(pygobject)s
            "matplotlib": matplotlib.__version__,
            "matplotlib_backend": FigureCanvas.__name__[ 12: ],
            "numpy": numpy_version,
-            "pytables": tables.getPyTablesVersion( ),
+            "pytables": tables.get_pytables_version( ),
            "pytables_libs": "",
            "pygtk": "%d.%d.%d" % gtk.pygtk_version,
            "pygobject": pygobject_version
@@ -1952,12 +1952,12 @@ pygobject version %(pygobject)s
 
        # pytables modules:
        # find compression extensions for combo box and write version numbers
-        # list is taken from ValueError output of tables.whichLibVersion("")
+        # list is taken from ValueError output of tables.which_lib_version("")
        model = self.config_data_pool_complib.get_model( )
        for libname in ('hdf5', 'zlib', 'lzo', 'ucl', 'bzip2'):
            version_info = None
            try:
-                version_info = tables.whichLibVersion( libname )
+                version_info = tables.which_lib_version( libname )
            except ValueError:
                continue
            if version_info:
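which_lib_version keeps the v2 behaviour: it returns version information for an available library, None when the library is missing, and raises ValueError for names it does not know, which is why unavailable entries in the list are simply skipped. A minimal sketch of the probe loop:

    import tables

    for libname in ('hdf5', 'zlib', 'lzo', 'ucl', 'bzip2'):
        try:
            version_info = tables.which_lib_version(libname)
        except ValueError:
            continue                 # library name unknown to this build
        if version_info:
            print(libname, version_info)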
@@ -107,14 +107,14 @@ class ScriptInterface:
    def dump_data(self, filename):
        try:
            # write data from pool
-            dump_file=tables.openFile(filename,mode="w",title="DAMARIS experiment data")
+            dump_file=tables.open_file(filename,mode="w",title="DAMARIS experiment data")
            self.data.write_hdf5(dump_file, complib='zlib', complevel=6)
            # write scripts
-            scriptgroup=dump_file.createGroup("/","scripts","Used Scripts")
-            dump_file.createArray(scriptgroup,"experiment_script", self.exp_script)
-            dump_file.createArray(scriptgroup,"result_script", self.res_script)
-            dump_file.createArray(scriptgroup,"backend_executable", self.backend_executable)
-            dump_file.createArray(scriptgroup,"spool_directory", self.spool_dir)
+            scriptgroup=dump_file.create_group("/","scripts","Used Scripts")
+            dump_file.create_array(scriptgroup,"experiment_script", self.exp_script)
+            dump_file.create_array(scriptgroup,"result_script", self.res_script)
+            dump_file.create_array(scriptgroup,"backend_executable", self.backend_executable)
+            dump_file.create_array(scriptgroup,"spool_directory", self.spool_dir)
            dump_file.flush()
            dump_file.close()
            dump_file=None