PyTables v2 to v3 API conversion done

Markus Rosenstihl 2016-11-23 15:55:22 +00:00
parent 00fe373108
commit 91ce1971c6
8 changed files with 43 additions and 47 deletions
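
PyTables 3.0 renamed the camelCase File and Node methods to PEP 8 snake_case names and deprecated the old spellings; the hunks below apply that rename throughout the DAMARIS sources. A minimal sketch of the mapping, assuming PyTables >= 3.0 (file and node names here are illustrative, not from the commit):

    import tables

    # snake_case 3.x API; the camelCase spellings on the removed lines
    # below are the deprecated 2.x equivalents.
    with tables.open_file("demo.h5", mode="w") as h5:        # was tables.openFile
        grp = h5.create_group("/", "g", title="demo group")  # was createGroup
        h5.create_array(grp, "a", obj=[1, 2, 3])             # was createArray(..., object=...)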

View File

@@ -214,7 +214,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
         self.lock.release()
     def write_to_hdf(self, hdffile, where, name, title, complib=None, complevel=None):
-        accu_group=hdffile.createGroup(where=where,name=name,title=title)
+        accu_group=hdffile.create_group(where=where,name=name,title=title)
         accu_group._v_attrs.damaris_type="ADC_Result"
         if self.contains_data():
             self.lock.acquire()
@@ -240,7 +240,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                 complevel=9
             filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
-            index_table=hdffile.createTable(where=accu_group,
+            index_table=hdffile.create_table(where=accu_group,
                                             name="indices",
                                             description={"start": tables.UInt64Col(),
                                                          "length": tables.UInt64Col(),
@@ -279,7 +279,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
             else:
                 chunkshape = (min(chunkshape[0],1024*8), chunkshape[1])
             if tables.__version__[0]=="1":
-                time_slice_data=hdffile.createCArray(accu_group,
+                time_slice_data=hdffile.create_carray(accu_group,
                                                      name="adc_data",
                                                      shape=timedata.shape,
                                                      atom=tables.Int32Atom(shape=chunkshape,
@@ -287,7 +287,7 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                                                      filters=filter,
                                                      title="adc data")
             else:
-                time_slice_data=hdffile.createCArray(accu_group,
+                time_slice_data=hdffile.create_carray(accu_group,
                                                      name="adc_data",
                                                      shape=timedata.shape,
                                                      chunkshape=chunkshape,
@@ -296,9 +296,9 @@ class ADC_Result(Resultable, Drawable, DamarisFFT, Signalpath):
                                                      title="adc data")
                 time_slice_data[:]=timedata
             else:
-                time_slice_data=hdffile.createArray(accu_group,
+                time_slice_data=hdffile.create_array(accu_group,
                                                     name="adc_data",
-                                                    object=timedata,
+                                                    obj=timedata,
                                                     title="adc data")
         finally:

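The last hunk above also tracks a keyword rename: File.create_array takes the data as obj= where the 2.x createArray spelled it object=. A minimal sketch, assuming PyTables >= 3.0 (file and array names illustrative):

    import numpy
    import tables

    timedata = numpy.arange(8, dtype=numpy.int32)
    with tables.open_file("demo.h5", mode="w") as h5:
        # 3.x keyword is obj=; 2.x spelled it object=
        h5.create_array("/", "adc_data", obj=timedata, title="adc data")
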
View File

@@ -404,7 +404,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
     # -----------------------------------------------------------------------
     def write_to_hdf(self, hdffile, where, name, title, complib=None, complevel=None):
-        accu_group=hdffile.createGroup(where=where,name=name,title=title)
+        accu_group=hdffile.create_group(where=where,name=name,title=title)
         accu_group._v_attrs.damaris_type="Accumulation"
         if self.contains_data():
             self.lock.acquire()
@@ -437,7 +437,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
             # tried compression filter, but no effect...
-            index_table=hdffile.createTable(where=accu_group,
+            index_table=hdffile.create_table(where=accu_group,
                                             name="indices",
                                             description={"start": tables.UInt64Col(),
                                                          "length": tables.UInt64Col(),
@@ -481,7 +481,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
             else:
                 chunkshape = (min(chunkshape[0],1024*8), chunkshape[1])
             if tables.__version__[0]=="1":
-                time_slice_data=hdffile.createCArray(accu_group,
+                time_slice_data=hdffile.create_carray(accu_group,
                                                      name="accu_data",
                                                      shape=timedata.shape,
                                                      atom=tables.Float64Atom(shape=chunkshape,
@@ -489,7 +489,7 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                                                      filters=filter,
                                                      title="accu data")
             else:
-                time_slice_data=hdffile.createCArray(accu_group,
+                time_slice_data=hdffile.create_carray(accu_group,
                                                      name="accu_data",
                                                      shape=timedata.shape,
                                                      chunkshape=chunkshape,
@@ -499,9 +499,9 @@ class Accumulation(Errorable, Drawable, DamarisFFT, Signalpath):
                 time_slice_data[:]=timedata
             else:
-                time_slice_data=hdffile.createArray(accu_group,
+                time_slice_data=hdffile.create_array(accu_group,
                                                     name="accu_data",
-                                                    object=timedata,
+                                                    obj=timedata,
                                                     title="accu data")

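Note that the tables.__version__[0]=="1" branch above now also calls create_carray, which the 1.x API did not provide, so the version check is effectively moot after this commit. A sketch of the equivalent single call, assuming PyTables >= 3.0 (shapes and file name illustrative):

    import numpy
    import tables

    timedata = numpy.zeros((4096, 2), dtype=numpy.float64)
    chunkshape = (min(timedata.shape[0], 1024 * 8), timedata.shape[1])
    with tables.open_file("demo.h5", mode="w") as h5:
        accu = h5.create_carray("/", "accu_data",
                                atom=tables.Float64Atom(),
                                shape=timedata.shape,
                                chunkshape=chunkshape,
                                filters=tables.Filters(complevel=9, complib="zlib", shuffle=True),
                                title="accu data")
        accu[:] = timedata
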
View File

@@ -98,13 +98,13 @@ class DataPool(UserDict.DictMixin):
     def write_hdf5(self,hdffile,where="/",name="data_pool", complib=None, complevel=None):
         if type(hdffile) is types.StringType:
-            dump_file=tables.openFile(hdffile, mode="a")
+            dump_file=tables.open_file(hdffile, mode="a")
         elif isinstance(hdffile,tables.File):
             dump_file=hdffile
         else:
             raise Exception("expecting hdffile or string")
-        dump_group=dump_file.createGroup(where, name, "DAMARIS data pool")
+        dump_group=dump_file.create_group(where, name, "DAMARIS data pool")
         self.__dictlock.acquire()
         dict_keys=self.__mydict.keys()
         self.__dictlock.release()
@@ -117,7 +117,7 @@ class DataPool(UserDict.DictMixin):
             for part in namelist[:-1]:
                 dir_part="dir_"+str(part).translate(DataPool.translation_table)
                 if not dir_part in dump_dir:
-                    dump_dir=dump_file.createGroup(dump_dir,name=dir_part,title=part)
+                    dump_dir=dump_file.create_group(dump_dir,name=dir_part,title=part)
                 else:
                     if dump_dir._v_children[dir_part]._v_title==part:
                         dump_dir=dump_dir._v_children[dir_part]
@@ -125,7 +125,7 @@ class DataPool(UserDict.DictMixin):
                         extension_count=0
                         while dir_part+"_%03d"%extension_count in dump_dir:
                             extension_count+=1
-                        dump_dir=dump_file.createGroup(dump_dir,
+                        dump_dir=dump_file.create_group(dump_dir,
                                                        name=dir_part+"_%03d"%extension_count,
                                                        title=part)

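DataPool.write_hdf5 accepts either a filename or an already open tables.File; after this commit the filename path requires the 3.x tables.open_file. Code that still has to run against PyTables 2.x could fall back to the old spelling; a sketch, not part of this commit (the helper name open_h5 is hypothetical):

    import tables

    def open_h5(filename, mode="a"):
        # Prefer the 3.x name; fall back to the deprecated 2.x one.
        opener = getattr(tables, "open_file", None) or tables.openFile
        return opener(filename, mode=mode)
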
View File

@@ -240,7 +240,7 @@ class MeasurementResult(Drawable.Drawable, UserDict.UserDict):
             complevel=9
         filter=tables.Filters(complevel=complevel,complib=complib,shuffle=1)
-        mr_table=hdffile.createTable(where=where,name=name,
+        mr_table=hdffile.create_table(where=where,name=name,
                                      description=h5_table_format,
                                      title=title,
                                      filters=filter,
@@ -250,7 +250,6 @@ class MeasurementResult(Drawable.Drawable, UserDict.UserDict):
         self.lock.acquire()
         try:
             mr_table.attrs.quantity_name=self.quantity_name
-
             row=mr_table.row
             xdata=self.get_xdata()
             if xdata.shape[0]!=0:
@@ -284,7 +283,6 @@ def read_from_hdf(hdf_node):
         return None
     mr=MeasurementResult(hdf_node._v_attrs.quantity_name)
-
     for r in hdf_node.iterrows():
         mr[r["x"]]=AccumulatedValue(r["y"],r["y_err"],r["n"])

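The table written by create_table is read back row-wise through iterrows, which kept its name in 3.x. A round-trip sketch, assuming PyTables >= 3.0; the column names follow the read loop above, while the column types and file name are assumptions:

    import tables

    desc = {"x": tables.Float64Col(), "y": tables.Float64Col(),
            "y_err": tables.Float64Col(), "n": tables.Int64Col()}
    with tables.open_file("demo.h5", mode="w") as h5:
        t = h5.create_table("/", "mr", description=desc, title="measurement")
        row = t.row
        for x in (0.0, 1.0, 2.0):
            row["x"], row["y"], row["y_err"], row["n"] = x, x * x, 0.0, 1
            row.append()  # buffer the row; flush writes it out
        t.flush()
        for r in t.iterrows():
            print(r["x"], r["y"], r["y_err"], r["n"])
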
View File

@@ -13,7 +13,6 @@ class Persistance :
        if self.counter < 1:
            for i,ch in enumerate(self.accu.y):
                ch += res.y[i]
-
        elif len(self.result_list) == self.shots:
            self.counter = len(self.result_list)
            old_result = self.result_list.pop(0)
@@ -25,5 +24,5 @@ class Persistance :
            for i,ch in enumerate(self.accu.y):
                ch *= self.counter-1
                ch += res.y[i]
-                self.accu /= self.counter
+        self.accu /= self.counter
        return self.accu

View File

@@ -18,7 +18,6 @@ class Temp_Result(Resultable, Drawable):
     def __init__(self, x = None, y = None, desc = None, job_id = None, job_date = None):
         Resultable.__init__(self)
         Drawable.__init__(self)
-
         if (x is None) and (y is None) and (desc is None) and (job_id is None) and (job_date is None):
             pass

View File

@@ -712,49 +712,49 @@ class DamarisGUI:
                 return True
             # create new dump file
-            dump_file = tables.openFile( self.dump_filename, mode="w", title="DAMARIS experiment data" )
+            dump_file = tables.open_file( self.dump_filename, mode="w", title="DAMARIS experiment data" )
             # write scripts and other useful information
-            scriptgroup = dump_file.createGroup( "/", "scripts", "Used Scripts" )
+            scriptgroup = dump_file.create_group( "/", "scripts", "Used Scripts" )
             exp_text, res_text = self.sw.get_scripts( )
             if self.si.exp_script:
-                dump_file.createArray( scriptgroup, "experiment_script", exp_text )
+                dump_file.create_array( scriptgroup, "experiment_script", exp_text )
             if self.si.res_script:
-                dump_file.createArray( scriptgroup, "result_script", res_text )
+                dump_file.create_array( scriptgroup, "result_script", res_text )
             if self.si.backend_executable:
-                dump_file.createArray( scriptgroup, "backend_executable", self.si.backend_executable )
+                dump_file.create_array( scriptgroup, "backend_executable", self.si.backend_executable )
             if self.spool_dir:
-                dump_file.createArray( scriptgroup, "spool_directory", self.spool_dir )
+                dump_file.create_array( scriptgroup, "spool_directory", self.spool_dir )
             timeline_tablecols = numpy.recarray( 0, dtype=([ ("time", "S17"),
                                                              ("experiments", "int64"),
                                                              ("results", "int64") ]) )
-            timeline_table = dump_file.createTable( "/", "timeline", timeline_tablecols,
+            timeline_table = dump_file.create_table( "/", "timeline", timeline_tablecols,
                                                     title="Timeline of Experiment" )
             if tables.__version__[ 0 ] == "1":
-                logarray = dump_file.createVLArray( where=dump_file.root,
+                logarray = dump_file.create_vlarray( where=dump_file.root,
                                                     name="log",
                                                     atom=tables.StringAtom( length=120 ),
                                                     title="log messages",
                                                     filters=tables.Filters( complevel=9, complib='zlib' ) )
             else:
-                logarray = dump_file.createEArray( where=dump_file.root,
+                logarray = dump_file.create_earray( where=dump_file.root,
                                                    name="log",
                                                    atom=tables.StringAtom( itemsize=120 ),
                                                    shape=(0,),
                                                    title="log messages",
                                                    filters=tables.Filters( complevel=9, complib='zlib' ) )
-        if dump_file is None and os.path.isfile( self.dump_filename ) and tables.isPyTablesFile( self.dump_filename ):
+        if dump_file is None and os.path.isfile( self.dump_filename ) and tables.is_pytables_file( self.dump_filename ):
             # take some data from dump file and repack
             os.rename( self.dump_filename, self.dump_filename + ".bak" )
-            old_dump_file = tables.openFile( self.dump_filename + ".bak", mode="r+" )
+            old_dump_file = tables.open_file( self.dump_filename + ".bak", mode="r+" )
             if "data_pool" in old_dump_file.root:
-                old_dump_file.removeNode( where="/", name="data_pool", recursive=True )
-            old_dump_file.copyFile( self.dump_filename )
+                old_dump_file.remove_node( where="/", name="data_pool", recursive=True )
+            old_dump_file.copy_file( self.dump_filename )
             old_dump_file.close( )
             del old_dump_file
             os.remove( self.dump_filename + ".bak" )
             # prepare for update
-            dump_file = tables.openFile( self.dump_filename, mode="r+" )
+            dump_file = tables.open_file( self.dump_filename, mode="r+" )
         if dump_file is None:
             # exit!
@@ -764,8 +764,8 @@ class DamarisGUI:
             return True
         # no undo please!
-        if dump_file.isUndoEnabled( ):
-            dump_file.disableUndo( )
+        if dump_file.is_undo_enabled( ):
+            dump_file.disable_undo( )
         # save the data!
         self.data.write_hdf5( dump_file, where="/", name="data_pool",
@@ -1944,7 +1944,7 @@ pygobject version %(pygobject)s
                   "matplotlib": matplotlib.__version__,
                   "matplotlib_backend": FigureCanvas.__name__[ 12: ],
                   "numpy": numpy_version,
-                  "pytables": tables.getPyTablesVersion( ),
+                  "pytables": tables.get_pytables_version( ),
                   "pytables_libs": "",
                   "pygtk": "%d.%d.%d" % gtk.pygtk_version,
                   "pygobject": pygobject_version
@@ -1952,12 +1952,12 @@ pygobject version %(pygobject)s
         # pytables modules:
         # find compression extensions for combo box and write version numbers
-        # list is taken from ValueError output of tables.whichLibVersion("")
+        # list is taken from ValueError output of tables.which_lib_version("")
         model = self.config_data_pool_complib.get_model( )
         for libname in ('hdf5', 'zlib', 'lzo', 'ucl', 'bzip2'):
             version_info = None
             try:
-                version_info = tables.whichLibVersion( libname )
+                version_info = tables.which_lib_version( libname )
             except ValueError:
                 continue
             if version_info:

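Under the new API the log becomes an extendable string EArray; note that StringAtom takes itemsize= where the 1.x branch passes length=. A sketch of the same construction, assuming PyTables >= 3.0 (file name illustrative):

    import tables

    with tables.open_file("demo.h5", mode="w") as h5:
        logarray = h5.create_earray(where=h5.root, name="log",
                                    atom=tables.StringAtom(itemsize=120),
                                    shape=(0,),
                                    title="log messages",
                                    filters=tables.Filters(complevel=9, complib="zlib"))
        # append one 120-byte string row to the extendable dimension
        logarray.append([b"backend started"])
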
View File

@@ -107,14 +107,14 @@ class ScriptInterface:
     def dump_data(self, filename):
         try:
             # write data from pool
-            dump_file=tables.openFile(filename,mode="w",title="DAMARIS experiment data")
+            dump_file=tables.open_file(filename,mode="w",title="DAMARIS experiment data")
             self.data.write_hdf5(dump_file, complib='zlib', complevel=6)
             # write scripts
-            scriptgroup=dump_file.createGroup("/","scripts","Used Scripts")
-            dump_file.createArray(scriptgroup,"experiment_script", self.exp_script)
-            dump_file.createArray(scriptgroup,"result_script", self.res_script)
-            dump_file.createArray(scriptgroup,"backend_executable", self.backend_executable)
-            dump_file.createArray(scriptgroup,"spool_directory", self.spool_dir)
+            scriptgroup=dump_file.create_group("/","scripts","Used Scripts")
+            dump_file.create_array(scriptgroup,"experiment_script", self.exp_script)
+            dump_file.create_array(scriptgroup,"result_script", self.res_script)
+            dump_file.create_array(scriptgroup,"backend_executable", self.backend_executable)
+            dump_file.create_array(scriptgroup,"spool_directory", self.spool_dir)
             dump_file.flush()
             dump_file.close()
             dump_file=None
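
PyTables 3.x also ships a pt2to3 helper script intended to automate exactly this camelCase-to-snake_case rewrite. As a quick sanity check, a dump file written by the converted dump_data can be read back with the new API; a sketch (file name illustrative, node names as written above):

    import tables

    with tables.open_file("damaris_dump.h5", mode="r") as h5:
        # dump_data writes a "scripts" group next to the data pool
        assert "scripts" in h5.root
        print(h5.root.scripts.experiment_script.read())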