pytables v2 to v3 conversion done

Markus Rosenstihl
2016-11-23 15:55:22 +00:00
parent 00fe373108
commit 91ce1971c6
8 changed files with 43 additions and 47 deletions
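The hunks below replace the PyTables 2.x camelCase calls (openFile, createGroup, createArray, createTable, createVLArray, createEArray) with the snake_case names introduced in PyTables 3.x. As a minimal sketch (not part of this commit, assuming PyTables >= 3.0 is installed and using the illustrative file name example_dump.h5), the converted dump-file setup looks roughly like this against the new API:

import numpy
import tables

# open a new HDF5 dump file with the PyTables 3.x snake_case API
dump_file = tables.open_file("example_dump.h5", mode="w", title="DAMARIS experiment data")
scriptgroup = dump_file.create_group("/", "scripts", "Used Scripts")
dump_file.create_array(scriptgroup, "experiment_script", "print('experiment script placeholder')")
# an empty record array only supplies the column layout of the timeline table
timeline_cols = numpy.recarray(0, dtype=[("time", "S17"), ("experiments", "int64"), ("results", "int64")])
dump_file.create_table("/", "timeline", timeline_cols, title="Timeline of Experiment")
# extendable string array for log messages, zlib-compressed
logarray = dump_file.create_earray(where=dump_file.root, name="log",
                                   atom=tables.StringAtom(itemsize=120), shape=(0,),
                                   title="log messages",
                                   filters=tables.Filters(complevel=9, complib="zlib"))
logarray.append(numpy.array(["startup"], dtype="S120"))
dump_file.close()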

View File

@@ -712,49 +712,49 @@ class DamarisGUI:
return True
# create new dump file
-dump_file = tables.openFile( self.dump_filename, mode="w", title="DAMARIS experiment data" )
+dump_file = tables.open_file( self.dump_filename, mode="w", title="DAMARIS experiment data" )
# write scripts and other useful information
-scriptgroup = dump_file.createGroup( "/", "scripts", "Used Scripts" )
+scriptgroup = dump_file.create_group( "/", "scripts", "Used Scripts" )
exp_text, res_text = self.sw.get_scripts( )
if self.si.exp_script:
dump_file.createArray( scriptgroup, "experiment_script", exp_text )
dump_file.create_array( scriptgroup, "experiment_script", exp_text )
if self.si.res_script:
dump_file.createArray( scriptgroup, "result_script", res_text )
dump_file.create_array( scriptgroup, "result_script", res_text )
if self.si.backend_executable:
dump_file.createArray( scriptgroup, "backend_executable", self.si.backend_executable )
dump_file.create_array( scriptgroup, "backend_executable", self.si.backend_executable )
if self.spool_dir:
dump_file.createArray( scriptgroup, "spool_directory", self.spool_dir )
dump_file.create_array( scriptgroup, "spool_directory", self.spool_dir )
timeline_tablecols = numpy.recarray( 0, dtype=([ ("time", "S17"),
("experiments", "int64"),
("results", "int64") ]) )
-timeline_table = dump_file.createTable( "/", "timeline", timeline_tablecols,
+timeline_table = dump_file.create_table( "/", "timeline", timeline_tablecols,
title="Timeline of Experiment" )
if tables.__version__[ 0 ] == "1":
-logarray = dump_file.createVLArray( where=dump_file.root,
+logarray = dump_file.create_vlarray( where=dump_file.root,
name="log",
atom=tables.StringAtom( length=120 ),
title="log messages",
filters=tables.Filters( complevel=9, complib='zlib' ) )
else:
-logarray = dump_file.createEArray( where=dump_file.root,
+logarray = dump_file.create_earray( where=dump_file.root,
name="log",
atom=tables.StringAtom( itemsize=120 ),
shape=(0,),
title="log messages",
filters=tables.Filters( complevel=9, complib='zlib' ) )
-if dump_file is None and os.path.isfile( self.dump_filename ) and tables.isPyTablesFile( self.dump_filename ):
+if dump_file is None and os.path.isfile( self.dump_filename ) and tables.is_pytables_file( self.dump_filename ):
# take some data from dump file and repack
os.rename( self.dump_filename, self.dump_filename + ".bak" )
-old_dump_file = tables.openFile( self.dump_filename + ".bak", mode="r+" )
+old_dump_file = tables.open_file( self.dump_filename + ".bak", mode="r+" )
if "data_pool" in old_dump_file.root:
-old_dump_file.removeNode( where="/", name="data_pool", recursive=True )
-old_dump_file.copyFile( self.dump_filename )
+old_dump_file.remove_node( where="/", name="data_pool", recursive=True )
+old_dump_file.copy_file( self.dump_filename )
old_dump_file.close( )
del old_dump_file
os.remove( self.dump_filename + ".bak" )
# prepare for update
dump_file = tables.openFile( self.dump_filename, mode="r+" )
dump_file = tables.open_file( self.dump_filename, mode="r+" )
if dump_file is None:
# exit!
@@ -764,8 +764,8 @@ class DamarisGUI:
return True
# no undo please!
-if dump_file.isUndoEnabled( ):
-dump_file.disableUndo( )
+if dump_file.is_undo_enabled( ):
+dump_file.disable_undo( )
# save the data!
self.data.write_hdf5( dump_file, where="/", name="data_pool",
@@ -1944,7 +1944,7 @@ pygobject version %(pygobject)s
"matplotlib": matplotlib.__version__,
"matplotlib_backend": FigureCanvas.__name__[ 12: ],
"numpy": numpy_version,
"pytables": tables.getPyTablesVersion( ),
"pytables": tables.get_pytables_version( ),
"pytables_libs": "",
"pygtk": "%d.%d.%d" % gtk.pygtk_version,
"pygobject": pygobject_version
@@ -1952,12 +1952,12 @@ pygobject version %(pygobject)s
# pytables modules:
# find compression extensions for combo box and write version numbers
-# list is taken from ValueError output of tables.whichLibVersion("")
+# list is taken from ValueError output of tables.which_lib_version("")
model = self.config_data_pool_complib.get_model( )
for libname in ('hdf5', 'zlib', 'lzo', 'ucl', 'bzip2'):
version_info = None
try:
-version_info = tables.whichLibVersion( libname )
+version_info = tables.which_lib_version( libname )
except ValueError:
continue
if version_info:
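The loop above probes which compression backends the installed PyTables build can use. A minimal standalone sketch (not part of this commit) of the same which_lib_version() pattern, assuming PyTables >= 3.0:

import tables

available = {}
for libname in ("hdf5", "zlib", "lzo", "ucl", "bzip2"):
    try:
        # returns None if the library is not available;
        # raises ValueError if the name is unknown to this PyTables build
        version_info = tables.which_lib_version(libname)
    except ValueError:
        continue
    if version_info:
        available[libname] = version_info
print(available)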

View File

@@ -107,14 +107,14 @@ class ScriptInterface:
def dump_data(self, filename):
try:
# write data from pool
-dump_file=tables.openFile(filename,mode="w",title="DAMARIS experiment data")
+dump_file=tables.open_file(filename,mode="w",title="DAMARIS experiment data")
self.data.write_hdf5(dump_file, complib='zlib', complevel=6)
# write scripts
-scriptgroup=dump_file.createGroup("/","scripts","Used Scripts")
-dump_file.createArray(scriptgroup,"experiment_script", self.exp_script)
-dump_file.createArray(scriptgroup,"result_script", self.res_script)
-dump_file.createArray(scriptgroup,"backend_executable", self.backend_executable)
-dump_file.createArray(scriptgroup,"spool_directory", self.spool_dir)
+scriptgroup=dump_file.create_group("/","scripts","Used Scripts")
+dump_file.create_array(scriptgroup,"experiment_script", self.exp_script)
+dump_file.create_array(scriptgroup,"result_script", self.res_script)
+dump_file.create_array(scriptgroup,"backend_executable", self.backend_executable)
+dump_file.create_array(scriptgroup,"spool_directory", self.spool_dir)
dump_file.flush()
dump_file.close()
dump_file=None
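For completeness, a minimal sketch (not part of this commit) of reading such a dump back with the PyTables 3.x API; the file name damaris_dump.h5 is only illustrative:

import tables

with tables.open_file("damaris_dump.h5", mode="r") as dump_file:
    # walk all arrays stored below /scripts and print their contents
    for node in dump_file.walk_nodes("/scripts", classname="Array"):
        print(node._v_name, node.read())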