PYTHON: Implement AmunH5 class for Amun's HDF5 format.
Mark the previous subroutines deprecated. Signed-off-by: Grzegorz Kowal <grzegorz@amuncode.org>
This commit is contained in:
parent
d28194dc23
commit
da339f1ac9
@ -33,12 +33,117 @@
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
"""
|
||||
from .interpolation import interpolate
|
||||
import h5py as h5
|
||||
import numpy as np
|
||||
import os.path as op
|
||||
import sys
|
||||
from .amun import Amun
|
||||
|
||||
class AmunH5(Amun):
    """AMUN H5 snapshot class.

    Reads AMUN code snapshots stored in the HDF5 format.  Snapshot
    metadata lives in the 'attributes' group, the block geometry in the
    'coordinates' group, and the data arrays in the 'variables' group.
    """

    def __init_snapshot__(self):
        """
        Initializes the snapshot by verifying that the file is indeed
        in the AmunH5 format.

        Raises an Exception when the path is not a regular file, or the
        file does not look like an AMUN HDF5 snapshot.
        """
        import h5py

        if not self.path_is_file:
            raise Exception("AmunH5 requires a file not directory as the argument!")

        with h5py.File(self.path, 'r') as h5:
            # BUG FIX: the guard originally tested for 'codes' while the
            # read below uses 'code', so the branch either never matched
            # or raised KeyError; both places must use the same key.
            if 'code' in h5.attrs:
                if h5.attrs['code'].astype(str) == "AMUN":
                    self.dataformat = 'AmunH5'
                else:
                    raise Exception("'{}' contains attribute 'code', but its content is not 'AMUN'!".format(self.path))
            elif 'attributes' in h5 and 'coordinates' in h5 and 'variables' in h5:
                # older snapshots carry no 'code' attribute; recognize them
                # by the presence of the three mandatory groups instead
                self.dataformat = 'AmunH5'
            else:
                raise Exception("{} misses one of these groups: 'attributes', 'coordinates' or 'variables'!".format(self.path))

    def __fill_attributes__(self):
        """
        Read attributes from the snapshot file and fill up
        the corresponding attribute dictionary.

        Purely internal attributes (seeds, process counts, etc.) are
        skipped, and legacy attribute names ('nprocs', 'rdims') are
        translated to their current equivalents ('nchunks', '[xyz]blocks').
        """
        import h5py

        # internal bookkeeping attributes of no interest to the user
        exclude_list = ['nseeds', 'seeds', 'dblocks', 'nproc', 'dims', 'dtn', 'last_id', 'mblocks']

        with h5py.File(self.path, 'r') as h5:
            for aname in h5['attributes'].attrs:
                if aname not in exclude_list:
                    attr = h5['attributes'].attrs[aname]
                    # NOTE(review): assumes every attribute is stored as an
                    # array (len() / [0] would fail on a scalar) — confirm
                    # against the writer side of the format.
                    if attr.dtype in ('float64', 'float32', 'int64', 'int32'):
                        if len(attr) > 1:
                            self.attributes[aname] = attr.tolist()
                        else:
                            self.attributes[aname] = attr[0]
                    else:
                        # non-numeric attributes are decoded to str
                        self.attributes[aname] = attr[0].astype(str)

        # legacy snapshots call the chunk count 'nprocs'
        if 'nchunks' not in self.attributes and 'nprocs' in self.attributes:
            self.attributes['nchunks'] = self.attributes['nprocs']
            del self.attributes['nprocs']
        # legacy snapshots store the domain block division as vector 'rdims'
        if 'rdims' in self.attributes:
            self.attributes['xblocks'] = self.attributes['rdims'][0]
            self.attributes['yblocks'] = self.attributes['rdims'][1]
            if self.attributes['ndims'] == 3:
                self.attributes['zblocks'] = self.attributes['rdims'][2]
            del self.attributes['rdims']

    def __fill_variables__(self):
        """
        Read variable names from the snapshot file and fill up
        the corresponding variable list.
        """
        import h5py

        with h5py.File(self.path, 'r') as h5:
            for variable in h5['variables']:
                v = variable.strip()
                self.variables[v] = v

    def __fill_chunks__(self):
        """
        Retrieve metadata about datablocks stored in the snapshot's chunks.

        Raises an Exception if any chunk file of the snapshot is missing.
        """
        import h5py, numpy, os

        # chunk files are named 'p<snapshot:06d>_<chunk:05d>.h5'
        self.chunkname = 'p{:06d}'.format(self.attributes['isnap']) + '_{:05d}.h5'
        for n in range(self.attributes['nchunks']):
            self.chunks[n] = dict()
            self.chunks[n]['filename'] = self.chunkname.format(n)
            cname = os.path.join(self.dirname, self.chunks[n]['filename'])
            if os.path.exists(cname):
                with h5py.File(cname, 'r') as h5:
                    # number of data blocks stored in this chunk
                    self.chunks[n]['dblocks'] = h5['attributes'].attrs['dblocks'][0]

                    # per-block refinement levels, physical bounds, coordinates
                    self.chunks[n]['levels'] = numpy.array(h5['coordinates']['levels'])
                    self.chunks[n]['bounds'] = numpy.array(h5['coordinates']['bounds'])
                    self.chunks[n]['coords'] = numpy.array(h5['coordinates']['coords'])
            else:
                raise Exception("Snapshot's chunk '{}' not present!".format(cname))

    def __read_binary_data__(self, dataset_name, chunk_number):
        """
        Gets the dataset array from a given snapshot's chunk.

        Arguments:
            dataset_name - the dataset name in the 'variables' group,
            chunk_number - the index of the chunk to read from.

        Returns the dataset as a numpy array.
        """
        import h5py, numpy, os

        cname = os.path.join(self.dirname, self.chunks[chunk_number]['filename'])
        with h5py.File(cname, 'r') as h5:
            return numpy.array(h5['variables'][dataset_name])
|
||||
|
||||
|
||||
|
||||
#===============================================================================
|
||||
'''
|
||||
DEPRECATED FUNCTIONS
|
||||
'''
|
||||
|
||||
def amun_compatible(fname):
|
||||
'''
|
||||
@ -57,6 +162,11 @@ def amun_compatible(fname):
|
||||
comp = amun_compatible('p000010_00000.h5')
|
||||
|
||||
'''
|
||||
from warnings import warn
|
||||
import h5py as h5
|
||||
|
||||
warn('This function is deprecated', DeprecationWarning, stacklevel=2)
|
||||
|
||||
with h5.File(fname, 'r') as f:
|
||||
if 'codes' in f.attrs:
|
||||
if f.attrs['code'].astype(str) == "AMUN":
|
||||
@ -92,6 +202,12 @@ def amun_attribute(fname, aname):
|
||||
time = amun_attribute('p000010_00000.h5', 'time')
|
||||
|
||||
'''
|
||||
from warnings import warn
|
||||
import h5py as h5
|
||||
import numpy as np
|
||||
|
||||
warn('This function is deprecated', DeprecationWarning, stacklevel=2)
|
||||
|
||||
if not amun_compatible(fname):
|
||||
return None
|
||||
|
||||
@ -126,6 +242,12 @@ def amun_coordinate(fname, iname):
|
||||
bounds = amun_coordinate('p000010_00000.h5', 'bounds')
|
||||
|
||||
'''
|
||||
from warnings import warn
|
||||
import h5py as h5
|
||||
import numpy as np
|
||||
|
||||
warn('This function is deprecated', DeprecationWarning, stacklevel=2)
|
||||
|
||||
if not amun_compatible(fname):
|
||||
return None
|
||||
|
||||
@ -158,10 +280,18 @@ def amun_dataset(fname, vname, shrink=1, interpolation='rebin', order=3, progres
|
||||
dn = amun_dataset('p000010_00000.h5', 'dens')
|
||||
|
||||
'''
|
||||
from .interpolation import interpolate
|
||||
from warnings import warn
|
||||
import h5py as h5
|
||||
import numpy as np
|
||||
import os, sys
|
||||
|
||||
warn('This function is deprecated', DeprecationWarning, stacklevel=2)
|
||||
|
||||
if not amun_compatible(fname):
|
||||
return None
|
||||
|
||||
dname = op.dirname(fname)
|
||||
dname = os.path.dirname(fname)
|
||||
|
||||
if progress:
|
||||
sys.stdout.write("Data file path:\n '%s'\n" % (dname))
|
||||
@ -265,7 +395,7 @@ def amun_dataset(fname, vname, shrink=1, interpolation='rebin', order=3, progres
|
||||
levs = []
|
||||
for n in range(nc):
|
||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
||||
lname = op.join(dname, fname)
|
||||
lname = os.path.join(dname, fname)
|
||||
dblocks = amun_attribute(lname, 'dblocks')
|
||||
if dblocks > 0:
|
||||
levs = np.append(levs, [amun_coordinate(lname, 'levels')])
|
||||
@ -282,7 +412,7 @@ def amun_dataset(fname, vname, shrink=1, interpolation='rebin', order=3, progres
|
||||
nb = 0
|
||||
for n in range(nc):
|
||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
||||
lname = op.join(dname, fname)
|
||||
lname = os.path.join(dname, fname)
|
||||
dblocks = amun_attribute(lname, 'dblocks')
|
||||
if dblocks > 0:
|
||||
levels = amun_coordinate(lname, 'levels')
|
||||
@ -475,7 +605,11 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
'''
|
||||
from .octree import OcBase, OcNode
|
||||
from .vtkio import WriteVTK
|
||||
import os
|
||||
from warnings import warn
|
||||
import numpy as np
|
||||
import os, sys
|
||||
|
||||
warn('This function is deprecated', DeprecationWarning, stacklevel=2)
|
||||
|
||||
if not amun_compatible(fname):
|
||||
return None
|
||||
@ -487,7 +621,7 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
if label == None:
|
||||
label = vname
|
||||
|
||||
dname = op.dirname(fname)
|
||||
dname = os.path.dirname(fname)
|
||||
|
||||
if progress:
|
||||
sys.stdout.write("Data file path:\n '%s'\n" % (dname))
|
||||
@ -583,7 +717,7 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
levs = []
|
||||
for n in range(nc):
|
||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
||||
lname = op.join(dname, fname)
|
||||
lname = os.path.join(dname, fname)
|
||||
dblocks = amun_attribute(lname, 'dblocks')
|
||||
if dblocks > 0:
|
||||
levs = np.append(levs, [amun_coordinate(lname, 'levels')])
|
||||
@ -597,7 +731,7 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
nb = 0
|
||||
for n in range(nc):
|
||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
||||
lname = op.join(dname, fname)
|
||||
lname = os.path.join(dname, fname)
|
||||
dblocks = amun_attribute(lname, 'dblocks')
|
||||
if dblocks > 0:
|
||||
levels = amun_coordinate(lname, 'levels')
|
||||
@ -767,7 +901,7 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
|
||||
ofile = "{}_{:06d}.vthb".format(vname, nr)
|
||||
opath = "{}_{:06d}".format(vname, nr)
|
||||
if not op.exists(opath):
|
||||
if not os.path.exists(opath):
|
||||
os.makedirs(opath)
|
||||
with open(ofile, 'w') as vtk:
|
||||
vtk.write('<VTKFile type="vtkOverlappingAMR" version="1.1" ' + \
|
||||
@ -791,7 +925,7 @@ def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_lev
|
||||
up = lo + bm - 1
|
||||
ll = np.stack((lo,up)).T.flatten()
|
||||
if item.hasData:
|
||||
vfile = op.join(opath, fmt.format(vname, lv, no))
|
||||
vfile = os.path.join(opath, fmt.format(vname, lv, no))
|
||||
WriteVTK(vfile, label, item.data, \
|
||||
origin = (item.lower[0], item.lower[1], item.lower[2]), \
|
||||
spacing = (cw[0], cw[1], cw[2]), \
|
||||
|
Loading…
x
Reference in New Issue
Block a user