commit 346d263a83
Merge branch 'master' into reconnection
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
 setuptools.setup(
     name="amunpy",
-    version="0.6.2",
+    version="0.9.1",
     author="Grzegorz Kowal",
     author_email="grzegorz@amuncode.org",
     description="Python Interface for the AMUN code's snapshots",
@@ -23,10 +23,8 @@ setuptools.setup(
     package_dir={"": "src"},
     packages=setuptools.find_packages(where="src"),
     python_requires=">=3.6",
-    install_requires=['h5py', 'numpy'],
+    install_requires=['h5py', 'numpy', 'xxhash', 'lz4', 'zstandard'],
     extras_require={
-        "digest": ['xxhash'],
         "interpolation": ['scipy'],
-        "compression": ['lz4', 'zstandard'],
     }
 )
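The packages that used to sit behind the 'digest' and 'compression' extras are now hard requirements, while SciPy-backed interpolation remains optional. A minimal sanity check of an environment after installing this version (this snippet is not part of the package itself):

# After `pip install .` (or `pip install .[interpolation]` for SciPy support),
# the dependencies promoted to install_requires must import without any extras.
import h5py
import numpy
import xxhash      # previously only available via the 'digest' extra
import lz4
import zstandard   # lz4/zstandard were previously in the 'compression' extra

import amunpy      # the package itself, now at version 0.9.1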
@@ -12,10 +12,11 @@ See file LICENSE for more details.

 from .amunxml import *
 from .amunh5 import *
+from .amunh5_deprecated import *
 from .integrals import *

-__all__ = [ 'AmunXML', 'amun_attribute', 'amun_coordinate', 'amun_dataset', \
-            'amun_dataset_vtk', 'amun_integrals' ]
+__all__ = [ 'AmunXML', 'AmunH5', \
+            'amun_attribute', 'amun_coordinate', 'amun_dataset', 'amun_dataset_vtk', 'amun_integrals' ]

 __author__ = "Grzegorz Kowal"
 __copyright__ = "Copyright 2018-2021, Grzegorz Kowal <grzegorz@amuncode.org>"
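Both interfaces are re-exported from the package root, so legacy scripts keep working while new code can pick up the class-based reader. A short sketch, assuming the AmunH5 constructor takes the snapshot path, as the "requires a file" check in __init_snapshot__ further below suggests:

import amunpy

# new, class-based interface re-exported from .amunh5
snap = amunpy.AmunH5('p000010_00000.h5')

# old, function-based interface still reachable via .amunh5_deprecated
time = amunpy.amun_attribute('p000010_00000.h5', 'time')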
python/amunpy/src/amunpy/amun.py (new file, 1141 lines)
File diff suppressed because it is too large
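The suppressed amun.py introduces the Amun base class that AmunH5 below derives from. Its real interface is not shown in this diff, so the following skeleton is only a hypothetical reconstruction inferred from the members AmunH5 overrides and uses (path, path_is_file, dirname, dataformat, attributes, variables, chunks); the actual definition lives in amun.py.

class Amun:
    """Hypothetical sketch of the snapshot-reader base class (real one in amun.py)."""

    def __init__(self, path):
        self.path = path                      # snapshot file (or directory for other formats)
        self.attributes, self.variables, self.chunks = dict(), dict(), dict()
        self.__init_snapshot__()              # format detection, overridden per format
        self.__fill_attributes__()            # global attributes, overridden per format
        self.__fill_variables__()             # dataset names, overridden per format
        self.__fill_chunks__()                # per-chunk metadata, overridden per format

    def __init_snapshot__(self):   raise NotImplementedError
    def __fill_attributes__(self): raise NotImplementedError
    def __fill_variables__(self):  raise NotImplementedError
    def __fill_chunks__(self):     raise NotImplementedError
    def __read_binary_data__(self, dataset_name, chunk_number):
        raise NotImplementedError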
@@ -24,798 +24,113 @@
 module: AMUN

-Python module with subroutines to read AMUN code HDF5 files.
-
-The only requirements for this package are:
-
-   - h5py
-   - numpy
+This module implements an interface class to read attributes, coordinates,
+and datasets stored in AmunH5 format snapshots.

 --------------------------------------------------------------------------------
 """
-from .interpolation import interpolate
-
-import h5py as h5
-import numpy as np
-import os.path as op
-import sys
+from .amun import Amun
-
-
-def amun_compatible(fname):
-    '''
-        Subroutine checks if the HDF5 file is AMUN compatible.
-    '''
-    ...
-
-
-def amun_attribute(fname, aname):
-    '''
-        Subroutine to read global attributes from AMUN HDF5 snapshots.
-    '''
-    ...
-
-
-def amun_coordinate(fname, iname):
-    '''
-        Subroutine to read coordinate items from AMUN HDF5 snapshots.
-    '''
-    ...
-
-
-def amun_dataset(fname, vname, shrink=1, interpolation='rebin', order=3, progress=False):
-    '''
-        Subroutine to read datasets from AMUN HDF5 snapshots.
-    '''
-    ...
-
-
-def amun_dataset_vtk(fname, vname, label=None, compression='none', compression_level=19, progress=False):
-    '''
-        Subroutine to convert a dataset specified by argument 'vname' from
-        the AMUN HDF5 snapshot to OverlappedAMR VTK file.
-    '''
-    from .octree import OcBase, OcNode
-    from .vtkio import WriteVTK
-    import os
-    ...
-    base = OcBase([xmin, ymin, zmin], [xlen, ylen, zlen], rm)
-
-    nb = 0
-    for n in range(nc):
-        ...
-            for l in range(dblocks):
-                lv = levels[l] - 1
-                center = (bounds[0,:,l] + bounds[1,:,l]) / 2
-                base.createNodeBranch(center, lv)
-                base.setNodeData(center, lv, dataset[ng:-ng,ng:-ng,ng:-ng,l])
-                nb += 1
-
-    base.populateNodeData()
-
-    ofile = "{}_{:06d}.vthb".format(vname, nr)
-    opath = "{}_{:06d}".format(vname, nr)
-    if not op.exists(opath):
-        os.makedirs(opath)
-    with open(ofile, 'w') as vtk:
-        vtk.write('<VTKFile type="vtkOverlappingAMR" version="1.1" ' + 'byte_order="LittleEndian" header_type="UInt64">\n')
-        vtk.write(' <vtkOverlappingAMR ' + 'origin="{} {} {}" '.format(*base.lower) + 'grid_description="XYZ">\n')
-
-        fmt = '{}_{:0' + str(len(str(ml))) + '}_{:0' + str(len(str(base.nodes))) + 'd}.vti'
-
-        m = 0
-        for lv in range(ml):
-            cw = base.size / (rm * nn * 2**lv)
-            vtk.write(' <Block level="{}"'.format(lv) + ' spacing="{} {} {}">\n'.format(*cw))
-            no = 0
-            for item in base.getNodesFromLevel(lv):
-                lo = np.array(item.index) * bm
-                up = lo + bm - 1
-                ll = np.stack((lo,up)).T.flatten()
-                if item.hasData:
-                    vfile = op.join(opath, fmt.format(vname, lv, no))
-                    WriteVTK(vfile, label, item.data, origin = (item.lower[0], item.lower[1], item.lower[2]), spacing = (cw[0], cw[1], cw[2]), compression=compression, compression_level=compression_level)
-                    vtk.write(' <DataSet index="{}"'.format(no) + ' amr_box = "{} {} {} {} {} {}"'.format(*ll) + ' file = "{}"></DataSet>\n'.format(vfile))
-                    no += 1
-                else:
-                    vtk.write(' <DataSet index="{}"'.format(no) + ' amr_box = "{} {} {} {} {} {}"'.format(*ll) + '></DataSet>\n')
-                m += 1
-            vtk.write(' </Block>\n')
-
-        vtk.write(' </vtkOverlappingAMR>\n')
-        vtk.write('</VTKFile>')
+class AmunH5(Amun):
+    """AMUN H5 snapshot class"""
+
+    def __init_snapshot__(self):
+        """
+            Sets the data format after verifying if the snapshot is stored
+            in AmunH5 format.
+        """
+        import h5py
+
+        if not self.path_is_file:
+            raise Exception("AmunH5 requires a file not directory as the argument!")
+
+        with h5py.File(self.path, 'r') as h5:
+            if 'codes' in h5.attrs:
+                if h5.attrs['code'].astype(str) == "AMUN":
+                    self.dataformat = 'AmunH5'
+                else:
+                    raise Exception("'{}' contains attribute 'code', but its content is not 'AMUN'!".format(self.path))
+            elif 'attributes' in h5 and 'coordinates' in h5 and 'variables' in h5:
+                self.dataformat = 'AmunH5'
+            else:
+                raise Exception("{} misses one of these groups: 'attributes', 'coordinates' or 'variables'!".format(self.path))
+
+
+    def __fill_attributes__(self):
+        """
+            Reads attributes from the snapshot file and adds them to
+            the attributes' dictionary.
+        """
+        import h5py
+
+        exclude_list = ['nseeds', 'seeds', 'dblocks', 'nproc', 'dims', 'dtn', 'last_id', 'mblocks']
+
+        with h5py.File(self.path, 'r') as h5:
+            for aname in h5['attributes'].attrs:
+                if not aname in exclude_list:
+                    attr = h5['attributes'].attrs[aname]
+                    if attr.dtype == 'float64' or attr.dtype == 'float32' or \
+                       attr.dtype == 'int64' or attr.dtype == 'int32':
+                        if len(attr) > 1:
+                            self.attributes[aname] = attr.tolist()
+                        else:
+                            self.attributes[aname] = attr[0]
+                    else:
+                        self.attributes[aname] = attr[0].astype(str)
+
+        if not 'nchunks' in self.attributes and 'nprocs' in self.attributes:
+            self.attributes['nchunks'] = self.attributes['nprocs']
+            del self.attributes['nprocs']
+        if 'rdims' in self.attributes:
+            self.attributes['xblocks'] = self.attributes['rdims'][0]
+            self.attributes['yblocks'] = self.attributes['rdims'][1]
+            if self.attributes['ndims'] == 3:
+                self.attributes['zblocks'] = self.attributes['rdims'][2]
+            del self.attributes['rdims']
+
+
+    def __fill_variables__(self):
+        """
+            Reads the names of datasets stored in the snapshot and adds them
+            to the variables' dictionary.
+        """
+        import h5py
+
+        with h5py.File(self.path, 'r') as h5:
+            for variable in h5['variables']:
+                v = variable.strip()
+                self.variables[v] = v
+
+
+    def __fill_chunks__(self):
+        """
+            Retrieves metadata about datablocks stored in the snapshot's chunks
+            and adds them to the chunks' dictionary.
+        """
+        import h5py, numpy, os
+
+        self.chunkname = 'p{:06d}'.format(self.attributes['isnap']) + '_{:05d}.h5'
+        for n in range(self.attributes['nchunks']):
+            self.chunks[n] = dict()
+            self.chunks[n]['filename'] = self.chunkname.format(n)
+            cname = os.path.join(self.dirname, self.chunks[n]['filename'])
+            if os.path.exists(cname):
+                with h5py.File(cname, 'r') as h5:
+                    self.chunks[n]['dblocks'] = h5['attributes'].attrs['dblocks'][0]
+                    self.chunks[n]['levels'] = numpy.array(h5['coordinates']['levels'])
+                    self.chunks[n]['bounds'] = numpy.array(h5['coordinates']['bounds'])
+                    self.chunks[n]['coords'] = numpy.array(h5['coordinates']['coords'])
+            else:
+                raise Exception("Snapshot's chunk '{}' not present!".format(cname))
+
+
+    def __read_binary_data__(self, dataset_name, chunk_number):
+        """
+            Gets the dataset array from a given snapshot's chunk.
+        """
+        import h5py, numpy, os
+
+        cname = os.path.join(self.dirname, self.chunks[chunk_number]['filename'])
+        with h5py.File(cname, 'r') as h5:
+            return numpy.array(h5['variables'][dataset_name])
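Taken together, these methods populate self.attributes, self.variables and self.chunks and read raw blocks chunk by chunk. A hedged usage sketch follows; the snapshot name matches the p??????_?????.h5 pattern used elsewhere in this file, and the higher-level dataset assembly is assumed to live in the suppressed Amun base class:

from amunpy import AmunH5

snap = AmunH5('p000010_00000.h5')

print(snap.dataformat)              # 'AmunH5'
print(snap.attributes['nchunks'])   # renamed from the legacy 'nprocs' attribute
print(sorted(snap.variables))       # dataset names found in the 'variables' group

# per-chunk metadata collected by __fill_chunks__()
levels = snap.chunks[0]['levels']

# raw block data for one chunk, as read by __read_binary_data__()
dens = snap.__read_binary_data__('dens', 0)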
python/amunpy/src/amunpy/amunh5_deprecated.py (new file, 848 lines)
@@ -0,0 +1,848 @@
"""
================================================================================

  This file is part of the AMUN source code, a program to perform
  Newtonian or relativistic magnetohydrodynamical simulations on uniform or
  adaptive mesh.

  Copyright (C) 2018-2021 Grzegorz Kowal <grzegorz@amuncode.org>

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program.  If not, see <http://www.gnu.org/licenses/>.

================================================================================

 module: AMUN

  Python module with subroutines to read AMUN code HDF5 files.

  The only requirements for this package are:

     - h5py
     - numpy

--------------------------------------------------------------------------------
"""
#===============================================================================
'''
    DEPRECATED FUNCTIONS
'''
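Every function in this module emits a DeprecationWarning through warnings.warn() before doing any work. A small sketch of how a caller can surface that warning, or turn it into an error, while migrating to the class-based interface:

import warnings
from amunpy import amun_attribute

with warnings.catch_warnings():
    warnings.simplefilter("error", DeprecationWarning)   # make the deprecation fatal
    try:
        time = amun_attribute('p000010_00000.h5', 'time')
    except DeprecationWarning as w:
        print("deprecated API used:", w)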
def amun_compatible(fname):
    '''
        Subroutine checks if the HDF5 file is AMUN compatible.

        Arguments:

            fname - the HDF5 file name;

        Return values:

            True or False;

        Examples:

            comp = amun_compatible('p000010_00000.h5')

    '''
    from warnings import warn
    import h5py as h5

    warn('This function is deprecated', DeprecationWarning, stacklevel=2)

    with h5.File(fname, 'r') as f:
        if 'codes' in f.attrs:
            if f.attrs['code'].astype(str) == "AMUN":
                return True
            else:
                print("'%s' contains attribute 'code'," % fname, \
                      " but it is not 'AMUN'!")
                return False
        elif 'attributes' in f and 'coordinates' in f and \
             'variables' in f:
            return True
        else:
            print("'%s' misses one of these groups:" % fname, \
                  "'attributes', 'coordinates' or 'variables'!")
            return False


def amun_attribute(fname, aname):
    '''
        Subroutine to read global attributes from AMUN HDF5 snapshots.

        Arguments:

            fname - the HDF5 file name;
            aname - the attribute name;

        Return values:

            ret - the value of the attribute or None;

        Examples:

            time = amun_attribute('p000010_00000.h5', 'time')

    '''
    from warnings import warn
    import h5py as h5
    import numpy as np

    warn('This function is deprecated', DeprecationWarning, stacklevel=2)

    if not amun_compatible(fname):
        return None

    with h5.File(fname, 'r') as f:
        if aname in f['attributes'].attrs:
            attr = f['attributes'].attrs[aname]
            if attr.dtype.type is np.string_:
                ret = np.squeeze(attr).astype(str)
            else:
                ret = np.squeeze(attr)
            return ret
        else:
            print("Attribute '%s' cannot be found in '%s'!" % (aname, fname))
            return None


def amun_coordinate(fname, iname):
    '''
        Subroutine to read coordinate items from AMUN HDF5 snapshots.

        Arguments:

            fname - the HDF5 file name;
            iname - the item name;

        Return values:

            ret - the value of the item or None;

        Examples:

            bounds = amun_coordinate('p000010_00000.h5', 'bounds')

    '''
    from warnings import warn
    import h5py as h5
    import numpy as np

    warn('This function is deprecated', DeprecationWarning, stacklevel=2)

    if not amun_compatible(fname):
        return None

    with h5.File(fname, 'r') as f:
        if iname in f['coordinates']:
            return np.array(f['coordinates'][iname])
        else:
            print("Coordinate item '%s' not found in group 'coordinate' of '%s'!" % (iname, fname))
            return None


def amun_dataset(fname, vname, shrink=1, interpolation='rebin', order=3, progress=False):
    '''
        Subroutine to read datasets from AMUN HDF5 snapshots.

        Arguments:

            fname    - the HDF5 file name;
            vname    - the variable name;
            shrink   - the shrink factor (must be the power of 2 and not larger
                       than the block size);
            progress - the progress bar switch;

        Return values:

            ret - the array of values for the variable;

        Examples:

            dn = amun_dataset('p000010_00000.h5', 'dens')

    '''
    from .interpolation import interpolate
    from warnings import warn
    import h5py as h5
    import numpy as np
    import os, sys

    warn('This function is deprecated', DeprecationWarning, stacklevel=2)

    if not amun_compatible(fname):
        return None

    dname = os.path.dirname(fname)

    if progress:
        sys.stdout.write("Data file path:\n  '%s'\n" % (dname))

    # get attributes necessary to reconstruct the domain
    #
    eqsys = amun_attribute(fname, 'eqsys')
    eos   = amun_attribute(fname, 'eos')
    nr    = amun_attribute(fname, 'isnap')
    nc    = amun_attribute(fname, 'nprocs')
    nl    = amun_attribute(fname, 'nleafs')
    if eos == 'adi':
        gm = amun_attribute(fname, 'adiabatic_index')

    # get block dimensions and the maximum level
    #
    ndims = amun_attribute(fname, 'ndims')
    nn    = amun_attribute(fname, 'ncells')
    bm    = np.array([nn, nn, nn])
    if ndims == 2:
        bm[2] = 1
    ng    = amun_attribute(fname, 'nghosts')
    ml    = amun_attribute(fname, 'maxlev')

    # get the base block dimensions
    #
    rm    = amun_attribute(fname, 'bdims')
    if rm is None:
        rm = amun_attribute(fname, 'domain_base_dims')
    if rm is None:
        rm = amun_attribute(fname, 'rdims')
    if rm is None:
        return None

    # build the list of supported variables
    #
    variables = []
    with h5.File(fname, 'r') as f:
        for var in f['variables'].keys():
            variables.append(var)

    # add derived variables if possible
    #
    variables.append('level')
    if 'velx' in variables and 'vely' in variables and 'velz' in variables:
        variables.append('velo')
        variables.append('divv')
        variables.append('vort')
    if 'magx' in variables and 'magy' in variables and 'magz' in variables:
        variables.append('magn')
        variables.append('divb')
        variables.append('curr')
    if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' and 'pres' in variables:
        variables.append('eint')
    if 'dens' in variables and 'pres' in variables:
        variables.append('temp')
    if (eqsys == 'hd' or eqsys == 'mhd') and 'dens' in variables and 'velx' in variables and 'vely' in variables and 'velz' in variables:
        variables.append('ekin')
    if (eqsys == 'mhd' or eqsys == 'srmhd') and 'magx' in variables and 'magy' in variables and 'magz' in variables:
        variables.append('emag')
    if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables:
        variables.append('etot')
    if eqsys == 'mhd' and 'eint' in variables and 'ekin' in variables and 'emag' in variables:
        variables.append('etot')
    if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables:
        variables.append('lore')

    # check if the requested variable is in the variable list
    #
    if not vname in variables:
        print('The requested variable cannot be extracted from the file datasets!')
        return None

    # check if the shrink parameter is correct (block dimensions should be
    # divisible by the shrink factor)
    #
    shrink = max(1, int(shrink))
    if shrink > 1:
        if (nn % shrink) != 0:
            print('The block dimension should be divisible by the shrink factor!')
            return None
        sh = shrink
        while(sh > 2 and sh % 2 == 0):
            sh = int(sh / 2)
        if (sh % 2) != 0:
            print('The shrink factor should be a power of 2!')
            return None

    # determine the actual maximum level from the blocks
    #
    levs = []
    for n in range(nc):
        fname   = 'p%06d_%05d.h5' % (nr, n)
        lname   = os.path.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levs = np.append(levs, [amun_coordinate(lname, 'levels')])
    ml = int(levs.max())

    # prepare dimensions of the output array and allocate it
    #
    dm  = np.array(rm[0:ndims] * bm[0:ndims] * 2**(ml - 1) / shrink, dtype=np.int32)
    ret = np.zeros(dm[::-1])

    # iterate over all subdomain files
    #
    nb = 0
    for n in range(nc):
        fname   = 'p%06d_%05d.h5' % (nr, n)
        lname   = os.path.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levels = amun_coordinate(lname, 'levels')
            coords = amun_coordinate(lname, 'coords')
            dx     = amun_coordinate(lname, 'dx')
            dy     = amun_coordinate(lname, 'dy')
            dz     = amun_coordinate(lname, 'dz')
            with h5.File(lname, 'r') as f:
                g = f['variables']
                if vname == 'level':
                    dataset = np.zeros(g[variables[0]].shape)
                    for l in range(dblocks):
                        dataset[:,:,:,l] = levels[l]
                elif vname == 'velo':
                    dataset = np.sqrt(g['velx'][:,:,:,:]**2 + g['vely'][:,:,:,:]**2 + g['velz'][:,:,:,:]**2)
                elif vname == 'magn':
                    dataset = np.sqrt(g['magx'][:,:,:,:]**2 + g['magy'][:,:,:,:]**2 + g['magz'][:,:,:,:]**2)
                elif vname == 'eint':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:]
                elif vname == 'ekin':
                    dataset = 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 + g['vely'][:,:,:,:]**2 + g['velz'][:,:,:,:]**2)
                elif vname == 'emag':
                    dataset = 0.5 * (g['magx'][:,:,:,:]**2 + g['magy'][:,:,:,:]**2 + g['magz'][:,:,:,:]**2)
                elif vname == 'etot':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:] \
                            + 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 + g['vely'][:,:,:,:]**2 + g['velz'][:,:,:,:]**2)
                    if eqsys == 'mhd':
                        dataset += 0.5 * (g['magx'][:,:,:,:]**2 + g['magy'][:,:,:,:]**2 + g['magz'][:,:,:,:]**2)
                elif vname == 'temp':
                    dataset = g['pres'][:,:,:,:] / g['dens'][:,:,:,:]
                elif vname == 'lore':
                    dataset = 1.0 / np.sqrt(1.0 - (g['velx'][:,:,:,:]**2 + g['vely'][:,:,:,:]**2 + g['velz'][:,:,:,:]**2))
                elif vname == 'divv':
                    dataset = np.zeros(g['velx'].shape)
                    fields  = [ 'velx', 'vely', 'velz' ]
                    h       = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) - np.roll(g[v][:,:,:,:], 1, axis=2)) / h[i][levels[:] - 1]
                elif vname == 'divb':
                    dataset = np.zeros(g['magx'].shape)
                    fields  = [ 'magx', 'magy', 'magz' ]
                    h       = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) - np.roll(g[v][:,:,:,:], 1, axis=2)) / h[i][levels[:] - 1]
                elif vname == 'vort':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) - np.roll(g['velz'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=0) - np.roll(g['vely'][:,:,:,:], 1, axis=0)) / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=0) - np.roll(g['velx'][:,:,:,:], 1, axis=0)) / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) - np.roll(g['velz'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) - np.roll(g['vely'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) - np.roll(g['velx'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                    else:
                        wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) - np.roll(g['velz'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) - np.roll(g['velz'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) - np.roll(g['vely'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) - np.roll(g['velx'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                elif vname == 'curr':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) - np.roll(g['magz'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=0) - np.roll(g['magy'][:,:,:,:], 1, axis=0)) / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=0) - np.roll(g['magx'][:,:,:,:], 1, axis=0)) / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) - np.roll(g['magz'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) - np.roll(g['magy'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) - np.roll(g['magx'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                    else:
                        wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) - np.roll(g['magz'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) - np.roll(g['magz'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) - np.roll(g['magy'][:,:,:,:], 1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) - np.roll(g['magx'][:,:,:,:], 1, axis=1)) / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                else:
                    dataset = g[vname][:,:,:,:]

                # rescale all blocks to the effective resolution
                #
                for l in range(dblocks):
                    nn = 2**(ml - levels[l])
                    if nn <= shrink:
                        method = 'rebin'
                    else:
                        method = interpolation
                    cm   = np.array(bm[0:ndims] * nn / shrink, dtype=np.int32)
                    ibeg = coords[0:ndims,l] * cm[0:ndims]
                    iend = ibeg + cm[0:ndims]
                    if ndims == 3:
                        ib, jb, kb = ibeg[0], ibeg[1], ibeg[2]
                        ie, je, ke = iend[0], iend[1], iend[2]
                        ret[kb:ke,jb:je,ib:ie] = interpolate(dataset[:,:,:,l], cm, ng, method=method, order=order)
                    else:
                        ib, jb = ibeg[0], ibeg[1]
                        ie, je = iend[0], iend[1]
                        ret[jb:je,ib:ie] = interpolate(dataset[0,:,:,l], cm, ng, method=method, order=order)

                    nb += 1

                    # print progress bar if desired
                    #
                    if progress:
                        sys.stdout.write('\r')
                        sys.stdout.write("Reading '%s' from '%s': block %d from %d" % (vname, fname, nb, nl))
                        sys.stdout.flush()

    if (progress):
        sys.stdout.write('\n')
        sys.stdout.flush()

    return ret
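The shrink validation in amun_dataset() above accepts only powers of two that also divide the block size. A standalone sketch of an equivalent check (shrink_is_valid is a hypothetical helper, not part of amunpy):

def shrink_is_valid(shrink, nn):
    """Return True when `shrink` is a power of two dividing the block size `nn`."""
    shrink = max(1, int(shrink))
    if shrink == 1:
        return True
    if nn % shrink != 0:
        return False                          # block dimension must be divisible by the shrink factor
    return (shrink & (shrink - 1)) == 0       # power-of-two test

print(shrink_is_valid(4, 16), shrink_is_valid(3, 16))   # True False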
|
def amun_dataset_vtk(fname, vname, label=None, compression=None, compression_level=19, progress=False):
    '''
    Subroutine to convert a dataset specified by argument 'vname' from
    the AMUN HDF5 snapshot to OverlappedAMR VTK file.

    Arguments:

      fname - the HDF5 file name;
      vname - the variable name;
      label - the variable label (long name);
      compression - the compression type: 'lz4', 'zlib', 'lzma'
      progress - the progress bar switch;

    Examples:

      dn = amun_dataset_vtk('p000010_00000.h5', 'dens')

    '''
    from .octree import OcBase, OcNode
    from .vtkio import WriteVTK
    from warnings import warn
    import numpy as np
    import os, sys

    warn('This function is deprecated', DeprecationWarning, stacklevel=2)

    if not amun_compatible(fname):
        return None

    if amun_attribute(fname, 'ndims') < 3:
        print('Subroutine amun_dataset_vtk() supports only 3D domains.')
        return None

    if label == None:
        label = vname

    dname = os.path.dirname(fname)

    if progress:
        sys.stdout.write("Data file path:\n '%s'\n" % (dname))

    # get attributes necessary to reconstruct the domain
    #
    eqsys = amun_attribute(fname, 'eqsys')
    eos = amun_attribute(fname, 'eos')
    nr = amun_attribute(fname, 'isnap')
    nc = amun_attribute(fname, 'nprocs')
    nl = amun_attribute(fname, 'nleafs')
    if eos == 'adi':
        gm = amun_attribute(fname, 'adiabatic_index')

    # get block dimensions and the maximum level
    #
    ndims = amun_attribute(fname, 'ndims')
    nn = amun_attribute(fname, 'ncells')
    bm = np.array([nn, nn, nn])
    ng = amun_attribute(fname, 'nghosts')
    ml = amun_attribute(fname, 'maxlev')

    # get the base block dimensions
    #
    rm = amun_attribute(fname, 'bdims')
    if rm is None:
        rm = amun_attribute(fname, 'domain_base_dims')
    if rm is None:
        rm = amun_attribute(fname, 'rdims')
    if rm is None:
        return None

    # get domain bounds
    #
    xmin = amun_attribute(fname, 'xmin')
    ymin = amun_attribute(fname, 'ymin')
    zmin = amun_attribute(fname, 'zmin')
    xlen = amun_attribute(fname, 'xmax') - xmin
    ylen = amun_attribute(fname, 'ymax') - ymin
    zlen = amun_attribute(fname, 'zmax') - zmin

    # build the list of supported variables
    #
    variables = []
    with h5.File(fname, 'r') as f:
        for var in f['variables'].keys():
            variables.append(var)

    # add derived variables if possible
    #
    variables.append('level')
    if 'velx' in variables and 'vely' in variables and 'velz' in variables:
        variables.append('velo')
        variables.append('divv')
        variables.append('vort')
    if 'magx' in variables and 'magy' in variables and 'magz' in variables:
        variables.append('magn')
        variables.append('divb')
        variables.append('curr')
    if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' \
                                         and 'pres' in variables:
        variables.append('eint')
    if 'dens' in variables and 'pres' in variables:
        variables.append('temp')
    if (eqsys == 'hd' or eqsys == 'mhd') \
                     and 'dens' in variables \
                     and 'velx' in variables \
                     and 'vely' in variables \
                     and 'velz' in variables:
        variables.append('ekin')
    if (eqsys == 'mhd' or eqsys == 'srmhd') \
                      and 'magx' in variables \
                      and 'magy' in variables \
                      and 'magz' in variables:
        variables.append('emag')
    if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables:
        variables.append('etot')
    if eqsys == 'mhd' and 'eint' in variables \
                      and 'ekin' in variables \
                      and 'emag' in variables:
        variables.append('etot')
    if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables:
        variables.append('lore')

    # check if the requested variable is in the variable list
    #
    if not vname in variables:
        print('The requested variable cannot be extracted from the file datasets!')
        return None

    # determine the actual maximum level from the blocks
    #
    levs = []
    for n in range(nc):
        fname = 'p%06d_%05d.h5' % (nr, n)
        lname = os.path.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levs = np.append(levs, [amun_coordinate(lname, 'levels')])
    ml = int(levs.max())

    # create octree base
    base = OcBase([xmin, ymin, zmin], [xlen, ylen, zlen], rm)

    # iterate over all subdomain files
    #
    nb = 0
    for n in range(nc):
        fname = 'p%06d_%05d.h5' % (nr, n)
        lname = os.path.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levels = amun_coordinate(lname, 'levels')
            coords = amun_coordinate(lname, 'coords')
            bounds = amun_coordinate(lname, 'bounds')
            dx = amun_coordinate(lname, 'dx')
            dy = amun_coordinate(lname, 'dy')
            dz = amun_coordinate(lname, 'dz')
            with h5.File(lname, 'r') as f:
                g = f['variables']
                if vname == 'level':
                    dataset = np.zeros(g[variables[0]].shape)
                    for l in range(dblocks):
                        dataset[:,:,:,l] = levels[l]
                elif vname == 'velo':
                    dataset = np.sqrt(g['velx'][:,:,:,:]**2 \
                                    + g['vely'][:,:,:,:]**2 \
                                    + g['velz'][:,:,:,:]**2)
                elif vname == 'magn':
                    dataset = np.sqrt(g['magx'][:,:,:,:]**2 \
                                    + g['magy'][:,:,:,:]**2 \
                                    + g['magz'][:,:,:,:]**2)
                elif vname == 'eint':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:]
                elif vname == 'ekin':
                    dataset = 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
                                                        + g['vely'][:,:,:,:]**2 \
                                                        + g['velz'][:,:,:,:]**2)
                elif vname == 'emag':
                    dataset = 0.5 * (g['magx'][:,:,:,:]**2 \
                                   + g['magy'][:,:,:,:]**2 \
                                   + g['magz'][:,:,:,:]**2)
                elif vname == 'etot':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:] \
                            + 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
                                                        + g['vely'][:,:,:,:]**2 \
                                                        + g['velz'][:,:,:,:]**2)
                    if eqsys == 'mhd':
                        dataset += 0.5 * (g['magx'][:,:,:,:]**2 \
                                        + g['magy'][:,:,:,:]**2 \
                                        + g['magz'][:,:,:,:]**2)
                elif vname == 'temp':
                    dataset = g['pres'][:,:,:,:] / g['dens'][:,:,:,:]
                elif vname == 'lore':
                    dataset = 1.0 / np.sqrt(1.0 - (g['velx'][:,:,:,:]**2 \
                                                 + g['vely'][:,:,:,:]**2 \
                                                 + g['velz'][:,:,:,:]**2))
                elif vname == 'divv':
                    dataset = np.zeros(g['velx'].shape)
                    fields = [ 'velx', 'vely', 'velz' ]
                    h = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
                                        - np.roll(g[v][:,:,:,:], 1, axis=2)) \
                                        / h[i][levels[:] - 1]
                elif vname == 'divb':
                    dataset = np.zeros(g['magx'].shape)
                    fields = [ 'magx', 'magy', 'magz' ]
                    h = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
                                        - np.roll(g[v][:,:,:,:], 1, axis=2)) \
                                        / h[i][levels[:] - 1]
                elif vname == 'vort':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velz'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['vely'][:,:,:,:], 1, axis=0)) \
                                  / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['velx'][:,:,:,:], 1, axis=0)) \
                                  / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['velz'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['vely'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velx'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                    else:
                        wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velz'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['velz'][:,:,:,:], 1, axis=2)) \
                                    / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['vely'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velx'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                elif vname == 'curr':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magz'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['magy'][:,:,:,:], 1, axis=0)) \
                                  / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['magx'][:,:,:,:], 1, axis=0)) \
                                  / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['magz'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['magy'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magx'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                    else:
                        wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magz'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['magz'][:,:,:,:], 1, axis=2)) \
                                    / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['magy'][:,:,:,:], 1, axis=2)) \
                                  / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magx'][:,:,:,:], 1, axis=1)) \
                                  / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                else:
                    dataset = g[vname][:,:,:,:]

                # rescale all blocks to the effective resolution
                #
                for l in range(dblocks):

                    lv = levels[l] - 1

                    center = (bounds[0,:,l] + bounds[1,:,l]) / 2
                    base.createNodeBranch(center, lv)
                    base.setNodeData(center, lv, dataset[ng:-ng,ng:-ng,ng:-ng,l])

                    nb += 1

                    # print progress bar if desired
                    #
                    if progress:
                        sys.stdout.write('\r')
                        sys.stdout.write("Reading '%s' from '%s': block %d from %d" \
                                         % (vname, fname, nb, nl))
                        sys.stdout.flush()

    if (progress):
        sys.stdout.write('\n')
        sys.stdout.flush()

    if progress:
        sys.stdout.write("Populating AMR structure\n")
    base.populateNodeData()

    if progress:
        sys.stdout.write("Generating OverlappingAMR VTK files\n")

    ofile = "{}_{:06d}.vthb".format(vname, nr)
    opath = "{}_{:06d}".format(vname, nr)
    if not os.path.exists(opath):
        os.makedirs(opath)
    with open(ofile, 'w') as vtk:
        vtk.write('<VTKFile type="vtkOverlappingAMR" version="1.1" ' + \
                  'byte_order="LittleEndian" header_type="UInt64">\n')
        vtk.write(' <vtkOverlappingAMR ' + \
                  'origin="{} {} {}" '.format(*base.lower) + \
                  'grid_description="XYZ">\n')

        fmt = '{}_{:0' + str(len(str(ml))) + '}_{:0' + str(len(str(base.nodes))) + 'd}.vti'

        m = 0
        for lv in range(ml):

            cw = base.size / (rm * nn * 2**lv)
            vtk.write(' <Block level="{}"'.format(lv) + \
                      ' spacing="{} {} {}">\n'.format(*cw))

            no = 0
            for item in base.getNodesFromLevel(lv):
                lo = np.array(item.index) * bm
                up = lo + bm - 1
                ll = np.stack((lo,up)).T.flatten()
                if item.hasData:
                    vfile = os.path.join(opath, fmt.format(vname, lv, no))
                    WriteVTK(vfile, label, item.data, \
                             origin = (item.lower[0], item.lower[1], item.lower[2]), \
                             spacing = (cw[0], cw[1], cw[2]), \
                             compression=compression, compression_level=compression_level)
                    vtk.write(' <DataSet index="{}"'.format(no) + \
                              ' amr_box = "{} {} {} {} {} {}"'.format(*ll) + \
                              ' file = "{}"></DataSet>\n'.format(vfile))
                    no += 1
                else:
                    vtk.write(' <DataSet index="{}"'.format(no) + \
                              ' amr_box = "{} {} {} {} {} {}"'.format(*ll) + \
                              '></DataSet>\n')
                m += 1

                if progress:
                    sys.stdout.write('\r')
                    sys.stdout.write("Storing AMR block {} from {}".format(m, base.nodes))
                    sys.stdout.flush()

            vtk.write(' </Block>\n')

        vtk.write(' </vtkOverlappingAMR>\n')
        vtk.write('</VTKFile>')

    if (progress):
        sys.stdout.write('\n')
        sys.stdout.flush()
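The 'divv', 'divb', 'vort' and 'curr' datasets above are all built from periodic
central differences evaluated with numpy.roll on the block arrays. A minimal
sketch of that stencil on a hypothetical uniform 1-D grid (not part of the
package, shown only to illustrate the formula):

    import numpy as np

    N  = 64
    dx = 2.0 * np.pi / N
    x  = (np.arange(N) + 0.5) * dx
    f  = np.sin(x)

    # df/dx ~ (f[i+1] - f[i-1]) / (2 dx); np.roll(f, -1) shifts f[i+1] into
    # slot i and wraps around the array ends, exactly as in the code above.
    dfdx = 0.5 * (np.roll(f, -1) - np.roll(f, 1)) / dx

    print(abs(dfdx - np.cos(x)).max())   # small discretization error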
File diff suppressed because it is too large
Load Diff
@@ -22,14 +22,10 @@
 
 ================================================================================
 
-module: AMUN
+module: INTERPOLATION
 
-Python module with subroutines to read AMUN code HDF5 files.
-
-The only requirements for this package are:
-
- - numpy
- - scipy (optional, for data interpolation)
+Support module for Amun snapshots for the block prolongation with different
+types of interpolation.
 
 --------------------------------------------------------------------------------
 """
@@ -43,154 +39,129 @@ except ImportError:
 
 
 def rebin(a, newshape):
     '''
     Subroutine changes the size of the input array to to new shape,
     by copying cells or averaging them.
     '''
     assert len(a.shape) == len(newshape)
 
     m = a.ndim - 1
     if (a.shape[m] > newshape[m]):
         if a.ndim == 3:
-            nn = [newshape[0], int(a.shape[0] / newshape[0]),
-                  newshape[1], int(a.shape[1] / newshape[1]),
-                  newshape[2], int(a.shape[2] / newshape[2])]
+            nn = [newshape[0], a.shape[0] // newshape[0], \
+                  newshape[1], a.shape[1] // newshape[1], \
+                  newshape[2], a.shape[2] // newshape[2]]
             return a.reshape(nn).mean(5).mean(3).mean(1)
         else:
-            nn = [newshape[0], int(a.shape[0] / newshape[0]),
-                  newshape[1], int(a.shape[1] / newshape[1])]
+            nn = [newshape[0], a.shape[0] // newshape[0], \
+                  newshape[1], a.shape[1] // newshape[1]]
             return a.reshape(nn).mean(3).mean(1)
     else:
         for n in range(a.ndim):
-            a = np.repeat(a, int(newshape[n] / a.shape[n]), axis=n)
+            a = np.repeat(a, newshape[n] // a.shape[n], axis=n)
         return(a)
 
 
-def interpolate(a, newshape, nghosts, method='rebin', order=3):
+def interpolate(a, newshape, nghosts=0, method=None, order=1):
     '''
     Subroutine rescales the block by interpolating its values.
     '''
-    if (method == 'rebin' or not scipy_available):
-
-        # calculate the indices in order to remove the ghost zones
-        #
-        if a.ndim == 3:
-            ib = nghosts
-            jb = nghosts
-            kb = nghosts
-            ie = a.shape[2] - nghosts
-            je = a.shape[1] - nghosts
-            ke = a.shape[0] - nghosts
-            if (a.shape[0] == 1):
-                kb = 0
-                ke = 1
-
-            return rebin(a[kb:ke,jb:je,ib:ie], newshape)
-        else:
-            ib = nghosts
-            jb = nghosts
-            ie = a.shape[1] - nghosts
-            je = a.shape[0] - nghosts
-
-            return rebin(a[jb:je,ib:ie], newshape)
-
-    elif (method == 'zoom'):
-
-        fc = int(newshape[1] / (a.shape[1] - 2 * nghosts))
-        ib, ie = fc * nghosts, - fc * nghosts
-        jb, je = fc * nghosts, - fc * nghosts
-        if a.ndim == 3:
-            kb, ke = fc * nghosts, - fc * nghosts
-            if (a.shape[0] == 1):
-                kb = 0
-                ke = 1
-
-            return zoom(a, fc, order=order, grid_mode=True, mode='nearest')[kb:ke,jb:je,ib:ie]
-        else:
-            return zoom(a, fc, order=order, grid_mode=True, mode='nearest')[jb:je,ib:ie]
-
-    elif (method == 'monotonic' or method == 'pchip'):
-
-        dims = np.arange(a.ndim)
-
-        q = a
-
-        for n in dims:
-
-            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
-            q = q - d2 / 24.0
-
-            d = np.array(q.shape)
-
-            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
-            xn = np.arange(0.5, newshape[n]) / newshape[n]
-
-            u = q.reshape([d[0], int(q.size / d[0])])
-            f = np.zeros([newshape[n], int(q.size / d[0])])
-            for i in range(int(q.size / d[0])):
-                f[:,i] = pchip_interpolate(xo, u[:,i], xn)
-
-            d[0] = newshape[n]
-            f = f.reshape(d)
-
-            q = f.transpose(np.roll(dims, -1))
-
-        return q
-
-    elif (method == 'spline'):
-
-        dims = np.arange(a.ndim)
-
-        q = a
-
-        for n in dims:
-
-            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
-            q = q - d2 / 24.0
-
-            d = np.array(q.shape)
-
-            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
-            xn = np.arange(0.5, newshape[n]) / newshape[n]
-
-            u = q.reshape([d[0], int(q.size / d[0])])
-            f = np.zeros([newshape[n], int(q.size / d[0])])
-            for i in range(int(q.size / d[0])):
-                t = splrep(xo, u[:,i], k=5, s=0.0)
-                f[:,i] = splev(xn, t)
-
-            d[0] = newshape[n]
-            f = f.reshape(d)
-
-            q = f.transpose(np.roll(dims, -1))
-
-        return q
-
-    else:
-
-        dims = np.arange(a.ndim)
-
-        q = a
-
-        for n in dims:
-
-            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
-            q = q - d2 / 24.0
-
-            d = np.array(q.shape)
-
-            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
-            xn = np.arange(0.5, newshape[n]) / newshape[n]
-
-            u = q.reshape([d[0], int(q.size / d[0])])
-            f = np.zeros([newshape[n], int(q.size / d[0])])
-            for i in range(int(q.size / d[0])):
-                t = interp1d(xo, u[:,i], kind=method)
-                f[:,i] = t(xn)
-
-            d[0] = newshape[n]
-            f = f.reshape(d)
-
-            q = f.transpose(np.roll(dims, -1))
-
-        return q
+    if method == None or method == 'rebin' or not scipy_available:
+
+        ng = nghosts
+        if a.ndim == 3:
+            return rebin(a[ng:-ng,ng:-ng,ng:-ng], newshape)
+        else:
+            return rebin(a[ng:-ng,ng:-ng], newshape)
+
+    elif method == 'zoom':
+
+        zf = (newshape[1] // (a.shape[1] - 2 * nghosts))
+        ng = zf * nghosts
+        if a.ndim == 3:
+            return zoom(a, zf, order=order, grid_mode=True, mode='nearest')[ng:-ng,ng:-ng,ng:-ng]
+        else:
+            return zoom(a, zf, order=order, grid_mode=True, mode='nearest')[ng:-ng,ng:-ng]
+
+    elif method in [ 'monotonic', 'pchip' ]:
+
+        dims = np.arange(a.ndim)
+        q = a
+
+        for n in dims:
+
+            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
+            q = q - d2 / 24.0
+
+            d = np.array(q.shape)
+
+            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
+            xn = np.arange(0.5, newshape[n]) / newshape[n]
+
+            u = q.reshape([d[0], q.size // d[0]])
+            f = np.zeros([newshape[n], q.size // d[0]])
+            for i in range(q.size // d[0]):
+                f[:,i] = pchip_interpolate(xo, u[:,i], xn)
+
+            d[0] = newshape[n]
+            f = f.reshape(d)
+
+            q = f.transpose(np.roll(dims, -1))
+
+        return q
+
+    elif method == 'spline':
+
+        dims = np.arange(a.ndim)
+        q = a
+
+        for n in dims:
+
+            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
+            q = q - d2 / 24.0
+
+            d = np.array(q.shape)
+
+            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
+            xn = np.arange(0.5, newshape[n]) / newshape[n]
+
+            u = q.reshape([d[0], q.size // d[0]])
+            f = np.zeros([newshape[n], q.size // d[0]])
+            for i in range(q.size // d[0]):
+                t = splrep(xo, u[:,i], k=5, s=0.0)
+                f[:,i] = splev(xn, t)
+
+            d[0] = newshape[n]
+            f = f.reshape(d)
+
+            q = f.transpose(np.roll(dims, -1))
+
+        return q
+
+    else:
+
+        dims = np.arange(a.ndim)
+        q = a
+
+        for n in dims:
+
+            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
+            q = q - d2 / 24.0
+
+            d = np.array(q.shape)
+
+            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
+            xn = np.arange(0.5, newshape[n]) / newshape[n]
+
+            u = q.reshape([d[0], q.size // d[0]])
+            f = np.zeros([newshape[n], q.size // d[0]])
+            for i in range(q.size // d[0]):
+                t = interp1d(xo, u[:,i], kind=method)
+                f[:,i] = t(xn)
+
+            d[0] = newshape[n]
+            f = f.reshape(d)
+
+            q = f.transpose(np.roll(dims, -1))
+
+        return q
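The rebin() path above shrinks a block by reshaping it so that every coarse cell
becomes a small sub-array and then averaging over the extra axes, while
enlarging simply repeats cells along each axis. A short 2-D sketch with made-up
values (not taken from any snapshot):

    import numpy as np

    a = np.arange(16.0).reshape(4, 4)

    # shrink 4x4 -> 2x2: average over 2x2 groups of cells
    nn = [2, 4 // 2, 2, 4 // 2]
    print(a.reshape(nn).mean(3).mean(1))
    # [[ 2.5  4.5]
    #  [10.5 12.5]]

    # enlarge 4x4 -> 8x8: repeat every cell twice along both axes
    print(np.repeat(np.repeat(a, 2, axis=0), 2, axis=1).shape)   # (8, 8)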
@@ -22,120 +22,67 @@
 
 ================================================================================
 
-submodule: vtkio.py
+module: VTKIO
 
-This submodule provides a function to store a dataset as VTK image file.
-
-The only requirements for this package are:
-
- - numpy
- - lz4 (for LZ4 compression)
+Module provides a function to store given dataset in the VTK image file.
 
 --------------------------------------------------------------------------------
 """
-import base64, numpy, struct, zlib, lz4.block, lzma
 
-def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
-             spacing = (1.0, 1.0, 1.0), points = False, encode = True, \
-             compression = "none", block_size = 32768, lz4mode = 'fast', \
-             compression_level = 6, verbose = False):
+def WriteVTK(vtkfile, vname, data, \
+             origin=(0, 0, 0), spacing=(1, 1, 1), \
+             compression=None, compression_level=19, encode=True, \
+             lz4mode='default', lz4acceleration=1, block_size=32768, \
+             points=False, verbose=False):
 
-    # Separate cases when the input is a vector field and scalar field.
-    #
+    import base64, numpy, struct, zlib, lz4.block, lzma
+
     if isinstance(data, (list, tuple)):
 
-        # Check if all list or tuple components are arrays.
-        #
-        allarrays = True
-        for d in data:
-            allarrays = allarrays & isinstance(d, (numpy.ndarray))
+        if not all(isinstance(d, (numpy.ndarray)) for d in data):
+            raise Exception('All input data components in WriteVTK must be arrays!')
 
-        # Quit if not all elements are arrays.
-        #
-        if not allarrays:
-            print('All input data components must be arrays!')
-            return
-
-        # Check if the components have the same dimensions
-        #
-        sameshapes = True
-        for d in data:
-            sameshapes = sameshapes & (data[0].shape == d.shape)
-
-        # Quit if elements have different dimensions.
-        #
-        if not sameshapes:
-            print('All input data component must have the same dimensions!')
-            return
-
-        # Define the type of input data.
-        #
+        if not all(data[0].shape == d.shape for d in data):
+            raise Exception('All input data components in WriteVTK must have the same dimensions!')
+
         dtype = 'vector'
-
-        # Get the number of vector components.
-        #
-        nc = len(data)
-
-        # Get the number of dimansions
-        #
-        nd = data[0].ndim
-
-        # Get the variable dimensions.
-        #
-        dm = data[0].shape
+        ncomp = len(data)
+        ndims = data[0].ndim
+        dims = data[0].shape
 
     elif isinstance(data, (numpy.ndarray)):
 
-        # Define the type of input data.
-        #
         dtype = 'scalar'
-
-        # Get the number of vector components.
-        #
-        nc = 1
-
-        # Get the number of dimansions
-        #
-        nd = data.ndim
-
-        # Get the variable dimensions.
-        #
-        dm = data.shape
+        ncomp = 1
+        ndims = data.ndim
+        dims = data.shape
 
     else:
-        print('Unknown type of the input data!')
-        return
+        raise Exception('Unknown type of the input data in WriteVTK!')
 
-    # Create the VTK output file and open it in the binary mode.
-    #
     with open(vtkfile, 'wb') as vt:
 
-        # Initiate offset and array size.
-        #
         offset = 0
 
-        # Prepare strong for dimensions.
-        #
         sdims = '"'
         if points:
-            for i in range(nd - 1, 0, -1):
-                sdims +='%d %d ' % (0, dm[i] - 1)
-            sdims += '%d %d" ' % (0, dm[0] - 1)
+            for i in range(ndims - 1, 0, -1):
+                sdims +='%d %d ' % (0, dims[i] - 1)
+            sdims += '%d %d" ' % (0, dims[0] - 1)
         else:
-            for i in range(nd - 1, 0, -1):
-                sdims +='%d %d ' % (0, dm[i])
-            sdims += '%d %d" ' % (0, dm[0])
+            for i in range(ndims - 1, 0, -1):
+                sdims +='%d %d ' % (0, dims[i])
+            sdims += '%d %d" ' % (0, dims[0])
 
-        # Write the VTK header and data description.
-        #
         string = '<?xml version="1.0"?>\n<VTKFile type="ImageData" version="1.0" byte_order="LittleEndian" header_type="UInt64"'
         if compression == 'zlib':
             string += ' compressor="vtkZLibDataCompressor"'
+            compression_level = min(max(compression_level, 0), 9)
         elif compression == 'lzma':
             string += ' compressor="vtkLZMADataCompressor"'
+            compression_level = min(max(compression_level, 0), 9)
         elif compression == 'lz4':
             string += ' compressor="vtkLZ4DataCompressor"'
+            compression_level = min(max(compression_level, 0), 12)
+            lz4acceleration = max(lz4acceleration, 1)
         string += '>\n <ImageData WholeExtent=%s' % sdims
         string += 'Origin="%e %e %e" ' % origin
        string += 'Spacing="%e %e %e">\n' % spacing
@@ -144,7 +91,7 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
             string += ' <PointData %ss="%s">\n' % (dtype, vname)
         else:
             string += ' <CellData %ss="%s">\n' % (dtype, vname)
-        if nc == 1:
+        if ncomp == 1:
             dmin = data.min()
             dmax = data.max()
         else:
@@ -154,8 +101,8 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
             dmin = numpy.sqrt(dd.min())
             dmax = numpy.sqrt(dd.max())
         string += ' <DataArray '
-        if nc > 1:
-            string += 'NumberOfComponents="{:d}" '.format(nc)
+        if ncomp > 1:
+            string += 'NumberOfComponents="{:d}" '.format(ncomp)
         string += 'type="Float32" Name="{}" RangeMin="{:e}" RangeMax="{:e}" format="appended" offset="{}">\n'.format(vname, dmin, dmax, offset)
         string += " </DataArray>\n"
         if points:
@@ -166,8 +113,6 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
         string += ' </ImageData>\n'
         vt.write(string.encode())
 
-        # Append variable data.
-        #
         if encode:
             string = ' <AppendedData encoding="base64">\n'
         else:
@@ -175,40 +120,27 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
         string += ' _'
         vt.write(string.encode())
 
-        # Convert the input data to Float32. In the case of vector data concatenate the components first.
-        #
-        if (dtype == 'vector'):
-            qt = numpy.zeros([ dm[0], dm[1], dm[2], nc ], dtype = numpy.float32)
+        if dtype == 'vector':
+            qt = numpy.zeros([ dims[0], dims[1], dims[2], ncomp ], dtype=numpy.float32)
             for n, d in enumerate(data[:]):
                 qt[:,:,:,n] = numpy.float32(d)
 
         else:
             qt = numpy.float32(data)
 
         barr = qt.tobytes()
 
-        # Compress if desired.
-        #
-        if compression != 'none':
-
-            lz4compression = 10 - compression_level
+        if compression != None:
 
             if len(barr) < block_size:
 
                 if compression == 'zlib':
                     carr = zlib.compress(barr, compression_level)
                 elif compression == 'lz4':
-                    carr = lz4.block.compress(barr, mode = lz4mode, acceleration = lz4compression, compression = compression_level, store_size = False)
+                    carr = lz4.block.compress(barr, mode=lz4mode, acceleration=lz4acceleration, compression=compression_level, store_size=False)
                 elif compression == 'lzma':
                     carr = lzma.compress(barr)
 
-                # Prepare the number of blocks, the size of the last partial block,
-                # and the size of the compressed data.
-                #
                 head = struct.pack("QQQQ", 1, len(barr), 0, len(carr))
 
             else:
 
                 nblocks = len(barr) // block_size
                 rsize = len(barr) % block_size
                 if verbose:
@@ -231,7 +163,7 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
                 cctx = lzma
                 for i in range(nblocks):
                     ie = min(len(barr), ib + block_size)
-                    cblk = cctx.compress(barr[ib:ie], preset = compression_level)
+                    cblk = cctx.compress(barr[ib:ie], preset=compression_level)
                     head += struct.pack("Q", len(cblk))
                     carr += cblk
                     ib = ie
@@ -240,7 +172,7 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
                 cctx = lz4.block
                 for i in range(nblocks):
                     ie = min(len(barr), ib + block_size)
-                    cblk = cctx.compress(barr[ib:ie], mode = lz4mode, acceleration = lz4compression, compression = compression_level, store_size = False)
+                    cblk = cctx.compress(barr[ib:ie], mode=lz4mode, acceleration=lz4acceleration, compression=compression_level, store_size=False)
                     head += struct.pack("Q", len(cblk))
                     carr += cblk
                     ib = ie
@@ -251,16 +183,12 @@ def WriteVTK(vtkfile, vname, data, origin = (0.0, 0.0, 0.0), \
                 vt.write(head+carr)
 
         else:
 
             head = struct.pack("Q", len(barr))
 
             if encode:
                 vt.write(base64.b64encode(head+barr))
             else:
                 vt.write(head+barr)
 
-        # Close the appended data section.
-        #
         string = '\n </AppendedData>\n'
         string += '</VTKFile>\n'
         vt.write(string.encode())
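With the keyword interface above, a single cell-centered block can be dumped to
a VTK image file in a few lines. A minimal usage sketch, assuming the package is
importable as amunpy and using made-up data, file name and grid spacing:

    import numpy as np
    from amunpy.vtkio import WriteVTK

    dens = np.random.rand(16, 16, 16).astype(np.float32)
    WriteVTK('dens_block.vti', 'dens', dens,
             origin=(0.0, 0.0, 0.0), spacing=(0.0625, 0.0625, 0.0625),
             compression='zlib', compression_level=6)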