PYTHON: Split module into submodules.

Signed-off-by: Grzegorz Kowal <grzegorz@amuncode.org>

parent c1c68c3516
commit f8a7039a94

@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:

 setuptools.setup(
     name="amunpy",
-    version="0.6",
+    version="0.6.1",
     author="Grzegorz Kowal",
     author_email="grzegorz@amuncode.org",
     description="Python Interface for the AMUN code's snapshots",

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
"""
Package AmunPy is the Python interface to handle datasets produced by
the AMUN code (https://amuncode.org).

The package is released under GNU General Public License v3.
See file LICENSE for more details.
"""
from .amunxml import *
from .amunh5 import *
from .integrals import *

__all__ = [ 'AmunXML', 'amun_attribute', 'amun_coordinate', 'amun_dataset', 'amun_integrals' ]

__author__ = "Grzegorz Kowal"
__copyright__ = "Copyright 2018-2021, Grzegorz Kowal <grzegorz@amuncode.org>"
__version__ = "0.6.1"
__maintainer__ = "Grzegorz Kowal"
__email__ = "grzegorz@amuncode.org"
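
For orientation, a minimal usage sketch of the reorganized package follows; it is a hedged example, not taken from the commit itself. It relies only on the names re-exported above, and the snapshot name is a placeholder of the same form as the docstring examples used throughout the package.

# Hedged sketch of the package-level API after the split.
# 'p000010_00000.h5' is a placeholder; point it at a real AMUN HDF5 snapshot.
from amunpy import amun_attribute, amun_dataset

snapshot = 'p000010_00000.h5'

time = amun_attribute(snapshot, 'time')   # read a global attribute
dens = amun_dataset(snapshot, 'dens')     # read the density on the uniform effective mesh

print(time, dens.shape, dens.min(), dens.max())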

python/amunpy/src/amunpy/amunh5.py  (new file, 455 lines)
@@ -0,0 +1,455 @@
"""
================================================================================

 This file is part of the AMUN source code, a program to perform
 Newtonian or relativistic magnetohydrodynamical simulations on uniform or
 adaptive mesh.

 Copyright (C) 2018-2021 Grzegorz Kowal <grzegorz@amuncode.org>

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.

================================================================================

 module: AMUN

 Python module with subroutines to read AMUN code HDF5 files.

 The only requirements for this package are:

    - h5py
    - numpy

--------------------------------------------------------------------------------
"""
from .interpolation import interpolate

import h5py as h5
import numpy as np
import os.path as op
import sys


def amun_compatible(fname):
    '''
        Subroutine checks if the HDF5 file is AMUN compatible.

        Arguments:

          fname - the HDF5 file name;

        Return values:

          True or False;

        Examples:

          comp = amun_compatible('p000010_00000.h5')

    '''
    with h5.File(fname, 'r') as f:
        if 'code' in f.attrs:
            if f.attrs['code'].astype(str) == "AMUN":
                return True
            else:
                print("'%s' contains attribute 'code'," % fname, \
                      " but it is not 'AMUN'!")
                return False
        elif 'attributes' in f and 'coordinates' in f and \
             'variables' in f:
            return True
        else:
            print("'%s' misses one of these groups:" % fname, \
                  "'attributes', 'coordinates' or 'variables'!")
            return False


def amun_attribute(fname, aname):
    '''
        Subroutine to read global attributes from AMUN HDF5 snapshots.

        Arguments:

          fname - the HDF5 file name;
          aname - the attribute name;

        Return values:

          ret - the value of the attribute or None;

        Examples:

          time = amun_attribute('p000010_00000.h5', 'time')

    '''
    if not amun_compatible(fname):
        return None

    with h5.File(fname, 'r') as f:
        if aname in f['attributes'].attrs:
            attr = f['attributes'].attrs[aname]
            if attr.dtype.type is np.string_:
                ret = np.squeeze(attr).astype(str)
            else:
                ret = np.squeeze(attr)
            return ret
        else:
            print("Attribute '%s' cannot be found in '%s'!" % (aname, fname))
            return None


def amun_coordinate(fname, iname):
    '''
        Subroutine to read coordinate items from AMUN HDF5 snapshots.

        Arguments:

          fname - the HDF5 file name;
          iname - the item name;

        Return values:

          ret - the value of the item or None;

        Examples:

          bounds = amun_coordinate('p000010_00000.h5', 'bounds')

    '''
    if not amun_compatible(fname):
        return None

    with h5.File(fname, 'r') as f:
        if iname in f['coordinates']:
            return np.array(f['coordinates'][iname])
        else:
            print("Coordinate item '%s' not found in group 'coordinates' of '%s'!" % (iname, fname))
            return None


def amun_dataset(fname, vname, shrink=1, interpolation='rebin', progress=False):
    '''
        Subroutine to read datasets from AMUN HDF5 snapshots.

        Arguments:

          fname         - the HDF5 file name;
          vname         - the variable name;
          shrink        - the shrink factor (must be a power of 2 and not
                          larger than the block size);
          interpolation - the method used to upscale coarser blocks: 'rebin'
                          (default), 'monotonic'/'pchip', 'spline', or any
                          kind accepted by scipy.interpolate.interp1d;
          progress      - the progress bar switch;

        Return values:

          ret - the array of values for the variable;

        Examples:

          dn = amun_dataset('p000010_00000.h5', 'dens')

    '''
    if not amun_compatible(fname):
        return None

    dname = op.dirname(fname)

    if progress:
        sys.stdout.write("Data file path:\n  '%s'\n" % (dname))

    # get attributes necessary to reconstruct the domain
    #
    eqsys = amun_attribute(fname, 'eqsys')
    eos   = amun_attribute(fname, 'eos')
    nr    = amun_attribute(fname, 'isnap')
    nc    = amun_attribute(fname, 'nprocs')
    nl    = amun_attribute(fname, 'nleafs')
    if eos == 'adi':
        gm = amun_attribute(fname, 'adiabatic_index')

    # get block dimensions and the maximum level
    #
    ndims = amun_attribute(fname, 'ndims')
    nn    = amun_attribute(fname, 'ncells')
    bm    = np.array([nn, nn, nn])
    if ndims == 2:
        bm[2] = 1
    ng    = amun_attribute(fname, 'nghosts')
    ml    = amun_attribute(fname, 'maxlev')

    # get the base block dimensions
    #
    rm = amun_attribute(fname, 'bdims')
    if rm is None:
        rm = amun_attribute(fname, 'domain_base_dims')
    if rm is None:
        rm = amun_attribute(fname, 'rdims')
    if rm is None:
        return None

    # build the list of supported variables
    #
    variables = []
    with h5.File(fname, 'r') as f:
        for var in f['variables'].keys():
            variables.append(var)

    # add derived variables if possible
    #
    variables.append('level')
    if 'velx' in variables and 'vely' in variables and 'velz' in variables:
        variables.append('velo')
        variables.append('divv')
        variables.append('vort')
    if 'magx' in variables and 'magy' in variables and 'magz' in variables:
        variables.append('magn')
        variables.append('divb')
        variables.append('curr')
    if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' \
                                         and 'pres' in variables:
        variables.append('eint')
    if 'dens' in variables and 'pres' in variables:
        variables.append('temp')
    if (eqsys == 'hd' or eqsys == 'mhd') and 'dens' in variables \
                                         and 'velx' in variables \
                                         and 'vely' in variables \
                                         and 'velz' in variables:
        variables.append('ekin')
    if (eqsys == 'mhd' or eqsys == 'srmhd') and 'magx' in variables \
                                            and 'magy' in variables \
                                            and 'magz' in variables:
        variables.append('emag')
    if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables:
        variables.append('etot')
    if eqsys == 'mhd' and 'eint' in variables \
                      and 'ekin' in variables \
                      and 'emag' in variables:
        variables.append('etot')
    if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables:
        variables.append('lore')

    # check if the requested variable is in the variable list
    #
    if not vname in variables:
        print('The requested variable cannot be extracted from the file datasets!')
        return None

    # check if the shrink parameter is correct (block dimensions should be
    # divisible by the shrink factor)
    #
    shrink = max(1, int(shrink))
    if shrink > 1:
        if (nn % shrink) != 0:
            print('The block dimension should be divisible by the shrink factor!')
            return None
        sh = shrink
        while(sh > 2 and sh % 2 == 0):
            sh = int(sh / 2)
        if (sh % 2) != 0:
            print('The shrink factor should be a power of 2!')
            return None

    # determine the actual maximum level from the blocks
    #
    levs = []
    for n in range(nc):
        fname   = 'p%06d_%05d.h5' % (nr, n)
        lname   = op.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levs = np.append(levs, [amun_coordinate(lname, 'levels')])
    ml = int(levs.max())

    # prepare dimensions of the output array and allocate it
    #
    dm  = np.array(rm[0:ndims] * bm[0:ndims] * 2**(ml - 1) / shrink, \
                                                          dtype=np.int32)
    ret = np.zeros(dm[::-1])

    # iterate over all subdomain files
    #
    nb = 0
    for n in range(nc):
        fname   = 'p%06d_%05d.h5' % (nr, n)
        lname   = op.join(dname, fname)
        dblocks = amun_attribute(lname, 'dblocks')
        if dblocks > 0:
            levels = amun_coordinate(lname, 'levels')
            coords = amun_coordinate(lname, 'coords')
            dx     = amun_coordinate(lname, 'dx')
            dy     = amun_coordinate(lname, 'dy')
            dz     = amun_coordinate(lname, 'dz')
            with h5.File(lname, 'r') as f:
                g = f['variables']
                if vname == 'level':
                    dataset = np.zeros(g[variables[0]].shape)
                    for l in range(dblocks):
                        dataset[:,:,:,l] = levels[l]
                elif vname == 'velo':
                    dataset = np.sqrt(g['velx'][:,:,:,:]**2 \
                                    + g['vely'][:,:,:,:]**2 \
                                    + g['velz'][:,:,:,:]**2)
                elif vname == 'magn':
                    dataset = np.sqrt(g['magx'][:,:,:,:]**2 \
                                    + g['magy'][:,:,:,:]**2 \
                                    + g['magz'][:,:,:,:]**2)
                elif vname == 'eint':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:]
                elif vname == 'ekin':
                    dataset = 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
                                                        + g['vely'][:,:,:,:]**2 \
                                                        + g['velz'][:,:,:,:]**2)
                elif vname == 'emag':
                    dataset = 0.5 * (g['magx'][:,:,:,:]**2 \
                                   + g['magy'][:,:,:,:]**2 \
                                   + g['magz'][:,:,:,:]**2)
                elif vname == 'etot':
                    dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:] \
                            + 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
                                                        + g['vely'][:,:,:,:]**2 \
                                                        + g['velz'][:,:,:,:]**2)
                    if eqsys == 'mhd':
                        dataset += 0.5 * (g['magx'][:,:,:,:]**2 \
                                        + g['magy'][:,:,:,:]**2 \
                                        + g['magz'][:,:,:,:]**2)
                elif vname == 'temp':
                    dataset = g['pres'][:,:,:,:] / g['dens'][:,:,:,:]
                elif vname == 'lore':
                    dataset = 1.0 / np.sqrt(1.0 - (g['velx'][:,:,:,:]**2 \
                                                 + g['vely'][:,:,:,:]**2 \
                                                 + g['velz'][:,:,:,:]**2))
                elif vname == 'divv':
                    dataset = np.zeros(g['velx'].shape)
                    fields  = [ 'velx', 'vely', 'velz' ]
                    h       = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
                                        - np.roll(g[v][:,:,:,:],  1, axis=2)) \
                                                       / h[i][levels[:] - 1]
                elif vname == 'divb':
                    dataset = np.zeros(g['magx'].shape)
                    fields  = [ 'magx', 'magy', 'magz' ]
                    h       = (dx, dy, dz)
                    for i in range(ndims):
                        v = fields[i]
                        dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
                                        - np.roll(g[v][:,:,:,:],  1, axis=2)) \
                                                       / h[i][levels[:] - 1]
                elif vname == 'vort':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velz'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['vely'][:,:,:,:],  1, axis=0)) / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['velx'][:,:,:,:],  1, axis=0)) / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['velz'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['vely'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['velx'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                    else:
                        wx =   0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
                                    - np.roll(g['velz'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['velz'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1]
                        wz =   0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['vely'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1] \
                             - 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
                                    - np.roll(g['velx'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                elif vname == 'curr':
                    if ndims == 3:
                        wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magz'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1] \
                           - 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['magy'][:,:,:,:],  1, axis=0)) / dz[levels[:]-1]
                        wy = 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=0) \
                                  - np.roll(g['magx'][:,:,:,:],  1, axis=0)) / dz[levels[:]-1] \
                           - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['magz'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1]
                        wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
                                  - np.roll(g['magy'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1] \
                           - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
                                  - np.roll(g['magx'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                    else:
                        wx =   0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
                                    - np.roll(g['magz'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                        wy = - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['magz'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1]
                        wz =   0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
                                    - np.roll(g['magy'][:,:,:,:],  1, axis=2)) / dx[levels[:]-1] \
                             - 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
                                    - np.roll(g['magx'][:,:,:,:],  1, axis=1)) / dy[levels[:]-1]
                    dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
                else:
                    dataset = g[vname][:,:,:,:]

                # rescale all blocks to the effective resolution
                #
                for l in range(dblocks):
                    nn = 2**(ml - levels[l])
                    if nn <= shrink:
                        method = 'rebin'
                    else:
                        method = interpolation
                    cm   = np.array(bm[0:ndims] * nn / shrink, dtype=np.int32)
                    ibeg = coords[0:ndims,l] * cm[0:ndims]
                    iend = ibeg + cm[0:ndims]
                    if ndims == 3:
                        ib, jb, kb = ibeg[0], ibeg[1], ibeg[2]
                        ie, je, ke = iend[0], iend[1], iend[2]
                        ret[kb:ke,jb:je,ib:ie] = interpolate(dataset[:,:,:,l], cm, ng, method=method)
                    else:
                        ib, jb = ibeg[0], ibeg[1]
                        ie, je = iend[0], iend[1]
                        ret[jb:je,ib:ie] = interpolate(dataset[0,:,:,l], cm, ng, method=method)

                    nb += 1

                    # print progress bar if desired
                    #
                    if progress:
                        sys.stdout.write('\r')
                        sys.stdout.write("Reading '%s' from '%s': block %d from %d" \
                                                       % (vname, fname, nb, nl))
                        sys.stdout.flush()

    if (progress):
        sys.stdout.write('\n')
        sys.stdout.flush()

    return ret
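
amun_dataset() reconstructs the requested variable on the uniform effective mesh, optionally shrinking it and choosing how coarser blocks are upscaled. A hedged usage sketch, assuming a snapshot that stores magx/magy/magz (so the derived 'magn' is available) and that scipy is installed for the non-default interpolation methods:

# Sketch: read the magnetic field strength at half resolution.
# 'p000010_00000.h5' is a placeholder; shrink must be a power of 2 that does
# not exceed the block size, and 'pchip' needs scipy (otherwise interpolate()
# falls back to rebin).
from amunpy import amun_attribute, amun_dataset

snapshot = 'p000010_00000.h5'

ncells = amun_attribute(snapshot, 'ncells')     # block size, limits 'shrink'
bmag   = amun_dataset(snapshot, 'magn', shrink=2,
                      interpolation='pchip', progress=True)

print(ncells, bmag.shape)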

@@ -36,15 +36,9 @@
 --------------------------------------------------------------------------------
 """
-import h5py as h5
-import numpy as np
+from .interpolation import interpolate
 import os.path as op
 import sys

-try:
-    from scipy.interpolate import splrep, splev, interp1d, pchip_interpolate
-    scipy_available = True
-except ImportError:
-    scipy_available = False
 try:
     from xxhash import *
     xxhash_available = True
@ -831,667 +825,3 @@ class AmunXML:
|
|||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
return arr
|
return arr
|
||||||
|
|
||||||
|
|
||||||
def amun_compatible(fname):
|
|
||||||
'''
|
|
||||||
Subroutine checks if the HDF5 file is AMUN compatible.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
|
|
||||||
fname - the HDF5 file name;
|
|
||||||
|
|
||||||
Return values:
|
|
||||||
|
|
||||||
True or False;
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
comp = amun_compatible('p000010_00000.h5')
|
|
||||||
|
|
||||||
'''
|
|
||||||
with h5.File(fname, 'r') as f:
|
|
||||||
if 'codes' in f.attrs:
|
|
||||||
if f.attrs['code'].astype(str) == "AMUN":
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print("'%s' contains attribute 'code'," % fname, \
|
|
||||||
" but it is not 'AMUN'!")
|
|
||||||
return False
|
|
||||||
elif 'attributes' in f and 'coordinates' in f and \
|
|
||||||
'variables' in f:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print("'%s' misses one of these groups:" % fname, \
|
|
||||||
"'attributes', 'coordinates' or 'variables'!")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def amun_attribute(fname, aname):
|
|
||||||
'''
|
|
||||||
Subroutine to read global attributes from AMUN HDF5 snapshots.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
|
|
||||||
fname - the HDF5 file name;
|
|
||||||
aname - the attribute name;
|
|
||||||
|
|
||||||
Return values:
|
|
||||||
|
|
||||||
ret - the value of the attribute or None;
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
time = amun_attribute('p000010_00000.h5', 'time')
|
|
||||||
|
|
||||||
'''
|
|
||||||
if not amun_compatible(fname):
|
|
||||||
return None
|
|
||||||
|
|
||||||
with h5.File(fname, 'r') as f:
|
|
||||||
if aname in f['attributes'].attrs:
|
|
||||||
attr = f['attributes'].attrs[aname]
|
|
||||||
if attr.dtype.type is np.string_:
|
|
||||||
ret = np.squeeze(attr).astype(str)
|
|
||||||
else:
|
|
||||||
ret = np.squeeze(attr)
|
|
||||||
return ret
|
|
||||||
else:
|
|
||||||
print("Attribute '%s' cannot be found in '%s'!" % (aname, fname))
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def amun_coordinate(fname, iname):
|
|
||||||
'''
|
|
||||||
Subroutine to read coordinate items from AMUN HDF5 snapshots.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
|
|
||||||
fname - the HDF5 file name;
|
|
||||||
iname - the item name;
|
|
||||||
|
|
||||||
Return values:
|
|
||||||
|
|
||||||
ret - the value of the item or None;
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
bounds = amun_coordinate('p000010_00000.h5', 'bounds')
|
|
||||||
|
|
||||||
'''
|
|
||||||
if not amun_compatible(fname):
|
|
||||||
return None
|
|
||||||
|
|
||||||
with h5.File(fname, 'r') as f:
|
|
||||||
if iname in f['coordinates']:
|
|
||||||
return np.array(f['coordinates'][iname])
|
|
||||||
else:
|
|
||||||
print("Coordinate item '%s' not found in group 'coordinate' of '%s'!" % (iname, fname))
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def amun_dataset(fname, vname, shrink=1, interpolation='rebin', progress=False):
|
|
||||||
'''
|
|
||||||
Subroutine to read datasets from AMUN HDF5 snapshots.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
|
|
||||||
fname - the HDF5 file name;
|
|
||||||
vname - the variable name;
|
|
||||||
shrink - the shrink factor (must be the power of 2 and not larger
|
|
||||||
than the block size);
|
|
||||||
progress - the progress bar switch;
|
|
||||||
|
|
||||||
Return values:
|
|
||||||
|
|
||||||
ret - the array of values for the variable;
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
dn = amun_dataset('p000010_00000.h5', 'dens')
|
|
||||||
|
|
||||||
'''
|
|
||||||
if not amun_compatible(fname):
|
|
||||||
return None
|
|
||||||
|
|
||||||
dname = op.dirname(fname)
|
|
||||||
|
|
||||||
if progress:
|
|
||||||
sys.stdout.write("Data file path:\n '%s'\n" % (dname))
|
|
||||||
|
|
||||||
# get attributes necessary to reconstruct the domain
|
|
||||||
#
|
|
||||||
eqsys = amun_attribute(fname, 'eqsys')
|
|
||||||
eos = amun_attribute(fname, 'eos')
|
|
||||||
nr = amun_attribute(fname, 'isnap')
|
|
||||||
nc = amun_attribute(fname, 'nprocs')
|
|
||||||
nl = amun_attribute(fname, 'nleafs')
|
|
||||||
if eos == 'adi':
|
|
||||||
gm = amun_attribute(fname, 'adiabatic_index')
|
|
||||||
|
|
||||||
# get block dimensions and the maximum level
|
|
||||||
#
|
|
||||||
ndims = amun_attribute(fname, 'ndims')
|
|
||||||
nn = amun_attribute(fname, 'ncells')
|
|
||||||
bm = np.array([nn, nn, nn])
|
|
||||||
if ndims == 2:
|
|
||||||
bm[2] = 1
|
|
||||||
ng = amun_attribute(fname, 'nghosts')
|
|
||||||
ml = amun_attribute(fname, 'maxlev')
|
|
||||||
|
|
||||||
# get the base block dimensions
|
|
||||||
#
|
|
||||||
rm = amun_attribute(fname, 'bdims')
|
|
||||||
if rm is None:
|
|
||||||
rm = amun_attribute(fname, 'domain_base_dims')
|
|
||||||
if rm is None:
|
|
||||||
rm = amun_attribute(fname, 'rdims')
|
|
||||||
if rm is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# build the list of supported variables
|
|
||||||
#
|
|
||||||
variables = []
|
|
||||||
with h5.File(fname, 'r') as f:
|
|
||||||
for var in f['variables'].keys():
|
|
||||||
variables.append(var)
|
|
||||||
|
|
||||||
# add derived variables if possible
|
|
||||||
#
|
|
||||||
variables.append('level')
|
|
||||||
if 'velx' in variables and 'vely' in variables and 'velz' in variables:
|
|
||||||
variables.append('velo')
|
|
||||||
variables.append('divv')
|
|
||||||
variables.append('vort')
|
|
||||||
if 'magx' in variables and 'magy' in variables and 'magz' in variables:
|
|
||||||
variables.append('magn')
|
|
||||||
variables.append('divb')
|
|
||||||
variables.append('curr')
|
|
||||||
if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' \
|
|
||||||
and 'pres' in variables:
|
|
||||||
variables.append('eint')
|
|
||||||
if 'dens' in variables and 'pres' in variables:
|
|
||||||
variables.append('temp')
|
|
||||||
if (eqsys == 'hd' or eqsys == 'mhd') \
|
|
||||||
and 'dens' in variables \
|
|
||||||
and 'velx' in variables \
|
|
||||||
and 'vely' in variables \
|
|
||||||
and 'velz' in variables:
|
|
||||||
variables.append('ekin')
|
|
||||||
if (eqsys == 'mhd' or eqsys == 'srmhd') \
|
|
||||||
and 'magx' in variables \
|
|
||||||
and 'magy' in variables \
|
|
||||||
and 'magz' in variables:
|
|
||||||
variables.append('emag')
|
|
||||||
if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables:
|
|
||||||
variables.append('etot')
|
|
||||||
if eqsys == 'mhd' and 'eint' in variables \
|
|
||||||
and 'ekin' in variables \
|
|
||||||
and 'emag' in variables:
|
|
||||||
variables.append('etot')
|
|
||||||
if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables:
|
|
||||||
variables.append('lore')
|
|
||||||
|
|
||||||
# check if the requested variable is in the variable list
|
|
||||||
#
|
|
||||||
if not vname in variables:
|
|
||||||
print('The requested variable cannot be extracted from the file datasets!')
|
|
||||||
return None
|
|
||||||
|
|
||||||
# check if the shrink parameter is correct (block dimensions should be
|
|
||||||
# divisible by the shrink factor)
|
|
||||||
#
|
|
||||||
shrink = max(1, int(shrink))
|
|
||||||
if shrink > 1:
|
|
||||||
if (nn % shrink) != 0:
|
|
||||||
print('The block dimension should be divisible by the shrink factor!')
|
|
||||||
return None
|
|
||||||
sh = shrink
|
|
||||||
while(sh > 2 and sh % 2 == 0):
|
|
||||||
sh = int(sh / 2)
|
|
||||||
if (sh % 2) != 0:
|
|
||||||
print('The shrink factor should be a power of 2!')
|
|
||||||
return None
|
|
||||||
|
|
||||||
# determine the actual maximum level from the blocks
|
|
||||||
#
|
|
||||||
levs = []
|
|
||||||
for n in range(nc):
|
|
||||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
|
||||||
lname = op.join(dname, fname)
|
|
||||||
dblocks = amun_attribute(lname, 'dblocks')
|
|
||||||
if dblocks > 0:
|
|
||||||
levs = np.append(levs, [amun_coordinate(lname, 'levels')])
|
|
||||||
ml = int(levs.max())
|
|
||||||
|
|
||||||
# prepare dimensions of the output array and allocate it
|
|
||||||
#
|
|
||||||
dm = np.array(rm[0:ndims] * bm[0:ndims] * 2**(ml - 1) / shrink, \
|
|
||||||
dtype=np.int32)
|
|
||||||
ret = np.zeros(dm[::-1])
|
|
||||||
|
|
||||||
# iterate over all subdomain files
|
|
||||||
#
|
|
||||||
nb = 0
|
|
||||||
for n in range(nc):
|
|
||||||
fname = 'p%06d_%05d.h5' % (nr, n)
|
|
||||||
lname = op.join(dname, fname)
|
|
||||||
dblocks = amun_attribute(lname, 'dblocks')
|
|
||||||
if dblocks > 0:
|
|
||||||
levels = amun_coordinate(lname, 'levels')
|
|
||||||
coords = amun_coordinate(lname, 'coords')
|
|
||||||
dx = amun_coordinate(lname, 'dx')
|
|
||||||
dy = amun_coordinate(lname, 'dy')
|
|
||||||
dz = amun_coordinate(lname, 'dz')
|
|
||||||
with h5.File(lname, 'r') as f:
|
|
||||||
g = f['variables']
|
|
||||||
if vname == 'level':
|
|
||||||
dataset = np.zeros(g[variables[0]].shape)
|
|
||||||
for l in range(dblocks):
|
|
||||||
dataset[:,:,:,l] = levels[l]
|
|
||||||
elif vname == 'velo':
|
|
||||||
dataset = np.sqrt(g['velx'][:,:,:,:]**2 \
|
|
||||||
+ g['vely'][:,:,:,:]**2 \
|
|
||||||
+ g['velz'][:,:,:,:]**2)
|
|
||||||
elif vname == 'magn':
|
|
||||||
dataset = np.sqrt(g['magx'][:,:,:,:]**2 \
|
|
||||||
+ g['magy'][:,:,:,:]**2 \
|
|
||||||
+ g['magz'][:,:,:,:]**2)
|
|
||||||
elif vname == 'eint':
|
|
||||||
dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:]
|
|
||||||
elif vname == 'ekin':
|
|
||||||
dataset = 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
|
|
||||||
+ g['vely'][:,:,:,:]**2 \
|
|
||||||
+ g['velz'][:,:,:,:]**2)
|
|
||||||
elif vname == 'emag':
|
|
||||||
dataset = 0.5 * (g['magx'][:,:,:,:]**2 \
|
|
||||||
+ g['magy'][:,:,:,:]**2 \
|
|
||||||
+ g['magz'][:,:,:,:]**2)
|
|
||||||
elif vname == 'etot':
|
|
||||||
dataset = 1.0 / (gm - 1.0) * g['pres'][:,:,:,:] \
|
|
||||||
+ 0.5 * g['dens'][:,:,:,:] * (g['velx'][:,:,:,:]**2 \
|
|
||||||
+ g['vely'][:,:,:,:]**2 \
|
|
||||||
+ g['velz'][:,:,:,:]**2)
|
|
||||||
if eqsys == 'mhd':
|
|
||||||
dataset += 0.5 * (g['magx'][:,:,:,:]**2 \
|
|
||||||
+ g['magy'][:,:,:,:]**2 \
|
|
||||||
+ g['magz'][:,:,:,:]**2)
|
|
||||||
elif vname == 'temp':
|
|
||||||
dataset = g['pres'][:,:,:,:] / g['dens'][:,:,:,:]
|
|
||||||
elif vname == 'lore':
|
|
||||||
dataset = 1.0 / np.sqrt(1.0 - (g['velx'][:,:,:,:]**2 \
|
|
||||||
+ g['vely'][:,:,:,:]**2 \
|
|
||||||
+ g['velz'][:,:,:,:]**2))
|
|
||||||
elif vname == 'divv':
|
|
||||||
dataset = np.zeros(g['velx'].shape)
|
|
||||||
fields = [ 'velx', 'vely', 'velz' ]
|
|
||||||
h = (dx, dy, dz)
|
|
||||||
for i in range(ndims):
|
|
||||||
v = fields[i]
|
|
||||||
dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g[v][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ h[i][levels[:] - 1]
|
|
||||||
elif vname == 'divb':
|
|
||||||
dataset = np.zeros(g['magx'].shape)
|
|
||||||
fields = [ 'magx', 'magy', 'magz' ]
|
|
||||||
h = (dx, dy, dz)
|
|
||||||
for i in range(ndims):
|
|
||||||
v = fields[i]
|
|
||||||
dataset += 0.5 * (np.roll(g[v][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g[v][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ h[i][levels[:] - 1]
|
|
||||||
elif vname == 'vort':
|
|
||||||
if ndims == 3:
|
|
||||||
wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['velz'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=0) \
|
|
||||||
- np.roll(g['vely'][:,:,:,:], 1, axis=0)) \
|
|
||||||
/ dz[levels[:]-1]
|
|
||||||
wy = 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=0) \
|
|
||||||
- np.roll(g['velx'][:,:,:,:], 1, axis=0)) \
|
|
||||||
/ dz[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['velz'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1]
|
|
||||||
wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['vely'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['velx'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
else:
|
|
||||||
wx = 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['velz'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
wy = - 0.5 * (np.roll(g['velz'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['velz'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1]
|
|
||||||
wz = 0.5 * (np.roll(g['vely'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['vely'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['velx'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['velx'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
|
|
||||||
elif vname == 'curr':
|
|
||||||
if ndims == 3:
|
|
||||||
wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['magz'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=0) \
|
|
||||||
- np.roll(g['magy'][:,:,:,:], 1, axis=0)) \
|
|
||||||
/ dz[levels[:]-1]
|
|
||||||
wy = 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=0) \
|
|
||||||
- np.roll(g['magx'][:,:,:,:], 1, axis=0)) \
|
|
||||||
/ dz[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['magz'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1]
|
|
||||||
wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['magy'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['magx'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
else:
|
|
||||||
wx = 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['magz'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
wy = - 0.5 * (np.roll(g['magz'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['magz'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1]
|
|
||||||
wz = 0.5 * (np.roll(g['magy'][:,:,:,:], -1, axis=2) \
|
|
||||||
- np.roll(g['magy'][:,:,:,:], 1, axis=2)) \
|
|
||||||
/ dx[levels[:]-1] \
|
|
||||||
- 0.5 * (np.roll(g['magx'][:,:,:,:], -1, axis=1) \
|
|
||||||
- np.roll(g['magx'][:,:,:,:], 1, axis=1)) \
|
|
||||||
/ dy[levels[:]-1]
|
|
||||||
dataset = np.sqrt(wx * wx + wy * wy + wz * wz)
|
|
||||||
else:
|
|
||||||
dataset = g[vname][:,:,:,:]
|
|
||||||
|
|
||||||
# rescale all blocks to the effective resolution
|
|
||||||
#
|
|
||||||
for l in range(dblocks):
|
|
||||||
nn = 2**(ml - levels[l])
|
|
||||||
if nn <= shrink:
|
|
||||||
method = 'rebin'
|
|
||||||
else:
|
|
||||||
method = interpolation
|
|
||||||
cm = np.array(bm[0:ndims] * nn / shrink, dtype=np.int32)
|
|
||||||
ibeg = coords[0:ndims,l] * cm[0:ndims]
|
|
||||||
iend = ibeg + cm[0:ndims]
|
|
||||||
if ndims == 3:
|
|
||||||
ib, jb, kb = ibeg[0], ibeg[1], ibeg[2]
|
|
||||||
ie, je, ke = iend[0], iend[1], iend[2]
|
|
||||||
ret[kb:ke,jb:je,ib:ie] = interpolate(dataset[:,:,:,l], cm, ng, method=method)
|
|
||||||
else:
|
|
||||||
ib, jb = ibeg[0], ibeg[1]
|
|
||||||
ie, je = iend[0], iend[1]
|
|
||||||
ret[jb:je,ib:ie] = interpolate(dataset[0,:,:,l], cm, ng, method=method)
|
|
||||||
|
|
||||||
nb += 1
|
|
||||||
|
|
||||||
# print progress bar if desired
|
|
||||||
#
|
|
||||||
if progress:
|
|
||||||
sys.stdout.write('\r')
|
|
||||||
sys.stdout.write("Reading '%s' from '%s': block %d from %d" \
|
|
||||||
% (vname, fname, nb, nl))
|
|
||||||
sys.stdout.flush()
|
|
||||||
|
|
||||||
if (progress):
|
|
||||||
sys.stdout.write('\n')
|
|
||||||
sys.stdout.flush()
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def amun_integrals(field, filename, pathlist):
|
|
||||||
'''
|
|
||||||
get_integral: iterate over pathlist and read and merge field values from filename files in the provided paths
|
|
||||||
'''
|
|
||||||
# Initiate the return values with empty array and file number.
|
|
||||||
#
|
|
||||||
vals = np.array([])
|
|
||||||
num = 1
|
|
||||||
|
|
||||||
# Iterate over all paths provided in the list 'pathlist'.
|
|
||||||
#
|
|
||||||
for path in pathlist:
|
|
||||||
|
|
||||||
# Iterate over all files in the current path.
|
|
||||||
#
|
|
||||||
while True:
|
|
||||||
|
|
||||||
# Generate file name.
|
|
||||||
#
|
|
||||||
dfile = path + '/' + filename + '_' + str(num).zfill(2) + '.dat'
|
|
||||||
|
|
||||||
# Check if the file exists.
|
|
||||||
#
|
|
||||||
if op.isfile(dfile):
|
|
||||||
|
|
||||||
# Read values from the current integrals file.
|
|
||||||
#
|
|
||||||
lvals = read_integrals(dfile, field)
|
|
||||||
|
|
||||||
# Append to the return array.
|
|
||||||
#
|
|
||||||
vals = np.append(vals, lvals)
|
|
||||||
|
|
||||||
# Increase the number file.
|
|
||||||
#
|
|
||||||
num = num + 1
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
# File does not exists, so go to the next path.
|
|
||||||
#
|
|
||||||
break
|
|
||||||
|
|
||||||
# Return appended values.
|
|
||||||
#
|
|
||||||
return vals
|
|
||||||
|
|
||||||
|
|
||||||
def read_integrals(filename, column):
|
|
||||||
'''
|
|
||||||
read_integrals: reads a given column from an integral file.
|
|
||||||
'''
|
|
||||||
# Open the given file and check if it is text file.
|
|
||||||
#
|
|
||||||
f = open(filename, 'r')
|
|
||||||
|
|
||||||
# Read fist line and store it in h, since it will be used to obtain the
|
|
||||||
# column headers.
|
|
||||||
#
|
|
||||||
l = f.readline()
|
|
||||||
h = l
|
|
||||||
|
|
||||||
# Read first line which is not comment in order to determine the number of
|
|
||||||
# columns and store the number of columns in nc. Calculate the column width
|
|
||||||
# and store it in wc.
|
|
||||||
#
|
|
||||||
while l.startswith('#'):
|
|
||||||
l = f.readline()
|
|
||||||
nc = len(l.rsplit())
|
|
||||||
wc = int((len(l) - 9) / (nc - 1))
|
|
||||||
|
|
||||||
# Split header line into a list.
|
|
||||||
#
|
|
||||||
lh = [h[1:9].strip()]
|
|
||||||
for i in range(nc - 1):
|
|
||||||
ib = i * wc + 10
|
|
||||||
ie = ib + wc - 1
|
|
||||||
lh.append(h[ib:ie].strip())
|
|
||||||
|
|
||||||
|
|
||||||
ic = lh.index(column)
|
|
||||||
|
|
||||||
# Read given column.
|
|
||||||
#
|
|
||||||
if (ic > -1):
|
|
||||||
lc = [float(l.split()[ic])]
|
|
||||||
for l in f:
|
|
||||||
lc.append(float(l.split()[ic]))
|
|
||||||
|
|
||||||
# Close the file.
|
|
||||||
#
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
# Return values.
|
|
||||||
#
|
|
||||||
return(np.array(lc))
|
|
||||||
|
|
||||||
|
|
||||||
def rebin(a, newshape):
|
|
||||||
'''
|
|
||||||
Subroutine changes the size of the input array to to new shape,
|
|
||||||
by copying cells or averaging them.
|
|
||||||
'''
|
|
||||||
assert len(a.shape) == len(newshape)
|
|
||||||
|
|
||||||
m = a.ndim - 1
|
|
||||||
if (a.shape[m] > newshape[m]):
|
|
||||||
if a.ndim == 3:
|
|
||||||
nn = [newshape[0], int(a.shape[0] / newshape[0]),
|
|
||||||
newshape[1], int(a.shape[1] / newshape[1]),
|
|
||||||
newshape[2], int(a.shape[2] / newshape[2])]
|
|
||||||
return a.reshape(nn).mean(5).mean(3).mean(1)
|
|
||||||
else:
|
|
||||||
nn = [newshape[0], int(a.shape[0] / newshape[0]),
|
|
||||||
newshape[1], int(a.shape[1] / newshape[1])]
|
|
||||||
return a.reshape(nn).mean(3).mean(1)
|
|
||||||
else:
|
|
||||||
for n in range(a.ndim):
|
|
||||||
a = np.repeat(a, int(newshape[n] / a.shape[n]), axis=n)
|
|
||||||
return(a)
|
|
||||||
|
|
||||||
|
|
||||||
def interpolate(a, newshape, nghosts, method = 'rebin'):
|
|
||||||
'''
|
|
||||||
Subroutine rescales the block by interpolating its values.
|
|
||||||
'''
|
|
||||||
if (method == 'rebin' or not scipy_available):
|
|
||||||
|
|
||||||
# calculate the indices in order to remove the ghost zones
|
|
||||||
#
|
|
||||||
if a.ndim == 3:
|
|
||||||
ib = nghosts
|
|
||||||
jb = nghosts
|
|
||||||
kb = nghosts
|
|
||||||
ie = a.shape[2] - nghosts
|
|
||||||
je = a.shape[1] - nghosts
|
|
||||||
ke = a.shape[0] - nghosts
|
|
||||||
if (a.shape[0] == 1):
|
|
||||||
kb = 0
|
|
||||||
ke = 1
|
|
||||||
|
|
||||||
return rebin(a[kb:ke,jb:je,ib:ie], newshape)
|
|
||||||
else:
|
|
||||||
ib = nghosts
|
|
||||||
jb = nghosts
|
|
||||||
ie = a.shape[1] - nghosts
|
|
||||||
je = a.shape[0] - nghosts
|
|
||||||
|
|
||||||
return rebin(a[jb:je,ib:ie], newshape)
|
|
||||||
|
|
||||||
elif (method == 'monotonic' or method == 'pchip'):
|
|
||||||
|
|
||||||
dims = np.arange(a.ndim)
|
|
||||||
|
|
||||||
q = a
|
|
||||||
|
|
||||||
for n in dims:
|
|
||||||
|
|
||||||
d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
|
|
||||||
q = q - d2 / 24.0
|
|
||||||
|
|
||||||
d = np.array(q.shape)
|
|
||||||
|
|
||||||
xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
|
|
||||||
xn = np.arange(0.5, newshape[n]) / newshape[n]
|
|
||||||
|
|
||||||
u = q.reshape([d[0], int(q.size / d[0])])
|
|
||||||
f = np.zeros([newshape[n], int(q.size / d[0])])
|
|
||||||
for i in range(int(q.size / d[0])):
|
|
||||||
f[:,i] = pchip_interpolate(xo, u[:,i], xn)
|
|
||||||
|
|
||||||
d[0] = newshape[n]
|
|
||||||
f = f.reshape(d)
|
|
||||||
|
|
||||||
q = f.transpose(np.roll(dims, -1))
|
|
||||||
|
|
||||||
return q
|
|
||||||
|
|
||||||
elif (method == 'spline'):
|
|
||||||
|
|
||||||
dims = np.arange(a.ndim)
|
|
||||||
|
|
||||||
q = a
|
|
||||||
|
|
||||||
for n in dims:
|
|
||||||
|
|
||||||
d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
|
|
||||||
q = q - d2 / 24.0
|
|
||||||
|
|
||||||
d = np.array(q.shape)
|
|
||||||
|
|
||||||
xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
|
|
||||||
xn = np.arange(0.5, newshape[n]) / newshape[n]
|
|
||||||
|
|
||||||
u = q.reshape([d[0], int(q.size / d[0])])
|
|
||||||
f = np.zeros([newshape[n], int(q.size / d[0])])
|
|
||||||
for i in range(int(q.size / d[0])):
|
|
||||||
t = splrep(xo, u[:,i], k=5, s=0.0)
|
|
||||||
f[:,i] = splev(xn, t)
|
|
||||||
|
|
||||||
d[0] = newshape[n]
|
|
||||||
f = f.reshape(d)
|
|
||||||
|
|
||||||
q = f.transpose(np.roll(dims, -1))
|
|
||||||
|
|
||||||
return q
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
dims = np.arange(a.ndim)
|
|
||||||
|
|
||||||
q = a
|
|
||||||
|
|
||||||
for n in dims:
|
|
||||||
|
|
||||||
d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
|
|
||||||
q = q - d2 / 24.0
|
|
||||||
|
|
||||||
d = np.array(q.shape)
|
|
||||||
|
|
||||||
xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
|
|
||||||
xn = np.arange(0.5, newshape[n]) / newshape[n]
|
|
||||||
|
|
||||||
u = q.reshape([d[0], int(q.size / d[0])])
|
|
||||||
f = np.zeros([newshape[n], int(q.size / d[0])])
|
|
||||||
for i in range(int(q.size / d[0])):
|
|
||||||
t = interp1d(xo, u[:,i], kind=method)
|
|
||||||
f[:,i] = t(xn)
|
|
||||||
|
|
||||||
d[0] = newshape[n]
|
|
||||||
f = f.reshape(d)
|
|
||||||
|
|
||||||
q = f.transpose(np.roll(dims, -1))
|
|
||||||
|
|
||||||
return q
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
fname = './p000030_00000.h5'
|
|
||||||
|
|
||||||
ret = amun_attribute(fname, 'time')
|
|
||||||
print(ret)
|
|
||||||
ret = amun_attribute(fname, 'dims')
|
|
||||||
print(ret)
|
|
||||||
|
|
||||||
ret = amun_dataset(fname, 'dens')
|
|
||||||
print(ret.shape, ret.min(), ret.max())
|
|

python/amunpy/src/amunpy/integrals.py  (new file, 130 lines)
@@ -0,0 +1,130 @@
"""
================================================================================

 This file is part of the AMUN source code, a program to perform
 Newtonian or relativistic magnetohydrodynamical simulations on uniform or
 adaptive mesh.

 Copyright (C) 2018-2021 Grzegorz Kowal <grzegorz@amuncode.org>

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.

================================================================================

 module: AMUN

 Python module with subroutines to read AMUN code integrals files.

 The only requirements for this package are:

    - numpy

--------------------------------------------------------------------------------
"""
import numpy as np
import os.path as op


def amun_integrals(field, filename, pathlist):
    '''
        amun_integrals: iterate over the paths in 'pathlist', read the given
        field from the 'filename' integrals files found there, and merge the
        values into one array.
    '''
    # Initiate the return values with an empty array and the file number.
    #
    vals = np.array([])
    num  = 1

    # Iterate over all paths provided in the list 'pathlist'.
    #
    for path in pathlist:

        # Iterate over all files in the current path.
        #
        while True:

            # Generate the file name.
            #
            dfile = path + '/' + filename + '_' + str(num).zfill(2) + '.dat'

            # Check if the file exists.
            #
            if op.isfile(dfile):

                # Read values from the current integrals file.
                #
                lvals = read_integrals(dfile, field)

                # Append to the return array.
                #
                vals = np.append(vals, lvals)

                # Increase the file number.
                #
                num = num + 1

            else:

                # The file does not exist, so go to the next path.
                #
                break

    # Return the appended values.
    #
    return vals


def read_integrals(filename, column):
    '''
        read_integrals: reads a given column from an integrals file.
    '''
    # Open the given file and check if it is a text file.
    #
    f = open(filename, 'r')

    # Read the first line and store it in h, since it will be used to obtain
    # the column headers.
    #
    l = f.readline()
    h = l

    # Read the first line which is not a comment in order to determine the
    # number of columns and store it in nc.  Calculate the column width and
    # store it in wc.
    #
    while l.startswith('#'):
        l = f.readline()
    nc = len(l.rsplit())
    wc = int((len(l) - 9) / (nc - 1))

    # Split the header line into a list.
    #
    lh = [h[1:9].strip()]
    for i in range(nc - 1):
        ib = i * wc + 10
        ie = ib + wc - 1
        lh.append(h[ib:ie].strip())

    ic = lh.index(column)

    # Read the given column.
    #
    if (ic > -1):
        lc = [float(l.split()[ic])]
        for l in f:
            lc.append(float(l.split()[ic]))

    # Close the file.
    #
    f.close()

    # Return values.
    #
    return(np.array(lc))
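
A hedged usage sketch of the two helpers above. The run directories, the 'integrals' base name, and the column label 'time' are assumptions about a typical AMUN run layout; only the file-name pattern <path>/<filename>_NN.dat comes from the code itself.

# Sketch: merge one column of the integrals files from two assumed run
# directories; files are looked up as <path>/integrals_01.dat, _02.dat, ...
from amunpy import amun_integrals

paths = ['./run_a', './run_b']                      # assumed directories
time  = amun_integrals('time', 'integrals', paths)  # assumed column label

print(time.size)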

python/amunpy/src/amunpy/interpolation.py  (new file, 180 lines)
@@ -0,0 +1,180 @@
"""
================================================================================

 This file is part of the AMUN source code, a program to perform
 Newtonian or relativistic magnetohydrodynamical simulations on uniform or
 adaptive mesh.

 Copyright (C) 2018-2021 Grzegorz Kowal <grzegorz@amuncode.org>

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.

================================================================================

 module: AMUN

 Python module with subroutines to rescale and interpolate AMUN code data
 blocks.

 The only requirements for this package are:

    - numpy
    - scipy (optional, for data interpolation)

--------------------------------------------------------------------------------
"""
import numpy as np
try:
    from scipy.interpolate import splrep, splev, interp1d, pchip_interpolate
    scipy_available = True
except ImportError:
    scipy_available = False


def rebin(a, newshape):
    '''
        Subroutine changes the size of the input array to the new shape,
        by copying cells or averaging them.
    '''
    assert len(a.shape) == len(newshape)

    m = a.ndim - 1
    if (a.shape[m] > newshape[m]):
        if a.ndim == 3:
            nn = [newshape[0], int(a.shape[0] / newshape[0]),
                  newshape[1], int(a.shape[1] / newshape[1]),
                  newshape[2], int(a.shape[2] / newshape[2])]
            return a.reshape(nn).mean(5).mean(3).mean(1)
        else:
            nn = [newshape[0], int(a.shape[0] / newshape[0]),
                  newshape[1], int(a.shape[1] / newshape[1])]
            return a.reshape(nn).mean(3).mean(1)
    else:
        for n in range(a.ndim):
            a = np.repeat(a, int(newshape[n] / a.shape[n]), axis=n)
        return(a)


def interpolate(a, newshape, nghosts, method = 'rebin'):
    '''
        Subroutine rescales the block by interpolating its values.
    '''
    if (method == 'rebin' or not scipy_available):

        # calculate the indices in order to remove the ghost zones
        #
        if a.ndim == 3:
            ib = nghosts
            jb = nghosts
            kb = nghosts
            ie = a.shape[2] - nghosts
            je = a.shape[1] - nghosts
            ke = a.shape[0] - nghosts
            if (a.shape[0] == 1):
                kb = 0
                ke = 1

            return rebin(a[kb:ke,jb:je,ib:ie], newshape)
        else:
            ib = nghosts
            jb = nghosts
            ie = a.shape[1] - nghosts
            je = a.shape[0] - nghosts

            return rebin(a[jb:je,ib:ie], newshape)

    elif (method == 'monotonic' or method == 'pchip'):

        dims = np.arange(a.ndim)

        q = a

        for n in dims:

            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
            q  = q - d2 / 24.0

            d = np.array(q.shape)

            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
            xn = np.arange(0.5, newshape[n]) / newshape[n]

            u = q.reshape([d[0], int(q.size / d[0])])
            f = np.zeros([newshape[n], int(q.size / d[0])])
            for i in range(int(q.size / d[0])):
                f[:,i] = pchip_interpolate(xo, u[:,i], xn)

            d[0] = newshape[n]
            f = f.reshape(d)

            q = f.transpose(np.roll(dims, -1))

        return q

    elif (method == 'spline'):

        dims = np.arange(a.ndim)

        q = a

        for n in dims:

            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
            q  = q - d2 / 24.0

            d = np.array(q.shape)

            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
            xn = np.arange(0.5, newshape[n]) / newshape[n]

            u = q.reshape([d[0], int(q.size / d[0])])
            f = np.zeros([newshape[n], int(q.size / d[0])])
            for i in range(int(q.size / d[0])):
                t = splrep(xo, u[:,i], k=5, s=0.0)
                f[:,i] = splev(xn, t)

            d[0] = newshape[n]
            f = f.reshape(d)

            q = f.transpose(np.roll(dims, -1))

        return q

    else:

        dims = np.arange(a.ndim)

        q = a

        for n in dims:

            d2 = np.roll(q,-1, axis=0) + np.roll(q, 1, axis=0) - 2.0 * q
            q  = q - d2 / 24.0

            d = np.array(q.shape)

            xo = (np.arange(0.5, a.shape[n]) - nghosts) / (a.shape[n] - 2 * nghosts)
            xn = np.arange(0.5, newshape[n]) / newshape[n]

            u = q.reshape([d[0], int(q.size / d[0])])
            f = np.zeros([newshape[n], int(q.size / d[0])])
            for i in range(int(q.size / d[0])):
                t = interp1d(xo, u[:,i], kind=method)
                f[:,i] = t(xn)

            d[0] = newshape[n]
            f = f.reshape(d)

            q = f.transpose(np.roll(dims, -1))

        return q
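
The two rescaling paths of rebin() are easy to see on a toy array, and interpolate() strips the ghost zones before rescaling. A short sketch, assuming the package from this commit is importable as amunpy:

# Sketch of rebin()/interpolate() behaviour on small arrays.
import numpy as np
from amunpy.interpolation import rebin, interpolate

a = np.arange(16.0).reshape(4, 4)
print(rebin(a, [2, 2]))          # downsampling: averages of 2x2 cell groups
print(rebin(a, [8, 8]))          # upsampling: every cell repeated into a 2x2 patch

b = np.random.rand(8, 8)         # toy block with 2 ghost cells on each side
print(interpolate(b, [8, 8], 2).shape)   # ghosts stripped, interior rescaled to 8x8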