diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..284d0ef --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,23 @@ +image: debian + +stages: + - build + +build: + stage: build + before_script: + - apt-get -q update + - apt-get -q -y install gawk make gfortran libhdf5-dev libopenmpi-dev + + script: + - cd ./build/ + - cp -al make.default make.config + - cp -al ./hosts/default ./hosts/$HOSTNAME + - export HDF5DIR=/usr/lib/x86_64-linux-gnu/hdf5/serial + - make MPI=N NDIMS=2 + - make clean + - make MPI=N NDIMS=3 + - make clean + - make MPI=Y NDIMS=2 + - make clean + - make MPI=Y NDIMS=3 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..2575a22 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,13 @@ +# 2019-10-04 No version yet. ## +---- + +- support for rectangular adaptive domain in 2D and 3D; +- support for hydrodynamical (HYDRO) and magnetohydrodynamical (MHD) equations, both in classical and relativistic formulations; +- support for adiabatic (ADI) and isothermal (ISO) equation of state; +- support for viscosity and resistivity source terms; +- support for passive scalars; +- time integration using Euler and 2nd order Runge-Kutta methods or up to 4th order Strong Stability Preserving Runge-Kutta; +- a number of spatial interpolation using 2nd order TVD methods, up to 9th order Monotonicity-Preserving; +- HLL-family of approximate Riemann solvers (HLL, HLLC, and HLLD); +- GLM scheme for the induction equation; +- MPI parallelization; \ No newline at end of file diff --git a/README.md b/README.md index 9567e46..50c98ab 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ --------------------------------------------------------------------------------- + # **The AMUN Code** -## Copyright (C) 2008-2019 Grzegorz Kowal ## --------------------------------------------------------------------------------- +## Copyright (C) 2008-2019 Grzegorz Kowal AMUN is a parallel code to perform numerical simulations in fluid 
approximation on uniform or non-uniform (adaptive) meshes. The goal in developing this code is to create a solid framework for simulations with support for number of numerical -methods which can be selected in an easy way through the parameter file. The +methods which can be selected in an easy way through a parameter file. The following features are already implemented: @@ -18,11 +17,13 @@ following features are already implemented: * 2nd order TVD interpolation with number of limiters and higher order reconstructions, * Riemann solvers of Roe- and HLL-types (HLL, HLLC, and HLLD), -* periodic and open boundary conditions, +* standard boundary conditions: periodic, open, reflective, hydrostatic, etc. * viscous and resistive source terms, +* support for passive scalars (up to 100), * data stored in the HDF5 format, * MPI parallelization, -* completely written in Fortran 2003. +* completely written in Fortran 2003, +* Python interface to read data. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software @@ -46,12 +47,16 @@ Developers Requirements ============ -* Fortran 2003 compiler (tested compilers include - [GNU Fortran](http://gcc.gnu.org/fortran/) version 4.5 or newer, - [Intel Fortran](https://software.intel.com/en-us/fortran-compilers) compiler - version 9.0 or newer) -* [HDF5 libraries](http://www.hdfgroup.org/HDF5/) version 1.8 or newer. -* [OpenMPI](https://www.open-mpi.org/) version 1.8 or newer for parallel runs. +* Fortran 2003 compiler, tested compilers include: + - [GNU Fortran](https://gcc.gnu.org/fortran/) version 4.5 or newer, + - [PGI Community Edition](https://www.pgroup.com/products/community.htm), + version 18.10 or newer, + - [Intel Fortran](https://software.intel.com/en-us/fortran-compilers) + compiler version 9.0 or newer. 
+* [HDF5 libraries](https://www.hdfgroup.org/solutions/hdf5/), tested with + version 1.8 or newer. +* [OpenMPI](https://www.open-mpi.org/) for parallel runs, tested with version + 1.8 or newer. Environment Variables @@ -65,11 +70,15 @@ the HDF5 libraries have been installed. Compilation =========== -1. Clone the AMUN source code: `git clone https://bitbucket.org/amunteam/amun-code.git`, - or unpack the archive downloaded from page +1. Clone the AMUN source code: + - from Bitbucket: + `git clone https://grzegorz_kowal@bitbucket.org/amunteam/amun-code.git`, + - from GitLab: + `git clone https://gitlab.com/gkowal/amun-code.git` + - or unpack the archive downloaded from page [Downloads](https://bitbucket.org/amunteam/amun-code/downloads/). 2. Go to directory **build/hosts/** and copy file **default** to a new file named - exactly as your host name (name returned by command `hostname`). + exactly as your host name, i.e. `cp default $HOSTNAME`. 3. Customize your compiler and compilation options in your new host file. 4. Go up to directory **build/** and copy file **make.default** to **make.config**. 5. Customize compilation time options in **make.config**. @@ -80,16 +89,18 @@ Compilation Usage ===== -In order to run some test problems you can simply copy corresponding parameter -from directory **problems/** to the location when you wish to run your test. -Copy the executable file **amun.x** compiled earlier to the same directory. If -you provide option _-i _, the code will know that the parameters -have to be read from file __. If you don't provide this option, -the code will assume that the parameters are stored in file **params.in** in the -same director. +In order to run some test problems you can simply copy the problem parameter +file from directory **problems/** to the location where you wish to run your +test. Copy the executable file **amun.x** from the **build/** directory compiled +earlier. 
If you provide option _-i _, the code will know that +parameters have to be read from file __. If you don't provide +this option, the code assumes that the parameters are stored in file +**params.in** in the same directory. -In order to run serial version, type in your terminal: `amun.x -i params.in`. +In order to run serial version, just type in your terminal: + `./amun.x -i ./params.in`. -In order to run the parallel version (after compiling the code with MPI -version), type in your terminal: `mpirun -n N ./amun.x -i params.in`, where N is -the number of processors. +In order to run parallel version (after compiling the code with MPI support), +type in your terminal: + `mpirun -n N ./amun.x -i ./params.in`, +where N is the number of processors to use. \ No newline at end of file diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml new file mode 100644 index 0000000..003e634 --- /dev/null +++ b/bitbucket-pipelines.yml @@ -0,0 +1,25 @@ +# This is a sample build configuration for Other. +# Check our guides at https://confluence.atlassian.com/x/5Q4SMw for more examples. +# Only use spaces to indent your .yml configuration. +# ----- +# You can specify a custom docker image from Docker Hub as your build environment. 
+image: atlassian/default-image:2 + +pipelines: + default: + - step: + name: Build + script: + - apt-get -q update + - apt-get -q -y install gawk make gfortran libhdf5-dev libopenmpi-dev + - cd ./build + - cp -al make.default make.config + - cp -al ./hosts/default ./hosts/$HOSTNAME + - export HDF5DIR=/usr/lib/x86_64-linux-gnu/hdf5/serial + - make MPI=N NDIMS=2 + - make clean + - make MPI=N NDIMS=3 + - make clean + - make MPI=Y NDIMS=2 + - make clean + - make MPI=Y NDIMS=3 \ No newline at end of file diff --git a/python/amun.py b/python/amun.py index f3591f1..7e8460c 100644 --- a/python/amun.py +++ b/python/amun.py @@ -37,6 +37,7 @@ import numpy as np import os.path as op import sys + def amun_compatible(fname): ''' Subroutine checks if the HDF5 file is AMUN compatible. @@ -47,39 +48,28 @@ def amun_compatible(fname): Return values: - ret - True or False; + True or False; Examples: comp = amun_compatible('p000010_00000.h5') ''' - try: - f = h5.File(fname, 'r') - - # check if the file is written in the AMUN format or at least contains - # necessary groups - # - ret = True - if 'code' in f.attrs: - if f.attrs['code'].astype(str) != "AMUN": - print("'%s' contains attribute 'code'", \ - " but it is not set to 'AMUN'!" % fname) - ret = False - elif not 'attributes' in f or \ - not 'coordinates' in f or \ - not 'variables' in f: - print("'%s' misses one of these groups: ", \ - "'attributes', 'coordinates' or 'variables'!" % fname) - ret = False - - f.close() - - except: - print("It seems '%s' is not an HDF5 file!" 
% fname) - ret = False - - return ret + with h5.File(fname, 'r') as f: + if 'code' in f.attrs: + if f.attrs['code'].astype(str) == "AMUN": + return True + else: + print("'%s' contains attribute 'code'," % fname, \ + " but it is not 'AMUN'!") + return False + elif 'attributes' in f and 'coordinates' in f and \ + 'variables' in f: + return True + else: + print("'%s' misses one of these groups:" % fname, \ + "'attributes', 'coordinates' or 'variables'!") + return False def amun_attribute(fname, aname): @@ -93,7 +83,7 @@ Return values: - ret - the value of the attribute; + ret - the value of the attribute or None; Examples: @@ -101,29 +91,19 @@ ''' if not amun_compatible(fname): - return False + return None - try: - f = h5.File(fname, 'r') - g = f['attributes'] - - if aname in g.attrs: - attr = g.attrs[aname] + with h5.File(fname, 'r') as f: + if aname in f['attributes'].attrs: + attr = f['attributes'].attrs[aname] if attr.dtype.type is np.string_: ret = np.squeeze(attr).astype(str) else: ret = np.squeeze(attr) + return ret else: - print("Attribute '%s' cannot be retrieved from '%s'!" % (aname, fname)) - ret = False - - f.close() - - except: - print("Attribute '%s' cannot be retrieved from '%s'!" % (aname, fname)) - ret = False - - return ret + print("Attribute '%s' cannot be found in '%s'!" 
% (aname, fname)) + return None def amun_coordinate(fname, iname): @@ -137,7 +117,7 @@ def amun_coordinate(fname, iname): Return values: - ret - the values of the item; + ret - the value of the item or None; Examples: @@ -145,29 +125,14 @@ def amun_coordinate(fname, iname): ''' if not amun_compatible(fname): - return False + return None - try: - f = h5.File(fname, 'r') - g = f['coordinates'] - - if iname in g: - item = g[iname] - if item.dtype.type is np.string_: - ret = np.squeeze(item).astype(str) - else: - ret = np.squeeze(item) + with h5.File(fname, 'r') as f: + if iname in f['coordinates']: + return np.array(f['coordinates'][iname]) else: - print("Coordinate item '%s' cannot be retrieved from '%s'!" % (iname, fname)) - ret = False - - f.close() - - except: - print("Coordinate item '%s' cannot be retrieved from '%s'!" % (iname, fname)) - ret = False - - return ret + print("Coordinate item '%s' not found in group 'coordinate' of '%s'!" % (iname, fname)) + return None def amun_dataset(fname, vname, shrink = 1, progress = False): @@ -192,138 +157,138 @@ def amun_dataset(fname, vname, shrink = 1, progress = False): ''' if not amun_compatible(fname): - return False + return None - try: - dname = op.dirname(fname) + dname = op.dirname(fname) - if progress: - sys.stdout.write("Data file path:\n '%s'\n" % (dname)) + if progress: + sys.stdout.write("Data file path:\n '%s'\n" % (dname)) - # get attributes necessary to reconstruct the domain - # - eqsys = amun_attribute(fname, 'eqsys') - eos = amun_attribute(fname, 'eos') - nr = amun_attribute(fname, 'isnap') - nc = amun_attribute(fname, 'nprocs') - nl = amun_attribute(fname, 'nleafs') - if eos == 'adi': - gm = amun_attribute(fname, 'gamma') + # get attributes necessary to reconstruct the domain + # + eqsys = amun_attribute(fname, 'eqsys') + eos = amun_attribute(fname, 'eos') + nr = amun_attribute(fname, 'isnap') + nc = amun_attribute(fname, 'nprocs') + nl = amun_attribute(fname, 'nleafs') + if eos == 'adi': + gm = 
amun_attribute(fname, 'gamma') - # prepare array to hold data - # - ndims = amun_attribute(fname, 'ndims') - nn = amun_attribute(fname, 'ncells') - bm = np.array([nn, nn, nn]) - if ndims == 2: - bm[2] = 1 - ng = amun_attribute(fname, 'nghosts') - ml = amun_attribute(fname, 'maxlev') - f = h5.File(fname, 'r') - if 'rdims' in f['attributes'].attrs: - rm = amun_attribute(fname, 'rdims') - elif 'bdims' in f['attributes'].attrs: - rm = amun_attribute(fname, 'bdims') - else: - rm = amun_attribute(fname, 'domain_base_dims') - f.close() + # get block dimensions and the maximum level + # + ndims = amun_attribute(fname, 'ndims') + nn = amun_attribute(fname, 'ncells') + bm = np.array([nn, nn, nn]) + if ndims == 2: + bm[2] = 1 + ng = amun_attribute(fname, 'nghosts') + ml = amun_attribute(fname, 'maxlev') - # build the list of supported variables - # - variables = [] - f = h5.File(fname, 'r') + # get the base block dimensions + # + rm = amun_attribute(fname, 'bdims') + if rm is None: + rm = amun_attribute(fname, 'domain_base_dims') + if rm is None: + rm = amun_attribute(fname, 'rdims') + if rm is None: + return None + + # build the list of supported variables + # + variables = [] + with h5.File(fname, 'r') as f: for var in f['variables'].keys(): variables.append(var) - f.close() - # add derived variables if possible - # - variables.append('level') - if 'velx' in variables and 'vely' in variables and 'velz' in variables: - variables.append('velo') - variables.append('divv') - variables.append('vort') - if 'magx' in variables and 'magy' in variables and 'magz' in variables: - variables.append('magn') - variables.append('divb') - variables.append('curr') - if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' \ - and 'pres' in variables: - variables.append('eint') - if 'dens' in variables and 'pres' in variables: - variables.append('temp') - if (eqsys == 'hd' or eqsys == 'mhd') \ - and 'dens' in variables \ - and 'velx' in variables \ - and 'vely' in variables \ - and 'velz' in 
variables: - variables.append('ekin') - if (eqsys == 'mhd' or eqsys == 'srmhd') \ - and 'magx' in variables \ - and 'magy' in variables \ - and 'magz' in variables: - variables.append('emag') - if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables: - variables.append('etot') - if eqsys == 'mhd' and 'eint' in variables \ - and 'ekin' in variables \ - and 'emag' in variables: - variables.append('etot') - if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables: - variables.append('lore') + # add derived variables if possible + # + variables.append('level') + if 'velx' in variables and 'vely' in variables and 'velz' in variables: + variables.append('velo') + variables.append('divv') + variables.append('vort') + if 'magx' in variables and 'magy' in variables and 'magz' in variables: + variables.append('magn') + variables.append('divb') + variables.append('curr') + if (eqsys == 'hd' or eqsys == 'mhd') and eos == 'adi' \ + and 'pres' in variables: + variables.append('eint') + if 'dens' in variables and 'pres' in variables: + variables.append('temp') + if (eqsys == 'hd' or eqsys == 'mhd') \ + and 'dens' in variables \ + and 'velx' in variables \ + and 'vely' in variables \ + and 'velz' in variables: + variables.append('ekin') + if (eqsys == 'mhd' or eqsys == 'srmhd') \ + and 'magx' in variables \ + and 'magy' in variables \ + and 'magz' in variables: + variables.append('emag') + if eqsys == 'hd' and 'ekin' in variables and 'eint' in variables: + variables.append('etot') + if eqsys == 'mhd' and 'eint' in variables \ + and 'ekin' in variables \ + and 'emag' in variables: + variables.append('etot') + if (eqsys == 'srhd' or eqsys == 'srmhd') and 'velo' in variables: + variables.append('lore') - # check if the requested variable is in the variable list - # - if not vname in variables: - print('The requested variable cannot be extracted from the file datasets!') - return False + # check if the requested variable is in the variable list + # + if not vname in 
variables: + print('The requested variable cannot be extracted from the file datasets!') + return None - # check if the shrink parameter is correct (block dimensions should be - # divisible by the shrink factor) - # - shrink = max(1, int(shrink)) - if shrink > 1: - if (nn % shrink) != 0: - print('The block dimension should be divisible by the shrink factor!') - return False - sh = shrink - while(sh > 2 and sh % 2 == 0): - sh = int(sh / 2) - if (sh % 2) != 0: - print('The shrink factor should be a power of 2!') - return False + # check if the shrink parameter is correct (block dimensions should be + # divisible by the shrink factor) + # + shrink = max(1, int(shrink)) + if shrink > 1: + if (nn % shrink) != 0: + print('The block dimension should be divisible by the shrink factor!') + return None + sh = shrink + while(sh > 2 and sh % 2 == 0): + sh = int(sh / 2) + if (sh % 2) != 0: + print('The shrink factor should be a power of 2!') + return None - # determine the actual maximum level from the blocks - # - ml = 0 - for n in range(nc): - fname = 'p%06d_%05d.h5' % (nr, n) - lname = op.join(dname, fname) - dblocks = amun_attribute(lname, 'dblocks') - if dblocks > 0: - levels = amun_coordinate(lname, 'levels') - ml = max(ml, levels.max()) + # determine the actual maximum level from the blocks + # + levs = [] + for n in range(nc): + fname = 'p%06d_%05d.h5' % (nr, n) + lname = op.join(dname, fname) + dblocks = amun_attribute(lname, 'dblocks') + if dblocks > 0: + levs = np.append(levs, [amun_coordinate(lname, 'levels')]) + ml = int(levs.max()) - # prepare dimensions of the output array and allocate it - # - dm = np.array(rm[0:ndims] * bm[0:ndims] * 2**(ml - 1) / shrink, \ + # prepare dimensions of the output array and allocate it + # + dm = np.array(rm[0:ndims] * bm[0:ndims] * 2**(ml - 1) / shrink, \ dtype = np.int32) - ret = np.zeros(dm[::-1]) + ret = np.zeros(dm[::-1]) - # iterate over all subdomain files - # - nb = 0 - for n in range(nc): - fname = 'p%06d_%05d.h5' % (nr, 
n) - lname = op.join(dname, fname) - dblocks = amun_attribute(lname, 'dblocks') - if dblocks > 0: - levels = amun_coordinate(lname, 'levels') - coords = amun_coordinate(lname, 'coords') - dx = amun_coordinate(lname, 'dx') - dy = amun_coordinate(lname, 'dy') - dz = amun_coordinate(lname, 'dz') - f = h5.File(lname, 'r') + # iterate over all subdomain files + # + nb = 0 + for n in range(nc): + fname = 'p%06d_%05d.h5' % (nr, n) + lname = op.join(dname, fname) + dblocks = amun_attribute(lname, 'dblocks') + if dblocks > 0: + levels = amun_coordinate(lname, 'levels') + coords = amun_coordinate(lname, 'coords') + dx = amun_coordinate(lname, 'dx') + dy = amun_coordinate(lname, 'dy') + dz = amun_coordinate(lname, 'dz') + with h5.File(lname, 'r') as f: g = f['variables'] if vname == 'level': dataset = np.zeros(g[variables[0]].shape) @@ -451,8 +416,6 @@ def amun_dataset(fname, vname, shrink = 1, progress = False): else: dataset = g[vname][:,:,:,:] - f.close() - # rescale all blocks to the effective resolution # for l in range(dblocks): @@ -479,13 +442,9 @@ def amun_dataset(fname, vname, shrink = 1, progress = False): % (vname, fname, nb, nl)) sys.stdout.flush() - if (progress): - sys.stdout.write('\n') - sys.stdout.flush() - - except: - print("Dataset '%s' cannot be retrieved from '%s'!" % (vname, fname)) - ret = False + if (progress): + sys.stdout.write('\n') + sys.stdout.flush() return ret diff --git a/python/setup.py b/python/setup.py new file mode 100644 index 0000000..42d2aa4 --- /dev/null +++ b/python/setup.py @@ -0,0 +1,13 @@ +from setuptools import setup + +setup( + name='amun', + description='Python Interface fo AMUN snapshots', + version='0.1', + author='Grzegorz Kowal', + author_email='grzegorz@amuncode.org', + url='https://www.amuncode.org/', + license='GPLv3', + py_modules=['amun'], + install_requires=['h5py', 'numpy'] + )