@misc{jaura2017sprai,
  author = {Jaura, Ondrej and Glover, Simon C. O. and Klessen, Ralf S. and Paardekooper, Jan-Pieter},
  title = {SPRAI: Coupling of radiative feedback and primordial chemistry in moving mesh hydrodynamics},
  year = {2017},
  note = {arXiv:1711.02542},
  url = {http://arxiv.org/abs/1711.02542},
  keywords = {arepo code radiative transfer},
  abstract = {In this paper we introduce a new radiative transfer code SPRAI (Simplex Photon Radiation in the Arepo Implementation) based on the SimpleX radiation transfer method. This method, originally used only for post-processing, is now directly integrated into the Arepo code and takes advantage of its adaptive unstructured mesh. Radiated photons are transferred from the sources through a series of Voronoi gas cells within a specific solid angle. From the photon attenuation we derive the corresponding photon fluxes and ionization rates and feed them to a primordial chemistry module. This gives us a self-consistent method for studying dynamical and chemical processes caused by ionizing sources in primordial gas. Since the computational cost of the SimpleX method does not scale directly with the number of sources, it is convenient for studying systems such as primordial star-forming halos that may form multiple ionizing sources.}
}

@misc{natale2017raytracing,
  author = {Natale, Giovanni and Popescu, Cristina C. and Tuffs, Richard J. and Clarke, Adam J. and Debattista, Victor P. and Fischera, Jörg and Pasetto, Stefano and Rushton, Mark and Thirlwall, Jordan J.},
  title = {Ray-tracing 3D dust radiative transfer with DART-Ray: code upgrade and public release},
  year = {2017},
  note = {arXiv:1709.03802. Accepted by A\&A. DART-Ray can be downloaded at https://github.com/gnatale/DART-Ray/; code documentation: http://www.star.uclan.ac.uk/~gn/dartray_doc/; N-body/SPH galaxy simulation examples: http://www.star.uclan.ac.uk/mgsv/},
  url = {http://arxiv.org/abs/1709.03802},
  keywords = {RT code dust},
  abstract = {We present an extensively updated version of the purely ray-tracing 3D dust radiation transfer code DART-Ray. The new version includes five major upgrades: 1) a series of optimizations for the ray-angular density and the scattered radiation source function; 2) the implementation of several data and task parallelizations using hybrid MPI+OpenMP schemes; 3) the inclusion of dust self-heating; 4) the ability to produce surface brightness maps for observers within the models in HEALPix format; 5) the possibility to set the expected numerical accuracy already at the start of the calculation. We tested the updated code with benchmark models where the dust self-heating is not negligible. Furthermore, we performed a study of the extent of the source influence volumes, using galaxy models, which are critical in determining the efficiency of the DART-Ray algorithm. The new code is publicly available, documented for both users and developers, and accompanied by several programmes to create input grids for different model geometries and to import the results of N-body and SPH simulations. These programmes can be easily adapted to different input geometries, and for different dust models or stellar emission libraries.}
}

@misc{ferland2017release,
  author = {Ferland, G. J. and Chatzikos, M. and Guzmán, F. and Lykins, M. L. and van Hoof, P. A. M. and Williams, R. J. R. and Abel, N. P. and Badnell, N. R. and Keenan, F. P. and Porter, R. L. and Stancil, P. C.},
  title = {The 2017 Release of Cloudy},
  year = {2017},
  note = {arXiv:1705.10877. Submitted to Revista Mexicana.},
  url = {http://arxiv.org/abs/1705.10877},
  keywords = {cloudy code},
  abstract = {We describe the 2017 release of the spectral synthesis code Cloudy. A major development since the previous release has been exporting the atomic data into external data files. This greatly simplifies updates and maintenance of the data. Many large datasets have been incorporated, with the result that we can now predict well over an order of magnitude more emission lines when all databases are fully used. The use of such large datasets is not realistic for most calculations due to the time and memory needs, and we describe the limited subset of data we use by default. Despite the fact that we now predict significantly more lines than the previous Cloudy release, this version is faster because of optimization of memory access patterns and other tuning. The size and use of the databases can easily be adjusted in the command-line interface. We give examples of the accuracy limits using small models, and the performance requirements of large complete models. We summarize several advances in the H- and He-like iso-electronic sequences. We use our complete collisional-radiative models of the ionization of these one- and two-electron ions to establish the highest density for which the coronal or interstellar medium (ISM) approximation works, and the lowest density where Saha or local thermodynamic equilibrium can be assumed. The coronal approximation fails at surprisingly low densities for collisional ionization equilibrium but is valid to higher densities for photoionized gas clouds. Many other improvements to the physics have been made and are described. These include the treatment of isotropic continuum sources such as the cosmic microwave background (CMB) in the reported output, and the ability to follow the evolution of cooling non-equilibrium clouds.}
}
@misc{price2017phantom,
  author = {Price, Daniel J. and Wurster, James and Nixon, Chris and Tricco, Terrence S. and Toupin, Stéven and Pettitt, Alex and Chan, Conrad and Laibe, Guillaume and Glover, Simon and Dobbs, Clare and Nealon, Rebecca and Liptai, David and Worpel, Hauke and Bonnerot, Clément and Dipierro, Giovanni and Ragusa, Enrico and Federrath, Christoph and Iaconi, Roberto and Reichardt, Thomas and Forgan, Duncan and Hutchison, Mark and Constantino, Thomas and Ayliffe, Ben and Mentiplay, Daniel and Hirsh, Kieran and Lodato, Giuseppe},
  title = {Phantom: A smoothed particle hydrodynamics and magnetohydrodynamics code for astrophysics},
  year = {2017},
  note = {arXiv:1702.03930. 77 pages, 51 figures, 335 equations, submitted to PASA. Code available from https://phantomsph.bitbucket.io/},
  url = {http://arxiv.org/abs/1702.03930},
  keywords = {code phantom sph},
  abstract = {We present Phantom, a fast, parallel, modular and low-memory smoothed particle hydrodynamics and magnetohydrodynamics code developed over the last decade for astrophysical applications in three dimensions. The code has been developed with a focus on stellar, galactic, planetary and high energy astrophysics and has already been used widely for studies of accretion discs and turbulence, from the birth of planets to how black holes accrete. Here we describe and test the core algorithms as well as modules for magnetohydrodynamics, self-gravity, sink particles, H$_2$ chemistry, dust-gas mixtures, physical viscosity, external forces including numerous galactic potentials, as well as implementations of Lense-Thirring precession, Poynting-Robertson drag and stochastic turbulent driving. Phantom is hereby made publicly available.}
}

@misc{shiokawa2017patchwork,
  author = {Shiokawa, Hotaka and Cheng, Roseanne M. and Noble, Scott C. and Krolik, Julian H.},
  title = {PATCHWORK: A Multipatch Infrastructure for Multiphysics/Multiscale/Multiframe Fluid Simulations},
  year = {2017},
  note = {arXiv:1701.05610. 17 pages, 9 figures, submitted to ApJ.},
  url = {http://arxiv.org/abs/1701.05610},
  keywords = {code patches simulation},
  abstract = {We present a "multipatch" infrastructure for numerical simulation of fluid problems in which sub-regions require different grid scales, different grid geometries, different physical equations, or different reference frames. Its key element is a sophisticated client-router-server framework for efficiently linking processors supporting different regions ("patches") that must exchange boundary data. This infrastructure may be used with a wide variety of fluid dynamics codes; the only requirement is that their primary dependent variables be the same in all patches, e.g., fluid mass density, internal energy density, and velocity. Its structure can accommodate either Newtonian or relativistic dynamics. The overhead imposed by this system is both problem- and computer cluster architecture-dependent. Compared to a conventional simulation using the same number of cells and processors, the increase in runtime can be anywhere from negligible to a factor of a few; however, one of the infrastructure's advantages is that it can lead to a very large reduction in the total number of zone-updates.}
}

@misc{few2016testing,
  author = {Few, C. G. and Dobbs, C. and Pettitt, A. and Konstandin, L.},
  title = {Testing Hydrodynamics Schemes in Galaxy Disc Simulations},
  year = {2016},
  doi = {10.1093/mnras/stw1226},
  note = {arXiv:1605.09792. Accepted for publication in MNRAS.},
  url = {http://arxiv.org/abs/1605.09792},
  keywords = {code comparison simulation},
  abstract = {We examine how three fundamentally different numerical hydrodynamics codes follow the evolution of an isothermal galactic disc with an external spiral potential. We compare an adaptive mesh refinement code (RAMSES), a smoothed particle hydrodynamics code (sphNG), and a volume-discretised meshless code (GIZMO). Using standard refinement criteria, we find that RAMSES produces a disc that is less vertically concentrated and does not reach such high densities as the sphNG or GIZMO runs. The gas surface density in the spiral arms increases at a lower rate for the RAMSES simulations compared to the other codes. There is also a greater degree of substructure in the sphNG and GIZMO runs, and secondary spiral arms are more pronounced. By resolving the Jeans length with a greater number of grid cells we achieve results more similar to those of the Lagrangian codes used in this study. Other alterations to the refinement scheme (adding extra levels of refinement and refining based on local density gradients) are less successful in reducing the disparity between RAMSES and sphNG/GIZMO. Although more similar, sphNG displays different density distributions and vertical mass profiles to all modes of GIZMO (including the smoothed particle hydrodynamics version). This suggests that differences also arise which are not intrinsic to the particular method but rather are due to its implementation. The discrepancies between codes (in particular, the densities reached in the spiral arms) could potentially result in differences in the locations and timescales for gravitational collapse, and therefore impact star formation activity in more complex galaxy disc simulations.}
}
@misc{bisbas2015torus3dpdr,
  author = {Bisbas, T. G. and Haworth, T. J. and Barlow, M. J. and Viti, S. and Harries, T. J. and Bell, T. and Yates, J. A.},
  title = {TORUS-3DPDR: A self-consistent code treating three-dimensional photoionization and photodissociation regions},
  year = {2015},
  note = {arXiv:1509.05038. 17 pages, 10 figures, accepted for publication in MNRAS.},
  url = {http://arxiv.org/abs/1509.05038},
  keywords = {3dpdr code pdr radiation torus transfer},
  abstract = {The interaction of ionizing and far-ultraviolet radiation with the interstellar medium is of great importance. It results in the formation of regions in which the gas is ionized, beyond which are photodissociation regions (PDRs) in which the gas transitions to its atomic and molecular form. Several numerical codes have been implemented to study these two main phases of the interstellar medium either dynamically or chemically. In this paper we present TORUS-3DPDR, a new self-consistent code for treating the chemistry of three-dimensional photoionization and photodissociation regions. It is an integrated code coupling the two codes TORUS, a hydrodynamics and Monte Carlo radiation transport code, and 3D-PDR, a photodissociation regions code. The new code uses a Monte Carlo radiative transfer scheme to account for the propagation of the ionizing radiation, including the diffusive component, as well as a ray-tracing scheme based on the HEALPix package to account for the escape probability and column density calculations. Here we present the numerical techniques we followed and show the capabilities of the new code in modelling three-dimensional objects including single or multiple sources. We discuss the effects introduced by the diffusive component of the UV field in determining the thermal balance of PDRs, as well as the effects introduced by a multiple-source treatment of the radiation field. We find that diffuse radiation can positively contribute to the formation of CO. With this new code, three-dimensional synthetic observations for the major cooling lines are possible, making a detailed comparison between hydrodynamical simulations and observations feasible.}
}

@misc{aubert2015cosmological,
  author = {Aubert, Dominique and Deparis, Nicolas and Ocvirk, Pierre},
  title = {EMMA: an AMR cosmological simulation code with radiative transfer},
  year = {2015},
  note = {arXiv:1508.07888. Accepted for publication in MNRAS, 28 pages, 27 figures.},
  url = {http://arxiv.org/abs/1508.07888},
  keywords = {code radiative reionization simulation transfer},
  abstract = {EMMA is a cosmological simulation code aimed at investigating the reionization epoch. It handles simultaneously collisionless and gas dynamics, as well as radiative transfer physics using a moment-based description with the M1 approximation. Field quantities are stored and computed on an adaptive 3D mesh and the spatial resolution can be dynamically modified based on physically-motivated criteria. Physical processes can be coupled at all spatial and temporal scales. We also introduce a new and optional approximation to handle radiation: the light is transported at the resolution of the non-refined grid and only once the dynamics have been fully updated, whereas thermo-chemical processes are still tracked on the refined elements. Such an approximation reduces the overheads induced by the treatment of radiation physics. A suite of standard tests is presented and passed by EMMA, providing a validation for its future use in studies of the reionization epoch. The code is parallel and is able to use graphics processing units (GPUs) to accelerate hydrodynamics and radiative transfer calculations. Depending on the optimizations and the compilers used to generate the CPU reference, global GPU acceleration factors between 3.9 and 16.9 can be obtained. Vectorization and transfer operations currently prevent better GPU performance, and we expect that future optimizations and hardware evolution will lead to greater accelerations.}
}

@misc{gabor2015comparison,
  author = {Gabor, J. M. and Capelo, Pedro R. and Volonteri, Marta and Bournaud, Frédéric and Bellovary, Jillian and Governato, Fabio and Quinn, Thomas},
  title = {A comparison of black hole growth in galaxy mergers with Gasoline and Ramses},
  year = {2015},
  note = {arXiv:1508.02224. 11 pages, 7 figures, submitted to A\&A.},
  url = {http://arxiv.org/abs/1508.02224},
  keywords = {accretion blackhole code comparison simulation},
  abstract = {Supermassive black hole dynamics during galaxy mergers is crucial in determining the rate of black hole mergers and cosmic black hole growth. As simulations achieve higher resolution, it becomes important to assess whether the black hole dynamics is influenced by the treatment of the interstellar medium in different simulation codes. Here we compare simulations of black hole growth in galaxy mergers with two codes: the Smoothed Particle Hydrodynamics code Gasoline, and the Adaptive Mesh Refinement code Ramses. We seek to identify predictions of these models that are robust despite differences in hydrodynamic methods and implementations of sub-grid physics. We find that the general behavior is consistent between codes. Black hole accretion is minimal while the galaxies are well-separated (and even as they "fly-by" within 10 kpc at first pericenter). At late stages, when the galaxies pass within a few kpc, tidal torques drive nuclear gas inflow that triggers bursts of black hole accretion accompanied by star formation. We also note quantitative discrepancies that are model-dependent: our Ramses simulations show less star formation and black hole growth, and a smoother gas distribution with larger clumps and filaments, than our Gasoline simulations. We attribute these differences primarily to the sub-grid models for black hole fueling and feedback and gas thermodynamics. The main conclusion is that quantitative differences exist between codes, and this should be kept in mind when making comparisons with observations. However, reassuringly, both codes capture the same dynamical behaviors in terms of triggering of black hole accretion, star formation, and black hole dynamics.}
}
@misc{theuns2015swift,
  author = {Theuns, Tom and Chalk, Aidan and Schaller, Matthieu and Gonnet, Pedro},
  title = {SWIFT: task-based hydrodynamics and gravity for cosmological simulations},
  year = {2015},
  note = {arXiv:1508.00115. Proceedings of the EASC 2015 conference, Edinburgh, UK, April 21-23, 2015.},
  url = {http://arxiv.org/abs/1508.00115},
  keywords = {code simulation swift},
  abstract = {Simulations of galaxy formation follow the gravitational and hydrodynamical interactions between gas, stars and dark matter through cosmic time. The huge dynamic range of such calculations severely limits strong scaling behaviour of the community codes in use, with load imbalance, cache inefficiencies and poor vectorisation limiting performance. The new SWIFT code exploits task-based parallelism designed for many-core compute nodes interacting via MPI using asynchronous communication to improve speed and scaling. A graph-based domain decomposition schedules interdependent tasks over available resources. Strong scaling tests on realistic particle distributions yield excellent parallel efficiency, and efficient cache usage provides a large speed-up compared to current codes even on a single core. SWIFT is designed to be easy to use by shielding the astronomer from computational details such as the construction of the tasks or MPI communication. The techniques and algorithms used in SWIFT may benefit other computational physics areas as well, for example compressible hydrodynamics. For details of this open-source project, see www.swiftsim.com}
}

@misc{bisbas2015starbench,
  author = {Bisbas, T. G. and Haworth, T. J. and Williams, R. J. R. and Mackey, J. and Tremblin, P. and Raga, A. C. and Arthur, S. J. and Baczynski, C. and Dale, J. E. and Frostholm, T. and Geen, S. and Haugboelle, T. and Hubber, D. and Iliev, I. T. and Kuiper, R. and Rosdahl, J. and Sullivan, D. and Walch, S. and Wuensch, R.},
  title = {StarBench: The D-type expansion of an HII region},
  year = {2015},
  note = {arXiv:1507.05621. 20 pages, 12 figures, 4 tables. Accepted for publication in MNRAS. Participation in future StarBench tests is welcome.},
  url = {http://arxiv.org/abs/1507.05621},
  keywords = {HII code comparison expansion region},
  abstract = {StarBench is a project focused on benchmarking and validating different star-formation and stellar feedback codes. In this first StarBench paper we perform a comparison study of the D-type expansion of an HII region. The aim of this work is to understand the differences observed between the twelve participating numerical codes and the various analytical expressions describing the D-type phase of HII region expansion. To do this, we propose two well-defined tests which are tackled by 1D and 3D grid- and SPH-based codes. The first test examines the `early phase' D-type scenario, during which the mechanical pressure driving the expansion is significantly larger than the thermal pressure of the neutral medium. The second test examines the `late phase' D-type scenario, during which the system relaxes to pressure equilibrium with the external medium. Although they are mutually in excellent agreement, all twelve participating codes follow a modified expansion law that deviates significantly from the classical Spitzer solution in both scenarios. We present a semi-empirical formula combining the two different solutions appropriate to both early and late phases that agrees with high-resolution simulations to $\lesssim2\%$. This formula provides a much better benchmark solution for code validation than the Spitzer solution. The present comparison has validated the participating codes, and through this project we provide a dataset for calibrating the treatment of ionizing radiation in hydrodynamics codes.}
}

@misc{schaal2015astrophysical,
  author = {Schaal, Kevin and Bauer, Andreas and Chandrashekar, Praveen and Pakmor, Rüdiger and Klingenberg, Christian and Springel, Volker},
  title = {Astrophysical hydrodynamics with a high-order discontinuous Galerkin scheme and adaptive mesh refinement},
  year = {2015},
  note = {arXiv:1506.06140. 23 pages, 12 figures; a movie may be accessed online at https://youtu.be/cTRQP6DSaqA},
  url = {http://arxiv.org/abs/1506.06140},
  keywords = {amr code hydrodynamics},
  abstract = {Solving the Euler equations of ideal hydrodynamics as accurately and efficiently as possible is a key requirement in many astrophysical simulations. It is therefore important to continuously advance the numerical methods implemented in current astrophysical codes, especially in light of evolving computer technology, which favours certain computational approaches over others. Here we introduce the new adaptive mesh refinement (AMR) code TENET, which employs a high-order Discontinuous Galerkin (DG) scheme for hydrodynamics. The Euler equations in this method are solved in a weak formulation with a polynomial basis by means of explicit Runge-Kutta time integration and Gauss-Legendre quadrature. This approach offers significant advantages over commonly employed finite volume (FV) solvers. In particular, the higher order capability renders it computationally more efficient, in the sense that the same precision can be obtained at significantly less computational cost. Also, the DG scheme inherently conserves angular momentum in regions where no limiting takes place, and it typically produces much smaller numerical diffusion and advection errors than a FV approach. A further advantage lies in a more natural handling of AMR refinement boundaries, where a fall-back to first order can be avoided. Finally, DG requires no deep stencils at high order, and offers an improved compute-to-memory-access ratio compared with FV schemes, which is favorable for current and upcoming highly parallel supercomputers. We describe the formulation and implementation details of our new code, and demonstrate its performance and accuracy with a set of two- and three-dimensional test problems. The results confirm that DG schemes have a high potential for astrophysical applications.}
}
@misc{schneider2014cholla,
  author = {Schneider, Evan E. and Robertson, Brant E.},
  title = {Cholla: A New Massively-Parallel Hydrodynamics Code For Astrophysical Simulation},
  year = {2014},
  note = {arXiv:1410.4194. 34 pages, 18 figures. Submitted to ApJS.},
  url = {http://arxiv.org/abs/1410.4194},
  keywords = {code gpu hydrodynamics parallel},
  abstract = {We present Cholla (Computational Hydrodynamics On ParaLLel Architectures), a new three-dimensional hydrodynamics code that harnesses the power of graphics processing units (GPUs) to accelerate astrophysical simulations. Cholla models the Euler equations on a static mesh using state-of-the-art techniques, including the unsplit Corner Transport Upwind (CTU) algorithm, a variety of exact and approximate Riemann solvers, and multiple spatial reconstruction techniques including the piecewise parabolic method (PPM). Cholla performs all hydrodynamical calculations in a massively-parallel manner, using GPUs to evolve the fluid properties of thousands of cells simultaneously while leaving the power of central processing units (CPUs) available for modeling additional physics. On current hardware, Cholla can update more than ten million cells per GPU-second while using an exact Riemann solver and PPM reconstruction with the CTU algorithm. Owing to the massively-parallel architecture of GPUs and the design of the Cholla code, astrophysical simulations with physically interesting grid resolutions ($> 256^3$) can easily be computed on a single device. Cholla utilizes the Message Passing Interface library to extend calculations onto multiple devices, and exhibits nearly ideal scaling beyond 100,000 GPU cores. The excellent performance of Cholla is demonstrated on a suite of test problems that highlights the physical accuracy of our modeling and provides a useful comparison to other codes. We also provide a set of Appendices that uniformly documents all of the reconstruction methods and Riemann solvers implemented in Cholla, and discusses the strengths and weaknesses of the various methods.}
}

@misc{toonen2013popcorn,
  author = {Toonen, S. and Claeys, J. S. W. and Mennekens, N. and Ruiter, A. J.},
  title = {PopCORN: Hunting down the differences between binary population synthesis codes},
  year = {2013},
  note = {arXiv:1311.6503. 13 pages plus 21-page appendix, 35 figures, accepted for publication in A\&A.},
  url = {http://arxiv.org/abs/1311.6503},
  keywords = {binary code population},
  abstract = {Binary population synthesis (BPS) modelling is a very effective tool to study the evolution and properties of close binary systems. The uncertainty in the parameters of the model and their effect on a population can be tested in a statistical way, which then leads to a deeper understanding of the underlying physical processes involved. To understand the predictive power of BPS codes, we study the similarities and differences in the predicted populations of four different BPS codes for low- and intermediate-mass binaries. We investigate whether the differences are caused by different assumptions made in the BPS codes or by numerical effects. To simplify the complex problem of comparing BPS codes, we equalise the inherent assumptions as much as possible. We find that the simulated populations are similar between the codes. Regarding the population of binaries with one WD, there is very good agreement between the physical characteristics, the evolutionary channels that lead to the birth of these systems, and their birthrates. Regarding the double WD population, there is good agreement on which evolutionary channels exist to create double WDs and a rough agreement on the characteristics of the double WD population. Regarding which progenitor systems lead to a single or double WD system and which systems do not, the four codes agree well. Most importantly, we find that for these two populations the differences in the predictions from the four codes are not due to numerical differences, but to different inherent assumptions. We identify critical assumptions for BPS studies that need to be studied in more detail.}
}

@misc{monaco2013accurate,
  author = {Monaco, P. and Sefusatti, E. and Borgani, S. and Crocce, M. and Fosalba, P. and Sheth, R. K. and Theuns, T.},
  title = {An accurate tool for the fast generation of dark matter halo catalogs},
  year = {2013},
  note = {arXiv:1305.1505. 14 pages, 9 figures, accepted by MNRAS. The PINOCCHIO code can be downloaded from http://adlibitum.oats.inaf.it/monaco/Homepage/Pinocchio/index.html},
  url = {http://arxiv.org/abs/1305.1505},
  keywords = {catalogue code darkmatter halo},
  abstract = {We present a new parallel implementation of the PINpointing Orbit Crossing-Collapsed HIerarchical Objects (PINOCCHIO) algorithm, a quick tool, based on Lagrangian Perturbation Theory, for the hierarchical build-up of Dark Matter halos in cosmological volumes. To assess its ability to predict halo correlations on large scales, we compare its results with those of an N-body simulation of a 3 Gpc/h box sampled with $2048^3$ particles taken from the MICE suite, matching the same seeds for the initial conditions. Thanks to the FFTW libraries and to the relatively simple design, the code shows very good scaling properties. The CPU time required by PINOCCHIO is a tiny fraction ($\sim$1/2000) of that required by the MICE simulation. Varying some of PINOCCHIO's numerical parameters allows one to produce a universal mass function that lies in the range allowed by published fits, although it underestimates the MICE mass function of FoF halos in the high-mass tail. We compare the matter-halo and the halo-halo power spectra with those of the MICE simulation and find that these 2-point statistics are well recovered on large scales. In particular, when catalogs are matched in number density, agreement within ten per cent is achieved for the halo power spectrum. At scales $k>0.1\,h$/Mpc, the inaccuracy of the Zel'dovich approximation in locating halo positions causes an underestimate of the power spectrum that can be modeled as a Gaussian factor with a damping scale of d=3 Mpc/h at z=0, decreasing at higher redshift. Finally, a remarkable match is obtained for the reduced halo bispectrum, showing a good description of nonlinear halo bias. Our results demonstrate the potential of PINOCCHIO as an accurate and flexible tool for generating large ensembles of mock galaxy surveys, with interesting applications for the analysis of large galaxy redshift surveys.}
}

@misc{rosdahl2013ramsesrt,
  author = {Rosdahl, Joakim and Blaizot, Jeremy and Aubert, Dominique and Stranex, Timothy and Teyssier, Romain},
  title = {RAMSES-RT: Radiation hydrodynamics in the cosmological context},
  year = {2013},
  note = {arXiv:1304.7126. 46 pages, 37 figures, submitted to MNRAS.},
  url = {http://arxiv.org/abs/1304.7126},
  keywords = {RT code hydrodynamics},
  abstract = {We present a new implementation of radiation hydrodynamics (RHD) in the adaptive mesh refinement (AMR) code RAMSES. The multi-group radiative transfer (RT) is performed on the AMR grid with a first-order Godunov method using the M1 closure for the Eddington tensor, and is coupled to the hydrodynamics via non-equilibrium thermochemistry of hydrogen and helium. This moment-based approach has the large advantage that the computational cost is independent of the number of radiative sources -- it can even deal with continuous regions of emission such as bound-free emission from gas. As it is built directly into RAMSES, the RT takes natural advantage of the refinement and parallelization strategies already in place. Since we use an explicit advection solver for the radiative transport, the time step is restricted by the speed of light -- a severe limitation that can be alleviated using the so-called `reduced speed of light' approximation. We propose a rigorous framework to assess the validity of this approximation in various conditions encountered in cosmology and galaxy formation. We finally perform with our newly developed code a complete suite of RHD tests, comparing our results to other RHD codes. The tests demonstrate that our code performs very well and is ideally suited for exploring the effect of radiation on current scenarios of structure and galaxy formation.}
}
@misc{altay2013urchin,
  author = {Altay, Gabriel and Theuns, Tom},
  title = {Urchin: A Reverse Ray Tracer for Astrophysical Applications},
  year = {2013},
  note = {arXiv:1304.4235. 17 pages.},
  url = {http://arxiv.org/abs/1304.4235},
  keywords = {RadTran code},
  abstract = {We describe URCHIN, a reverse ray tracing radiative transfer scheme optimised to model self-shielding from the post-reionisation ultraviolet (UV) background in cosmological simulations. The reverse ray tracing strategy provides several benefits over forward ray tracing codes, including: (1) the preservation of adaptive density field resolution; (2) completely uniform sampling of gas elements by rays; (3) the preservation of Galilean invariance; (4) the ability to sample the UV background spectrum with hundreds of frequency bins; and (5) exact preservation of the input UV background spectrum and amplitude in optically thin gas. The implementation described here focuses on Smoothed Particle Hydrodynamics (SPH). However, the method can be applied to any density field representation in which resolution elements admit ray intersection tests and can be associated with optical depths. We characterise the errors in our implementation in stages, beginning with comparison to known analytic solutions and ending with a realistic model of the z = 3 cosmological UV background incident onto a suite of spherically symmetric models of gaseous galactic halos.}
}

@misc{krumholz2013despotic,
  author = {Krumholz, Mark R.},
  title = {DESPOTIC -- A New Software Library to Derive the Energetics and SPectra of Optically Thick Interstellar Clouds},
  year = {2013},
  note = {arXiv:1304.2404. 20 pages, 11 figures, submitted to MNRAS.},
  url = {http://arxiv.org/abs/1304.2404},
  keywords = {clouds code cooling heating},
  abstract = {I describe DESPOTIC, a code to Derive the Energetics and SPectra of Optically Thick Interstellar Clouds. DESPOTIC represents such clouds using a one-zone model, and can calculate line luminosities, line cooling rates, and in restricted cases line profiles using an escape probability formalism. It also includes approximate treatments of the other dominant heating and cooling processes for the cold interstellar medium, including cosmic ray and X-ray heating, grain photoelectric heating, heating of the dust by infrared and ultraviolet radiation, thermal cooling of the dust, and collisional energy exchange between dust and gas. Based on these heating and cooling rates, DESPOTIC can calculate clouds' equilibrium gas and dust temperatures, and their time-dependent thermal evolution. The software is intended to allow rapid and interactive calculation of clouds' characteristic temperatures, identification of their dominant heating and cooling mechanisms, and prediction of their observable spectra across a wide range of interstellar environments. DESPOTIC is implemented as a Python package, and is released under the GNU General Public License.}
}

@misc{bisbas20123dpdr,
  author = {Bisbas, T. G. and Bell, T. A. and Viti, S. and Yates, J. and Barlow, M. J.},
  title = {3D-PDR: A new three-dimensional astrochemistry code for treating Photodissociation Regions},
  year = {2012},
  note = {arXiv:1209.1091. 19 pages, 11 figures, accepted for publication in MNRAS.},
  url = {http://arxiv.org/abs/1209.1091},
  keywords = {code numerical pdr},
  abstract = {Photodissociation regions (PDRs) define the transition zone between an ionized and a dark molecular region. They consist of neutral gas which interacts with far-ultraviolet radiation and are characterized by strong infrared line emission. Various numerical codes treating one-dimensional PDRs have been developed in the past, simulating the complexity of the chemical reactions occurring and providing a better understanding of the structure of a PDR. In this paper we present the three-dimensional code, 3D-PDR, which can treat PDRs of arbitrary density distribution. The code solves the chemistry and the thermal balance self-consistently within a given three-dimensional cloud. It calculates the total heating and cooling functions at any point in a given PDR by adopting an escape probability method. It uses a HEALPix-based ray-tracing scheme to evaluate the attenuation of the far-ultraviolet radiation in the PDR and the propagation of the far-infrared/submm line emission out of the PDR. We present benchmarking results and apply 3D-PDR to i) a uniform-density spherical cloud interacting with a plane-parallel external radiation field, ii) a uniform-density spherical cloud interacting with a two-component external radiation field, and iii) a cometary globule interacting with a plane-parallel external radiation field. We find that the code is able to reproduce the benchmarking results of various other one-dimensional numerical codes treating PDRs. We also find that the accurate treatment of the radiation field in the fully three-dimensional treatment of PDRs can in some cases lead to different results when compared to a standard one-dimensional treatment.}
}

@misc{Few2012,
  author = {Few, C. Gareth and Courty, Stephanie and Gibson, Brad K. and Kawata, Daisuke and Calura, Francesco and Teyssier, Romain},
  title = {RAMSES-CH: A New Chemodynamical Code for Cosmological Simulations},
  year = {2012},
  note = {arXiv:1202.6400. Submitted to MNRAS Letters, 5 pages, 4 figures.},
  url = {http://arxiv.org/abs/1202.6400},
  keywords = {chemical code network ramses},
  abstract = {We present a new chemodynamical code -- Ramses-CH -- for use in simulating the self-consistent evolution of chemical and hydrodynamical properties of galaxies within a fully cosmological framework. We build upon the adaptive mesh refinement code Ramses, which includes a treatment of self-gravity, hydrodynamics, star formation, radiative cooling, and supernova feedback, to trace the dominant isotopes of C, N, O, Ne, Mg, Si, and Fe. We include the contribution of Type Ia and II supernovae, in addition to low- and intermediate-mass asymptotic giant branch stars, relaxing the instantaneous recycling approximation. The new chemical evolution modules are highly flexible and portable, lending themselves to ready exploration of variations in the underpinning stellar and nuclear physics. We apply Ramses-CH to the cosmological simulation of a typical $L_\star$ galaxy, demonstrating the successful recovery of the basic empirical constraints regarding [$\alpha$/Fe]-[Fe/H] and Type Ia/II supernova rates.}
}