@@ -267,8 +267,10 @@ void WarpXOpenPMDPlot::CloseStep (bool isBTD, bool isLastBTDFlush)
     // close BTD file only when isLastBTDFlush is true
     if (isBTD and !isLastBTDFlush) callClose = false;
     if (callClose) {
-        if (m_Series)
-            m_Series->iterations[m_CurrentStep].close();
+        if (!m_Series)
+            return;
+        auto iterations = m_Series->writeIterations();
+        iterations[m_CurrentStep].close();
 
         // create a little helper file for ParaView 5.9+
         if (amrex::ParallelDescriptor::IOProcessor())
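Note: CloseStep now goes through the openPMD-api streaming interface instead of indexing m_Series->iterations directly. A minimal stand-alone sketch of that write pattern (assuming an openPMD-api build with ADIOS2; the file name and step number are placeholders, not WarpX code):

    #include <openPMD/openPMD.H>

    int main () {
        openPMD::Series series("diags/example_%T.bp", openPMD::Access::CREATE);
        // writeIterations() hands out steps that may be closed as soon as they
        // are complete, letting backends such as ADIOS2 flush once per step
        auto it = series.writeIterations()[100];
        it.setTime(0.0);
        it.close(); // declares the step finished and releases backend resources
    }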
@@ -295,23 +297,37 @@ WarpXOpenPMDPlot::Init (openPMD::Access access, bool isBTD)
     std::string filepath = m_dirPrefix;
     GetFileName(filepath);
 
+    std::string useSteps = R"(
+      {
+        "adios2": {
+          "engine": {
+            "type": "bp4",
+            "usesteps": true
+          }
+        }
+      }
+    )";
     // close a previously open series before creating a new one
     // see ADIOS1 limitation: https://github.com/openPMD/openPMD-api/pull/686
-    m_Series = nullptr;
+    if (m_OneFilePerTS)
+        m_Series = nullptr;
+    else if (m_Series != nullptr)
+        return;
 
     if (amrex::ParallelDescriptor::NProcs() > 1) {
 #if defined(AMREX_USE_MPI)
         m_Series = std::make_unique<openPMD::Series>(
             filepath, access,
-            amrex::ParallelDescriptor::Communicator()
+            amrex::ParallelDescriptor::Communicator(),
+            useSteps
         );
         m_MPISize = amrex::ParallelDescriptor::NProcs();
         m_MPIRank = amrex::ParallelDescriptor::MyProc();
 #else
         amrex::Abort("openPMD-api not built with MPI support!");
 #endif
     } else {
-        m_Series = std::make_unique<openPMD::Series>(filepath, access);
+        m_Series = std::make_unique<openPMD::Series>(filepath, access, useSteps);
         m_MPISize = 1;
         m_MPIRank = 1;
     }
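Note: Init now forwards a JSON options string to the openPMD::Series constructors so the ADIOS2 BP4 engine is configured to write with steps. A minimal sketch of the same constructor usage outside WarpX (file name and options are illustrative only):

    #include <openPMD/openPMD.H>
    #include <string>

    int main () {
        std::string const options =
            R"({"adios2": {"engine": {"type": "bp4", "usesteps": true}}})";
        // the third argument carries backend configuration as a JSON string
        openPMD::Series series("diags/fields_%T.bp", openPMD::Access::CREATE, options);
        series.flush();
    }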
@@ -430,7 +446,9 @@ WarpXOpenPMDPlot::DumpToFile (ParticleContainer* pc,
 
     WarpXParticleCounter counter(pc);
 
-    openPMD::Iteration currIteration = m_Series->iterations[iteration];
+    openPMD::WriteIterations iterations = m_Series->writeIterations();
+    auto currIteration = iterations[iteration];
+
     openPMD::ParticleSpecies currSpecies = currIteration.particles[name];
     // meta data for ED-PIC extension
     currSpecies.setAttribute("particleShape", double(WarpX::noz));
@@ -766,56 +784,17 @@ WarpXOpenPMDPlot::SetupPos(
 }
 
 
-//
-// this is originally copied from FieldIO.cpp
-//
+/*
+ * Set up parameters for the mesh container using the geometry (from level 0)
+ *
+ * @param [IN] meshes: openPMD-api mesh container
+ * @param [IN] full_geom: field geometry
+ *
+ */
 void
-WarpXOpenPMDPlot::WriteOpenPMDFields ( //const std::string& filename,
-                      const std::vector<std::string>& varnames,
-                      const amrex::MultiFab& mf,
-                      const amrex::Geometry& geom,               // geometry of the mf/Fab
-                      const int iteration,
-                      const double time, bool isBTD,
-                      const amrex::Geometry& full_BTD_snapshot ) const
+WarpXOpenPMDPlot::SetupFields ( openPMD::Container< openPMD::Mesh >& meshes,
+                                amrex::Geometry& full_geom ) const
 {
-    // This is AMReX's tiny profiler. Possibly will apply it later
-    WARPX_PROFILE("WarpXOpenPMDPlot::WriteOpenPMDFields()");
-
-    AMREX_ALWAYS_ASSERT_WITH_MESSAGE(m_Series != nullptr, "openPMD series must be initialized");
-
-    amrex::Geometry full_geom = geom;
-    if ( isBTD )
-        full_geom = full_BTD_snapshot;
-
-    // is this either a regular write (true) or the first write in a
-    // backtransformed diagnostic (BTD):
-    bool const first_write_to_iteration = ! m_Series->iterations.contains( iteration );
-
-    int const ncomp = mf.nComp();
-
-    // Create a few vectors that store info on the global domain
-    // Swap the indices for each of them, since AMReX data is Fortran order
-    // and since the openPMD API assumes contiguous C order
-    // - Size of the box, in integer number of cells
-    amrex::Box const & global_box = full_geom.Domain();
-    auto const global_size = getReversedVec(global_box.size());
-    // - Grid spacing
-    std::vector<double> const grid_spacing = getReversedVec(full_geom.CellSize());
-    // - Global offset
-    std::vector<double> const global_offset = getReversedVec(full_geom.ProbLo());
-    // - AxisLabels
-    std::vector<std::string> axis_labels = detail::getFieldAxisLabels();
-
-    // Prepare the type of dataset that will be written
-    openPMD::Datatype const datatype = openPMD::determineDatatype<amrex::Real>();
-    auto const dataset = openPMD::Dataset(datatype, global_size);
-
-    // meta data
-    auto series_iteration = m_Series->iterations[iteration];
-    auto meshes = series_iteration.meshes;
-    if ( first_write_to_iteration ) {
-        series_iteration.setTime( time );
-
     // meta data for ED-PIC extension
     auto const period = full_geom.periodicity(); // TODO double-check: is this the proper global bound or of some level?
     std::vector<std::string> fieldBoundary(6, "reflecting");
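Note: the block removed above is where the old code reversed AMReX's Fortran-ordered meta-data before handing it to openPMD; the new helpers keep using getReversedVec for the same reason. A small illustration of that flip with a hypothetical helper (not the WarpX implementation):

    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    // reverse an AMReX-style (x, y, z) extent into openPMD's C order (z, y, x)
    std::vector<std::uint64_t> reversed (std::vector<std::uint64_t> v) {
        std::reverse(v.begin(), v.end());
        return v;
    }

    int main () {
        // e.g. a 64 x 32 x 16 cell domain becomes the openPMD extent {16, 32, 64}
        assert(reversed({64, 32, 16}) == (std::vector<std::uint64_t>{16, 32, 64}));
    }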
@@ -875,16 +854,58 @@ WarpXOpenPMDPlot::WriteOpenPMDFields ( //const std::string& filename,
     }());
     if (WarpX::do_dive_cleaning)
         meshes.setAttribute("chargeCorrectionParameters", "period=1");
-    }
+}
 
-    // Loop through the different components, i.e. different fields stored in mf
-    for (int icomp=0; icomp<ncomp; icomp++){
-        std::string const & varname = varnames[icomp];
 
-        // assume fields are scalar unless they match the following match of known vector fields
-        std::string field_name = varname;
-        std::string comp_name = openPMD::MeshRecordComponent::SCALAR;
+/*
+ * Set up component properties for a field mesh
+ * @param [IN]: mesh a mesh field
+ * @param [IN]: full_geom geometry for the mesh
+ * @param [IN]: mesh_comp a component of the mesh
+ */
+void
+WarpXOpenPMDPlot::SetupMeshComp ( openPMD::Mesh& mesh,
+                                  amrex::Geometry& full_geom,
+                                  openPMD::MeshRecordComponent& mesh_comp ) const
+{
+    amrex::Box const & global_box = full_geom.Domain();
+    auto const global_size = getReversedVec(global_box.size());
+    // - Grid spacing
+    std::vector<double> const grid_spacing = getReversedVec(full_geom.CellSize());
+    // - Global offset
+    std::vector<double> const global_offset = getReversedVec(full_geom.ProbLo());
+    // - AxisLabels
+    std::vector<std::string> axis_labels = detail::getFieldAxisLabels();
+
+    // Prepare the type of dataset that will be written
+    openPMD::Datatype const datatype = openPMD::determineDatatype<amrex::Real>();
+    auto const dataset = openPMD::Dataset(datatype, global_size);
+
+    mesh.setDataOrder(openPMD::Mesh::DataOrder::C);
+    mesh.setAxisLabels(axis_labels);
+    mesh.setGridSpacing(grid_spacing);
+    mesh.setGridGlobalOffset(global_offset);
+    mesh.setAttribute("fieldSmoothing", "none");
+    // detail::setOpenPMDUnit(mesh, field_name);
+    mesh_comp.resetDataset(dataset);
+
+}
 
+/*
+ * Get component names of a field for openPMD-api book-keeping
+ * Level is reflected as _lvl<meshLevel>
+ *
+ * @param meshLevel [IN]: level of the mesh
+ * @param varname [IN]: name from WarpX
+ * @param field_name [OUT]: field name for openPMD-api output
+ * @param comp_name [OUT]: component name for openPMD-api output
+ */
+void
+WarpXOpenPMDPlot::GetMeshCompNames ( int meshLevel,
+                                     const std::string& varname,
+                                     std::string& field_name,
+                                     std::string& comp_name ) const
+{
     if (varname.size() >= 2u) {
         std::string const varname_1st = varname.substr(0u, 1u); // 1st character
         std::string const varname_2nd = varname.substr(1u, 1u); // 2nd character
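Note: GetMeshCompNames centralizes how WarpX variable names map onto openPMD mesh records. A hedged illustration of the convention described in the doc comment above (plain C++ mirroring the rules, not a call into WarpX):

    #include <iostream>
    #include <string>

    int main () {
        // varname "Ex" splits into field "E" and component "x";
        // refinement levels above 0 append an "_lvl<N>" suffix
        std::string field = "E", comp = "x";
        int const level = 1;
        if (level > 0) field += "_lvl" + std::to_string(level);
        std::cout << field << "/" << comp << "\n"; // prints E_lvl1/x
    }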
@@ -904,51 +925,97 @@ WarpXOpenPMDPlot::WriteOpenPMDFields ( //const std::string& filename,
         }
     }
 
-        // Setup the mesh record accordingly
-        auto mesh = meshes[field_name];
-        // MultiFab: Fortran order of indices and axes;
-        // we invert (only) meta-data arrays to assign labels and offsets in the
-        // order: slowest to fastest varying index when accessing the mesh
-        // contiguously (as 1D flattened logical memory)
-        if ( first_write_to_iteration ) {
-            mesh.setDataOrder(openPMD::Mesh::DataOrder::C);
-            mesh.setAxisLabels(axis_labels);
-            mesh.setGridSpacing(grid_spacing);
-            mesh.setGridGlobalOffset(global_offset);
-            mesh.setAttribute("fieldSmoothing", "none");
-            detail::setOpenPMDUnit(mesh, field_name);
-        }
-
-        // Create a new mesh record component, and store the associated metadata
-        auto mesh_comp = mesh[comp_name];
-        if ( first_write_to_iteration ) {
-            mesh_comp.resetDataset(dataset);
+    if (0 == meshLevel)
+        return;
 
-            auto relative_cell_pos = utils::getRelativeCellPosition(mf);       // AMReX Fortran index order
-            std::reverse(relative_cell_pos.begin(), relative_cell_pos.end());  // now in C order
-            mesh_comp.setPosition(relative_cell_pos);
-        }
+    field_name += "_lvl" + std::to_string(meshLevel);
+}
+
+/*
+ * Write fields with all mesh levels
+ *
+ */
+void
+WarpXOpenPMDPlot::WriteOpenPMDFieldsAll ( //const std::string& filename,
+                      const std::vector<std::string>& varnames,
+                      const amrex::Vector<amrex::MultiFab>& mf,
+                      amrex::Vector<amrex::Geometry>& geom,
+                      const int iteration,
+                      const double time, bool isBTD,
+                      const amrex::Geometry& full_BTD_snapshot ) const
+{
+    // This is AMReX's tiny profiler. Possibly will apply it later
+    WARPX_PROFILE("WarpXOpenPMDPlot::WriteOpenPMDFields()");
 
-        // Loop through the multifab, and store each box as a chunk,
-        // in the openPMD file.
-        for ( amrex::MFIter mfi(mf); mfi.isValid(); ++mfi ) {
+    AMREX_ALWAYS_ASSERT_WITH_MESSAGE(m_Series != nullptr, "openPMD series must be initialized");
 
-            amrex::FArrayBox const & fab = mf[mfi];
-            amrex::Box const & local_box = fab.box();
+    // is this either a regular write (true) or the first write in a
+    // backtransformed diagnostic (BTD):
+    bool const first_write_to_iteration = ! m_Series->iterations.contains( iteration );
 
-            // Determine the offset and size of this chunk
-            amrex::IntVect const box_offset = local_box.smallEnd() - global_box.smallEnd();
-            auto const chunk_offset = getReversedVec( box_offset );
-            auto const chunk_size = getReversedVec( local_box.size() );
+    // meta data
+    openPMD::WriteIterations iterations = m_Series->writeIterations();
+    auto series_iteration = iterations[iteration];
 
-            // Write local data
-            amrex::Real const * local_data = fab.dataPtr( icomp );
-            mesh_comp.storeChunk( openPMD::shareRaw(local_data),
-                                  chunk_offset, chunk_size );
-        }
+    auto meshes = series_iteration.meshes;
+    if (first_write_to_iteration) {
+        // let's see whether full_geom varies from geom[0] x geom[1]
+        series_iteration.setTime( time );
     }
-    // Flush data to disk after looping over all components
-    m_Series->flush();
+
+    for (int i=0; i<geom.size(); i++) {
+        amrex::Geometry full_geom = geom[i];
+        if ( isBTD )
+            full_geom = full_BTD_snapshot;
+
+        // setup is called once. So it uses property "period" from the first
+        // geometry for <all> field levels.
+        if ( (0 == i) && first_write_to_iteration )
+            SetupFields(meshes, full_geom);
+
+        amrex::Box const & global_box = full_geom.Domain();
+        auto const global_size = getReversedVec(global_box.size());
+
+        int const ncomp = mf[i].nComp();
+        for ( int icomp=0; icomp<ncomp; icomp++ ) {
+            std::string const & varname = varnames[icomp];
+
+            // assume fields are scalar unless they match the following match of known vector fields
+            std::string field_name = varname;
+            std::string comp_name = openPMD::MeshRecordComponent::SCALAR;
+            GetMeshCompNames( i, varname, field_name, comp_name );
+
+            auto mesh = meshes[field_name];
+            auto mesh_comp = mesh[comp_name];
+            if ( first_write_to_iteration )
+            {
+                SetupMeshComp( mesh, full_geom, mesh_comp );
+                detail::setOpenPMDUnit( mesh, field_name );
+
+                auto relative_cell_pos = utils::getRelativeCellPosition(mf[i]);     // AMReX Fortran index order
+                std::reverse( relative_cell_pos.begin(), relative_cell_pos.end() ); // now in C order
+                mesh_comp.setPosition( relative_cell_pos );
+            }
+
+            // Loop through the multifab, and store each box as a chunk,
+            // in the openPMD file.
+            for ( amrex::MFIter mfi(mf[i]); mfi.isValid(); ++mfi )
+            {
+                amrex::FArrayBox const & fab = mf[i][mfi];
+                amrex::Box const & local_box = fab.box();
+
+                // Determine the offset and size of this chunk
+                amrex::IntVect const box_offset = local_box.smallEnd() - global_box.smallEnd();
+                auto const chunk_offset = getReversedVec( box_offset );
+                auto const chunk_size = getReversedVec( local_box.size() );
+
+                amrex::Real const * local_data = fab.dataPtr( icomp );
+                mesh_comp.storeChunk( openPMD::shareRaw(local_data),
+                                      chunk_offset, chunk_size );
+            }
+        } // icomp loop
+        // Flush data to disk after looping over all components
+        m_Series->flush();
+    } // levels loop (i)
 }
 #endif // WARPX_USE_OPENPMD
 
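Note: WriteOpenPMDFieldsAll writes every AMReX box as one chunk per component and per mesh level. A stand-alone sketch of the underlying storeChunk()/shareRaw() pattern with made-up extents (not WarpX code; with shareRaw the buffer must stay alive until the series flushes):

    #include <openPMD/openPMD.H>
    #include <vector>

    int main () {
        openPMD::Series series("diags/example_%T.bp", openPMD::Access::CREATE);
        auto it = series.writeIterations()[0];
        auto rho = it.meshes["rho"][openPMD::MeshRecordComponent::SCALAR];

        // global extent of the record, slowest-to-fastest (C) order
        openPMD::Dataset const dataset(openPMD::determineDatatype<double>(), {32, 64});
        rho.resetDataset(dataset);

        std::vector<double> local(16 * 64, 0.0); // one local box of the field
        rho.storeChunk(openPMD::shareRaw(local.data()), {0, 0}, {16, 64});
        it.close(); // flushes the chunk while 'local' is still in scope
    }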