@@ -408,7 +408,8 @@ void HDF5IOHandlerImpl::createPath(
     else
         position = writable; /* root does not have a parent but might still
                                 have to be written */
-    File file = getFile(position).value();
+    File file =
+        requireFile("createPath", position, /* checkParent = */ false);
     hid_t node_id =
         H5Gopen(file.id, concrete_h5_file_position(position).c_str(), gapl);
     VERIFY(
@@ -569,9 +570,8 @@ void HDF5IOHandlerImpl::createDataset(
         if (chunks_json.json().is_string())
         {
 
-            compute_chunking =
-                json::asLowerCaseStringDynamic(chunks_json.json())
-                    .value();
+            compute_chunking = auxiliary::lowerCase(
+                chunks_json.json().get<std::string>());
         }
         else if (chunks_json.json().is_array())
         {
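
The replacement above reads the "chunks" option with nlohmann-json's get<std::string>() and lower-cases it through auxiliary::lowerCase, instead of going through json::asLowerCaseStringDynamic(...).value(). A minimal sketch of the same idea, assuming nlohmann-json; the helper name lowerCased and the std::transform body are illustrative stand-ins, not the project's own implementation:

    #include <algorithm>
    #include <cctype>
    #include <string>

    #include <nlohmann/json.hpp>

    // Stand-in for auxiliary::lowerCase applied to a JSON string value.
    // get<std::string>() throws nlohmann::json::type_error unless the value
    // really is a JSON string; the surrounding createDataset code guards this
    // with chunks_json.json().is_string() first.
    std::string lowerCased(nlohmann::json const &chunks)
    {
        std::string s = chunks.get<std::string>();
        std::transform(s.begin(), s.end(), s.begin(), [](unsigned char c) {
            return static_cast<char>(std::tolower(c));
        });
        return s;
    }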
@@ -849,13 +849,10 @@ void HDF5IOHandlerImpl::extendDataset(
             "HDF5", "Joined Arrays currently only supported in ADIOS2");
     }
 
-    auto res = getFile(writable);
-    if (!res)
-        res = getFile(writable->parent);
+    File file =
+        requireFile("extendDataset", writable, /* checkParent = */ true);
     hid_t dataset_id = H5Dopen(
-        res.value().id,
-        concrete_h5_file_position(writable).c_str(),
-        H5P_DEFAULT);
+        file.id, concrete_h5_file_position(writable).c_str(), H5P_DEFAULT);
     VERIFY(
         dataset_id >= 0,
         "[HDF5] Internal error: Failed to open HDF5 dataset during dataset "
@@ -954,8 +951,7 @@ void HDF5IOHandlerImpl::availableChunks(
     {
         extent.push_back(e);
     }
-    parameters.chunks->push_back(
-        WrittenChunkInfo(std::move(offset), std::move(extent)));
+    parameters.chunks->emplace_back(std::move(offset), std::move(extent));
 
     herr_t status;
     status = H5Sclose(dataset_space);
@@ -1028,13 +1024,13 @@ void HDF5IOHandlerImpl::closeFile(
     Writable *writable, Parameter<Operation::CLOSE_FILE> const &)
 {
     auto optionalFile = getFile(writable);
-    if (!optionalFile)
+    if (!optionalFile.has_value())
     {
         throw std::runtime_error(
             "[HDF5] Trying to close a file that is not "
             "present in the backend");
     }
-    File file = optionalFile.value();
+    File file = *optionalFile;
     H5Fclose(file.id);
     m_openFileIDs.erase(file.id);
     m_fileNames.erase(writable);
@@ -1045,7 +1041,8 @@ void HDF5IOHandlerImpl::closeFile(
 void HDF5IOHandlerImpl::openPath(
     Writable *writable, Parameter<Operation::OPEN_PATH> const &parameters)
 {
-    File file = getFile(writable->parent).value();
+    File file =
+        requireFile("openPath", writable->parent, /* checkParent = */ false);
     hid_t node_id, path_id;
 
     hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
@@ -1132,7 +1129,14 @@ void HDF5IOHandlerImpl::openPath(
 void HDF5IOHandlerImpl::openDataset(
     Writable *writable, Parameter<Operation::OPEN_DATASET> &parameters)
 {
-    File file = getFile(writable->parent).value();
+    std::optional<File> fileOpt = getFile(writable->parent);
+    if (!fileOpt.has_value())
+    {
+        throw error::Internal(
+            "[HDF5] Failed to retrieve file for dataset opening. No file "
+            "associated with the writable's parent.");
+    }
+    File file = *fileOpt;
     hid_t node_id, dataset_id;
 
     hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
@@ -1383,7 +1387,8 @@ void HDF5IOHandlerImpl::deleteFile(
 
     if (writable->written)
     {
-        hid_t file_id = getFile(writable).value().id;
+        hid_t file_id =
+            requireFile("deleteFile", writable, /* checkParent = */ false).id;
         herr_t status = H5Fclose(file_id);
         VERIFY(
             status == 0,
@@ -1429,8 +1434,8 @@ void HDF5IOHandlerImpl::deletePath(
          * Ugly hack: H5Ldelete can't delete "."
          * Work around this by deleting from the parent
          */
-        auto res = getFile(writable);
-        File file = res ? res.value() : getFile(writable->parent).value();
+        File file =
+            requireFile("deletePath", writable, /* checkParent = */ true);
         hid_t node_id = H5Gopen(
             file.id,
             concrete_h5_file_position(writable->parent).c_str(),
@@ -1481,8 +1486,8 @@ void HDF5IOHandlerImpl::deleteDataset(
          * Ugly hack: H5Ldelete can't delete "."
          * Work around this by deleting from the parent
          */
-        auto res = getFile(writable);
-        File file = res ? res.value() : getFile(writable->parent).value();
+        File file =
+            requireFile("deleteDataset", writable, /* checkParent = */ true);
         hid_t node_id = H5Gopen(
             file.id,
             concrete_h5_file_position(writable->parent).c_str(),
@@ -1525,8 +1530,8 @@ void HDF5IOHandlerImpl::deleteAttribute(
         std::string name = parameters.name;
 
         /* Open H5Object to delete in */
-        auto res = getFile(writable);
-        File file = res ? res.value() : getFile(writable->parent).value();
+        File file =
+            requireFile("deleteAttribute", writable, /* checkParent = */ true);
         hid_t node_id = H5Oopen(
             file.id, concrete_h5_file_position(writable).c_str(), H5P_DEFAULT);
         VERIFY(
@@ -1555,8 +1560,7 @@ void HDF5IOHandlerImpl::writeDataset(
             "[HDF5] Writing into a dataset in a file opened as read only is "
             "not possible.");
 
-    auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file = requireFile("writeDataset", writable, /* checkParent = */ true);
 
     hid_t dataset_id, filespace, memspace;
     herr_t status;
@@ -2040,8 +2044,7 @@ void HDF5IOHandlerImpl::writeAttribute(
 void HDF5IOHandlerImpl::readDataset(
     Writable *writable, Parameter<Operation::READ_DATASET> &parameters)
 {
-    auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file = requireFile("readDataset", writable, /* checkParent = */ true);
     hid_t dataset_id, memspace, filespace;
     herr_t status;
     dataset_id = H5Dopen(
@@ -2213,8 +2216,8 @@ void HDF5IOHandlerImpl::readAttribute(
         "[HDF5] Internal error: Writable not marked written during "
         "attribute read");
 
-    auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file =
+        requireFile("readAttribute", writable, /* checkParent = */ true);
 
     hid_t obj_id, attr_id;
     herr_t status;
@@ -2693,7 +2696,14 @@ void HDF5IOHandlerImpl::readAttribute(
             if (H5Tis_variable_str(attr_type))
             {
                 std::vector<char *> vc(dims[0]);
-                status = H5Aread(attr_id, attr_type, vc.data());
+                // clang-format off
+                // NOLINTBEGIN(bugprone-multi-level-implicit-pointer-conversion)
+                // clang-format on
+                status =
+                    H5Aread(attr_id, attr_type, static_cast<void *>(vc.data()));
+                // clang-format off
+                // NOLINTEND(bugprone-multi-level-implicit-pointer-conversion)
+                // clang-format on
                 if (status != 0)
                 {
                     throw error::ReadError(
@@ -2706,8 +2716,17 @@ void HDF5IOHandlerImpl::readAttribute(
                 }
                 for (auto const &val : vc)
                     vs.push_back(auxiliary::strip(std::string(val), {'\0'}));
+                // clang-format off
+                // NOLINTBEGIN(bugprone-multi-level-implicit-pointer-conversion)
+                // clang-format on
                 status = H5Dvlen_reclaim(
-                    attr_type, attr_space, H5P_DEFAULT, vc.data());
+                    attr_type,
+                    attr_space,
+                    H5P_DEFAULT,
+                    static_cast<void *>(vc.data()));
+                // clang-format off
+                // NOLINTEND(bugprone-multi-level-implicit-pointer-conversion)
+                // clang-format on
             }
             else
             {
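
The NOLINT markers and static_cast<void *> added in the two hunks above address the clang-tidy check bugprone-multi-level-implicit-pointer-conversion, which flags implicit conversions from a multi-level pointer such as char ** to void *. A small illustrative sketch; sink() is a hypothetical stand-in for an API that takes an untyped buffer, as H5Aread and H5Dvlen_reclaim do:

    #include <vector>

    void sink(void *) {} // hypothetical consumer of an untyped buffer

    void example(std::vector<char *> &vc)
    {
        // sink(vc.data());                   // char ** converts to void * implicitly: flagged
        sink(static_cast<void *>(vc.data())); // explicit cast states the intent and is not flagged
    }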
@@ -2837,7 +2856,7 @@ void HDF5IOHandlerImpl::listPaths(
         "listing");
 
     auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file = requireFile("listPaths", writable, /* checkParent = */ true);
 
     hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
 #if H5_VERSION_GE(1, 10, 0) && openPMD_HAVE_MPI
@@ -2868,7 +2887,7 @@ void HDF5IOHandlerImpl::listPaths(
             ssize_t name_length = H5Gget_objname_by_idx(node_id, i, nullptr, 0);
             std::vector<char> name(name_length + 1);
             H5Gget_objname_by_idx(node_id, i, name.data(), name_length + 1);
-            paths->push_back(std::string(name.data(), name_length));
+            paths->emplace_back(name.data(), name_length);
         }
     }
 
@@ -2892,8 +2911,7 @@ void HDF5IOHandlerImpl::listDatasets(
         "[HDF5] Internal error: Writable not marked written during dataset "
         "listing");
 
-    auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file = requireFile("listDatasets", writable, /* checkParent = */ true);
 
     hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
 #if H5_VERSION_GE(1, 10, 0) && openPMD_HAVE_MPI
@@ -2925,7 +2943,7 @@ void HDF5IOHandlerImpl::listDatasets(
             ssize_t name_length = H5Gget_objname_by_idx(node_id, i, nullptr, 0);
             std::vector<char> name(name_length + 1);
             H5Gget_objname_by_idx(node_id, i, name.data(), name_length + 1);
-            datasets->push_back(std::string(name.data(), name_length));
+            datasets->emplace_back(name.data(), name_length);
         }
     }
 
@@ -2949,8 +2967,8 @@ void HDF5IOHandlerImpl::listAttributes(
         "[HDF5] Internal error: Writable not marked written during "
         "attribute listing");
 
-    auto res = getFile(writable);
-    File file = res ? res.value() : getFile(writable->parent).value();
+    File file =
+        requireFile("listAttributes", writable, /* checkParent = */ true);
     hid_t node_id;
 
     hid_t fapl = H5Pcreate(H5P_LINK_ACCESS);
@@ -3003,7 +3021,7 @@ void HDF5IOHandlerImpl::listAttributes(
             name.data(),
             name_length + 1,
             H5P_DEFAULT);
-        attributes->push_back(std::string(name.data(), name_length));
+        attributes->emplace_back(name.data(), name_length);
     }
 
     status = H5Oclose(node_id);
@@ -3047,6 +3065,41 @@ HDF5IOHandlerImpl::getFile(Writable *writable)
     res.id = it2->second;
     return std::make_optional(std::move(res));
 }
+auto HDF5IOHandlerImpl::requireFile(
+    std::string const &functionName, Writable *w, bool checkParent) -> File
+{
+    std::optional<File> fileOpt = getFile(w);
+    if (!fileOpt.has_value())
+    {
+        if (checkParent)
+        {
+            fileOpt = getFile(w->parent);
+            if (!fileOpt.has_value())
+            {
+
+                throw error::Internal(
+                    "[HDF5IOHandlerImpl::" + functionName +
+                    "] Control flow error: getFile returned no file for the "
+                    "current Writable or its parent.");
+            }
+            else
+            {
+                return *fileOpt;
+            }
+        }
+        else
+        {
+
+            throw error::Internal(
+                "[HDF5IOHandlerImpl::" + functionName +
+                "] Control flow error: getFile returned no file.");
+        }
+    }
+    else
+    {
+        return *fileOpt;
+    }
+}
 
 std::future<void> HDF5IOHandlerImpl::flush(internal::ParsedFlushParams &params)
 {
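
Taken together, the requireFile helper defined above centralizes the lookup-and-unwrap pattern that the earlier hunks remove from each call site. Condensed from the diff itself, a typical call site (here writeDataset) changes as follows; the old form surfaced a missing file as an uninformative std::bad_optional_access, while the new one throws error::Internal carrying the calling function's name:

    // Before: look up the file, fall back to the parent, then unwrap blindly.
    auto res = getFile(writable);
    File file = res ? res.value() : getFile(writable->parent).value();

    // After: one call; the parent fallback is selected via checkParent and a
    // descriptive error::Internal is thrown if neither lookup succeeds.
    File file = requireFile("writeDataset", writable, /* checkParent = */ true);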