diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index 08b7b7b58..f76e19ae4 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -38,10 +38,10 @@ jobs:
         submodules: 'recursive'
     - name: Build & Test
       run: |
-        cmake -S src -B build `
-              -DBLT_CXX_STD=c++14 `
-              -DCMAKE_BUILD_TYPE=Release `
-              -DENABLE_PYTHON=ON `
+        cmake -S src -B build `
+              -DBLT_CXX_STD=c++14 `
+              -DCMAKE_BUILD_TYPE=Release `
+              -DENABLE_PYTHON=ON `
               -DENABLE_MPI=ON
         cmake --build build --config Release --parallel 2
         cmake -E env CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --config Release --target RUN_TESTS --parallel 1
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index a4eb286c0..f20bb7356 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -8,8 +8,8 @@
 # TO USE A NEW CONTAINER, UPDATE TAG NAME HERE AS PART OF YOUR PR!
 #####
 variables:
-  ubuntu_18_tag: alpinedav/ascent-ci:ubuntu-18-devel
-  ubuntu_21_10_tag: alpinedav/ascent-ci:ubuntu-21.10-devel
+  ubuntu_20_04_tag: alpinedav/ascent-devel:ubuntu-20.04-x86_64
+  ubuntu_21_10_tag: alpinedav/ascent-devel:ubuntu-21.10-devel
 
 # only build merge target pr to develop
 trigger: none
@@ -54,12 +54,12 @@ stages:
   - job: Ubuntu
     pool:
       vmImage: 'ubuntu-latest'
-    container: ${{ variables.ubuntu_18_tag }}
+    container: ${{ variables.ubuntu_20_04_tag }}
     timeoutInMinutes: 0
     strategy:
       matrix:
-        ub_18_shared_minimal_cmake_3_21:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_minimal_cmake_3_21:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -83,8 +83,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_static_py37_cmake_3_21:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_static_py37_cmake_3_21:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -109,8 +109,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py37_cmake_3_21:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py37_cmake_3_21:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -135,8 +135,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py38_cmake_3_21:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py38_cmake_3_21:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -161,8 +161,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py38_cmake_3_21_hdf5_1_10:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py38_cmake_3_21_hdf5_1_10:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -187,8 +187,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py38_cmake_3_21_hdf5_1_12:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py38_cmake_3_21_hdf5_1_12:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -215,8 +215,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py310_cmake_3_24_hdf5_1_14:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py310_cmake_3_24_hdf5_1_14:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -243,8 +243,8 @@ stages:
           BLT_CXX_STD: c++14
           BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_py310_cmake_3_26_hdf5_1_14:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_py310_cmake_3_26_hdf5_1_14:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: gcc
           COMPILER_CXX: g++
           COMPILER_FC: gfortran
@@ -272,8 +272,8 @@ stages:
           BLT_ENABLE_FIND_MPI: ON
 
         # TODO: Need to solve spack mysterty to enable this
-        # ub_18_shared_py311_cmake_3_26_hdf5_1_14:
-        #   containerImage: ${{ variables.ubuntu_18_tag }}
+        # ub_20_04_shared_py311_cmake_3_26_hdf5_1_14:
+        #   containerImage: ${{ variables.ubuntu_20_04_tag }}
         #   COMPILER_CC: gcc
         #   COMPILER_CXX: g++
         #   COMPILER_FC: gfortran
@@ -300,8 +300,8 @@ stages:
         #   BLT_CXX_STD: c++14
         #   BLT_ENABLE_FIND_MPI: ON
 
-        ub_18_shared_mpi_compilers_direct_cmake_3_21:
-          containerImage: ${{ variables.ubuntu_18_tag }}
+        ub_20_04_shared_mpi_compilers_direct_cmake_3_21:
+          containerImage: ${{ variables.ubuntu_20_04_tag }}
           COMPILER_CC: mpicc
           COMPILER_CXX: mpic++
           COMPILER_FC: mpif90
diff --git a/src/libs/relay/conduit_relay_io_hdf5.cpp b/src/libs/relay/conduit_relay_io_hdf5.cpp
index 96b21d6ee..f70f40d56 100644
--- a/src/libs/relay/conduit_relay_io_hdf5.cpp
+++ b/src/libs/relay/conduit_relay_io_hdf5.cpp
@@ -1076,9 +1076,25 @@ check_if_conduit_leaf_is_compatible_with_hdf5_obj(const DataType &dtype,
         std::ostringstream oss;
         oss << "Conduit Node (leaf) at path '" << ref_path << "'"
             << " is not compatible with given HDF5 Dataset at path"
-            << "'" << ref_path << "'"
+            << " '" << ref_path << "'"
             << "\nConduit leaf vs HDF5 Dataset: Bad HDF5 Leaf ID"
-            << " or HDF5 ID is not a HDF5 Group";
+            << " or HDF5 ID is not an HDF5 Dataset";
+
+        // check if we have the root group of a file; if so, provide
+        // more detailed info
+        if( h5_obj_info.type == H5O_TYPE_GROUP )
+        {
+            // we have a group; check if the id is actually a file as well
+            H5F_info_t h5_file_info;
+            h5_status = H5Fget_info(hdf5_id, &h5_file_info);
+            // dest is the root group of an HDF5 file
+            if( CONDUIT_HDF5_STATUS_OK(h5_status) )
+            {
+                oss << "\nAttempt to write Conduit leaf dataset to HDF5 file root."
+ << "\nThe root of a HDF5 file is always a HDF5 Group and only" + << " supports Conduit `Object` or `List` Nodes."; + } + } incompat_details = oss.str(); res = false; @@ -1301,7 +1317,7 @@ check_if_conduit_node_is_compatible_with_hdf5_tree(const Node &node, std::ostringstream oss; oss << "Conduit Node at path '" << ref_path << "'" << " has an unsupported dtype (" << dt.name() << ")" - << " for HDF5 i/o and cannot be written to HDF5 path" + << " for HDF5 I/O and cannot be written to HDF5 path" << " '" << ref_path << "'"; incompat_details = oss.str(); diff --git a/src/tests/relay/t_relay_io_hdf5.cpp b/src/tests/relay/t_relay_io_hdf5.cpp index 87d71f855..a4e1e29dc 100644 --- a/src/tests/relay/t_relay_io_hdf5.cpp +++ b/src/tests/relay/t_relay_io_hdf5.cpp @@ -2260,3 +2260,51 @@ TEST(conduit_relay_io_hdf5, wrong_proto_message) std::cout << e.message() << std::endl; } } + +//----------------------------------------------------------------------------- +TEST(conduit_relay_io_hdf5, conduit_hdf5_error_writing_incompat_leaf) +{ + Node n; + n["thing"].set(42); + bool err_occured = false; + + std::string test_file_name = "tout_imcompat.hdf5:/"; + try + { + conduit::relay::io::save(n,test_file_name); + n["thing"].set("string"); + conduit::relay::io::save_merged(n,test_file_name); + } + catch(conduit::Error &e) + { + std::string emsg = e.message(); + std::cout << emsg << std::endl; + err_occured = true; + } + + EXPECT_TRUE(err_occured); +} + +//----------------------------------------------------------------------------- +TEST(conduit_relay_io_hdf5, conduit_hdf5_error_writing_leaf_to_root) +{ + Node n; + n.set(42); + bool err_occured = false; + + std::string test_file_name = "tout_cant_write_to_root.hdf5:/"; + try + { + conduit::relay::io::save(n,test_file_name); + } + catch(conduit::Error &e) + { + std::string emsg = e.message(); + std::size_t found = emsg.find("Attempt to write Conduit leaf dataset to HDF5 file root."); + EXPECT_TRUE(found!=std::string::npos); + std::cout << emsg << std::endl; + err_occured = true; + } + + EXPECT_TRUE(err_occured); +} \ No newline at end of file