Merge pull request #966 from svalinn/v3.2.4-rc1
Merge v3.2.4 release candidate for final release
bam241 authored Jan 7, 2025
2 parents c1d8da1 + 94f5cac commit 65c33fe
Showing 9 changed files with 171 additions and 91 deletions.
71 changes: 37 additions & 34 deletions .github/workflows/windows_build_test.yml
@@ -3,33 +3,33 @@ name: Windows Build/Test
on:
# allows us to run workflows manually
workflow_dispatch:
pull_request:
branches:
- develop
paths-ignore:
- '.github/workflows/docker_publish.yml'
- '.github/workflows/linux_build_test.yml'
- '.github/workflows/linux_upstream_test_*'
- '.github/workflows/mac_build_test.yml'
- '.github/workflows/housekeeping.yml'
- '.github/workflows/changelog_test.yml'
- '.github/actions/**'
- 'CI/**'
- 'doc/**'
# pull_request:
# branches:
# - develop
# paths-ignore:
# - '.github/workflows/docker_publish.yml'
# - '.github/workflows/linux_build_test.yml'
# - '.github/workflows/linux_upstream_test_*'
# - '.github/workflows/mac_build_test.yml'
# - '.github/workflows/housekeeping.yml'
# - '.github/workflows/changelog_test.yml'
# - '.github/actions/**'
# - 'CI/**'
# - 'doc/**'

push:
branches:
- develop
paths-ignore:
- '.github/workflows/docker_publish.yml'
- '.github/workflows/linux_build_test.yml'
- '.github/workflows/linux_upstream_test_*'
- '.github/workflows/mac_build_test.yml'
- '.github/workflows/housekeeping.yml'
- '.github/workflows/changelog_test.yml'
- '.github/actions/**'
- 'CI/**'
- 'doc/**'
# push:
# branches:
# - develop
# paths-ignore:
# - '.github/workflows/docker_publish.yml'
# - '.github/workflows/linux_build_test.yml'
# - '.github/workflows/linux_upstream_test_*'
# - '.github/workflows/mac_build_test.yml'
# - '.github/workflows/housekeeping.yml'
# - '.github/workflows/changelog_test.yml'
# - '.github/actions/**'
# - 'CI/**'
# - 'doc/**'

env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -41,16 +41,16 @@ jobs:

- uses: conda-incubator/setup-miniconda@v3
with:
auto-activate-base: true
auto-update-conda: true
auto-activate-base: false
activate-environment: ""

# yaml-cpp package has conflicting gtest headers and is not needed for this project
- name: Conda dependencies
shell: bash -l {0}
run: |
conda install curl eigen
conda install -c conda-forge hdf5=1.10.6
conda remove -y yaml-cpp
conda install -c conda-forge hdf5==1.10.6
- name: Environment Variables
shell: bash -l {0}
@@ -114,12 +114,15 @@ jobs:
-DMOAB_DIR=../install_dir \
-DHDF5_ROOT="${CONDA_LOC}" \
-DHDF5_hdf5_LIBRARY_RELEASE="${CONDA_LOC}/lib/libhdf5_hl.lib;${CONDA_LOC}/lib/libhdf5.lib;${CONDA_LOC}/lib/zlib.lib;${CONDA_LOC}/lib/libhdf5_cpp.lib" \
-DCMAKE_INSTALL_PREFIX=../install_dir/ \
-DCMAKE_EXE_LINKER_FLAGS="" \
-DCMAKE_MODULE_LINKER_FLAGS="" \
-DCMAKE_SHARED_LINKER_FLAGS="" \
-DCMAKE_EXE_LINKER_FLAGS="/std:c++latest -DH5_BUILT_AS_DYNAMIC_LIB" \
-DCMAKE_MODULE_LINKER_FLAGS="/std:c++latest" \
-DCMAKE_SHARED_LINKER_FLAGS="/std:c++latest" \
-DCMAKE_STATIC_LINKER_FLAGS="" \
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_EXE_LINKER_FLAGS="" \
-DCMAKE_INSTALL_PREFIX=../install_dir/ \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_C_COMPILER="C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Tools/MSVC/14.27.29110/bin/Hostx64/x64/cl.exe" \
-DCMAKE_CXX_COMPILER="C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Tools/MSVC/14.27.29110/bin/Hostx64/x64/cl.exe"
cmake --build . --config Release
cmake --install . --config Release
21 changes: 3 additions & 18 deletions CMakeLists.txt
@@ -1,15 +1,16 @@
project(DAGMC)
cmake_minimum_required(VERSION 3.1)
cmake_minimum_required(VERSION 3.18)
enable_language(CXX)

# Set DAGMC version
set(DAGMC_MAJOR_VERSION 3)
set(DAGMC_MINOR_VERSION 2)
set(DAGMC_PATCH_VERSION 3)
set(DAGMC_PATCH_VERSION 4)
set(DAGMC_VERSION ${DAGMC_MAJOR_VERSION}.${DAGMC_MINOR_VERSION}.${DAGMC_PATCH_VERSION})

if(MSVC)
set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS TRUE)
add_definitions( -DH5_BUILT_AS_DYNAMIC_LIB )
endif()


@@ -29,22 +30,6 @@ if(GIT_FOUND AND EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/.git")
endif()

option(GIT_SUBMODULE "Check submodules during build" ON)
if(GIT_SUBMODULE)
message(STATUS "Submodule update")
execute_process(COMMAND ${GIT_EXECUTABLE} "submodule" "update" "--init" "--recursive"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE GIT_SUBMOD_RESULT)
if(NOT GIT_SUBMOD_RESULT EQUAL 0)
message(FATAL_ERROR "git submodule update --init --recursive failed with \
${GIT_SUBMOD_RESULT}, please checkout submodules")
endif()
endif()
endif()

# Check to see if submodules exist (by checking one)
if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/pyne/pyne/readme.rst")
message(FATAL_ERROR "The git submodules were not downloaded! GIT_SUBMODULE was \
turned off or failed. Please update submodules and try again.")
endif()

# Make the scripts in the "cmake" directory available to CMake
8 changes: 7 additions & 1 deletion doc/CHANGELOG.rst
@@ -7,10 +7,13 @@ DAGMC Changelog
Next version
====================

v3.2.4
====================

**Changed:**

* Adding info messages to CMake output for double down (#962)
* Update hdf5 to v1.14.3 from v1.10.4 (#931 #933)
* Update hdf5 to v1.14.3 from v1.10.4 for linux builds (#931 #933)
* Ensure implicit complement handle is placed at the back of DAGMC volume indices (#935)
* Update MOAB to 5.5.1 from 5.3.0 (#939 #940)
* Update README regarding OpenMC (#938)
@@ -21,6 +24,9 @@ Next version
* Update github actions to newer versions as necessary (#958)
* CMake error message update (#960)
* Updated documentation to build dependencies (#963)
* Pause support for Windows (#966)
* Localize invocation of git submodule for PyNE (#968)
* Fixed the names of the Graveyard and the Vacuum to mat:Graveyard and mat:Vacuum (and lower case) (#971)

v3.2.3
====================
17 changes: 17 additions & 0 deletions src/CMakeLists.txt
@@ -15,6 +15,23 @@ if (BUILD_MCNP5 OR BUILD_MCNP6 OR BUILD_GEANT4 OR BUILD_FLUKA OR BUILD_CI_TESTS)
endif ()
if (BUILD_UWUW)
add_subdirectory(uwuw)

if(GIT_SUBMODULE)
message(STATUS "Submodule update")
execute_process(COMMAND ${GIT_EXECUTABLE} "submodule" "update" "--init" "--recursive"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE GIT_SUBMOD_RESULT)
if(NOT GIT_SUBMOD_RESULT EQUAL 0)
message(FATAL_ERROR "git submodule update --init --recursive failed with \
${GIT_SUBMOD_RESULT}, please checkout submodules")
endif()
endif()

# Check to see if submodules exist (by checking one)
if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/pyne/pyne/readme.rst")
message(FATAL_ERROR "The git submodules were not downloaded! GIT_SUBMODULE was \
turned off or failed. Please update submodules and try again.")
endif()
add_subdirectory(pyne)
endif ()

38 changes: 18 additions & 20 deletions src/dagmc/dagmcmetadata.cpp
@@ -209,32 +209,30 @@ void dagmcMetaData::parse_material_data() {

// set the material value
volume_material_property_data_eh[eh] = grp_name;

bool is_graveyard =
to_lower(grp_name).find(to_lower(graveyard_str)) != std::string::npos;
bool is_vacuum =
to_lower(grp_name).find(to_lower(vacuum_str)) != std::string::npos;
logger.message("Group name -- " + grp_name);
bool is_graveyard = (to_lower(grp_name) == to_lower(graveyard_mat_str()));
bool is_vacuum = (to_lower(grp_name) == to_lower(vacuum_mat_str()));

// not graveyard or vacuum or implicit compliment
if (!is_graveyard && !is_vacuum && !DAG->is_implicit_complement(eh)) {
volume_material_data_eh[eh] = material_props[0];
}
// found graveyard
else if (is_graveyard) {
volume_material_property_data_eh[eh] = "mat:Graveyard";
volume_material_data_eh[eh] = graveyard_str;
volume_material_property_data_eh[eh] = graveyard_mat_str();
volume_material_data_eh[eh] = graveyard_str();
}
// vacuum
else if (is_vacuum) {
volume_material_property_data_eh[eh] = "mat:Vacuum";
volume_material_data_eh[eh] = vacuum_str;
volume_material_property_data_eh[eh] = vacuum_mat_str();
volume_material_data_eh[eh] = vacuum_str();
}
// implicit complement
else if (DAG->is_implicit_complement(eh)) {
if (implicit_complement_material == "") {
logger.message("Implicit Complement assumed to be Vacuum");
volume_material_property_data_eh[eh] = "mat:Vacuum";
volume_material_data_eh[eh] = vacuum_str;
volume_material_property_data_eh[eh] = vacuum_mat_str();
volume_material_data_eh[eh] = vacuum_str();
} else {
volume_material_property_data_eh[eh] =
"mat:" + implicit_complement_material;
@@ -379,18 +377,18 @@ void dagmcMetaData::parse_boundary_data() {
exit(EXIT_FAILURE);
}
// 2d entities have been tagged with the boundary condition property
// ie. both surfaces and its members triangles,
// ie. both surfaces and its member triangles

std::string bc_string = to_lower(boundary_assignment[0]);

if (bc_string.find(to_lower(reflecting_str)) != std::string::npos)
surface_boundary_data_eh[eh] = reflecting_str;
if (bc_string.find(to_lower(white_str)) != std::string::npos)
surface_boundary_data_eh[eh] = white_str;
if (bc_string.find(to_lower(periodic_str)) != std::string::npos)
surface_boundary_data_eh[eh] = periodic_str;
if (bc_string.find(to_lower(vacuum_str)) != std::string::npos)
surface_boundary_data_eh[eh] = vacuum_str;
if (bc_string.find(to_lower(reflecting_str())) != std::string::npos)
surface_boundary_data_eh[eh] = reflecting_str();
if (bc_string.find(to_lower(white_str())) != std::string::npos)
surface_boundary_data_eh[eh] = white_str();
if (bc_string.find(to_lower(periodic_str())) != std::string::npos)
surface_boundary_data_eh[eh] = periodic_str();
if (bc_string.find(to_lower(vacuum_str())) != std::string::npos)
surface_boundary_data_eh[eh] = vacuum_str();
}
}

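Note on the change above: graveyard and vacuum detection now requires an exact (case-insensitive) match against the full keyword (e.g. "mat:Vacuum") instead of a substring search, so group names that merely contain the keyword are no longer misclassified. A minimal, self-contained sketch of the difference — illustrative only, not DAGMC code, and the group names are hypothetical:

#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

// Lower-cases a copy of the input, mirroring the to_lower helper used above.
static std::string to_lower(std::string s) {
  std::transform(s.begin(), s.end(), s.begin(),
                 [](unsigned char c) { return std::tolower(c); });
  return s;
}

int main() {
  const std::string vacuum_mat = "mat:Vacuum";
  for (const std::string grp : {"mat:Vacuum", "mat:NotVacuum", "mat:Hydrogen"}) {
    // Old behaviour: a substring match also flags "mat:NotVacuum" as vacuum.
    bool old_match = to_lower(grp).find(to_lower("Vacuum")) != std::string::npos;
    // New behaviour: only an exact (case-insensitive) match counts.
    bool new_match = (to_lower(grp) == to_lower(vacuum_mat));
    std::cout << grp << "  old: " << old_match << "  new: " << new_match << "\n";
  }
  return 0;
}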
28 changes: 23 additions & 5 deletions src/dagmc/dagmcmetadata.hpp
@@ -316,6 +316,21 @@ class dagmcMetaData {
*/
std::map<moab::EntityHandle, std::map<std::string, double>> importance_map;

// Getting some constant keyword values
const std::string& graveyard_str() const { return graveyard_str_; }
const std::string& vacuum_str() const { return vacuum_str_; }
const std::string& vacuum_mat_str() const { return vacuum_mat_str_; }
const std::string& graveyard_mat_str() const { return graveyard_mat_str_; }
const std::string& reflecting_str() const { return reflecting_str_; }
const std::string& white_str() const { return white_str_; }
const std::string& periodic_str() const { return periodic_str_; }

// Allow modifying some of the keyword values
void set_graveyard_str(std::string val) { graveyard_str_ = val; }
void set_vacuum_str(std::string val) { vacuum_str_ = val; }
void set_vacuum_mat_str(std::string val) { vacuum_mat_str_ = val; }
void set_graveyard_mat_str(std::string val) { graveyard_mat_str_ = val; }

// private member variables
private:
/**
@@ -351,11 +366,14 @@ std::map<std::string, std::string> keyword_synonyms;
std::map<std::string, std::string> keyword_synonyms;

// Some constant keyword values
const std::string graveyard_str{"Graveyard"};
const std::string vacuum_str{"Vacuum"};
const std::string reflecting_str{"Reflecting"};
const std::string white_str{"White"};
const std::string periodic_str{"Periodic"};
const std::string reflecting_str_{"Reflecting"};
const std::string white_str_{"White"};
const std::string periodic_str_{"Periodic"};
// Some less constant keyword values
std::string graveyard_str_{"Graveyard"};
std::string vacuum_str_{"Vacuum"};
std::string vacuum_mat_str_{"mat:Vacuum"};
std::string graveyard_mat_str_{"mat:Graveyard"};

DagMC_Logger logger;
};
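The getters and setters added above make the special-material keywords configurable by a host application before property parsing. A minimal sketch of the intended use, mirroring the pattern in the new unit test below; the group name "mat:Void" and the surrounding function are hypothetical, not part of this PR:

#include <memory>
#include <string>

#include "DagMC.hpp"
#include "dagmcmetadata.hpp"

// Returns the material assigned to the volume with ID 1, treating the
// (hypothetical) group "mat:Void" as the vacuum keyword instead of the
// default "mat:Vacuum".
std::string material_of_volume_one(moab::DagMC* dag) {
  auto dgm = std::make_shared<dagmcMetaData>(dag);
  dgm->set_vacuum_mat_str("mat:Void");  // override before parsing
  dgm->load_property_data();            // parse group metadata as usual
  return dgm->get_volume_property("material", 1, false);
}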
57 changes: 57 additions & 0 deletions src/dagmc/tests/dagmc_unit_tests.cpp
@@ -81,6 +81,63 @@ TEST_F(DagmcMetadataTest, TestMatAssigns) {
}
}
//---------------------------------------------------------------------------//
// FIXTURE-BASED TESTS: Tests to make sure that vacuum detection is done
// properly
//---------------------------------------------------------------------------//
TEST_F(DagmcMetadataTest, TestVacuumName) {
// Test default behavior for vacuum name
{
// new metadata instance
dgm = std::make_shared<dagmcMetaData>(DAG.get());
// process
dgm->load_property_data();

int num_vol = DAG->num_entities(3);
std::vector<int> vol_ids = {1, 2, 3, 4};

std::vector<std::string> vacuum_names = {"Hydrogen", "Hydrogen", "Hydrogen",
"Vacuum"};
for (int id : vol_ids) {
std::string mat_prop = dgm->get_volume_property("material", id, false);
EXPECT_EQ(mat_prop, vacuum_names[id - 1]);
}
}

// Changing the vacuum name to detect mat:Hydrogen as the vacuum
{
dgm = std::make_shared<dagmcMetaData>(DAG.get());

dgm->set_vacuum_mat_str("mat:Hydrogen");
dgm->load_property_data();
int num_vol = DAG->num_entities(3);
std::vector<int> vol_ids = {1, 2, 3, 4};

std::vector<std::string> vacuum_names = {"Vacuum", "Vacuum", "Vacuum",
"Vacuum"};
for (int id : vol_ids) {
std::string mat_prop = dgm->get_volume_property("material", id, false);
EXPECT_EQ(mat_prop, vacuum_names[id - 1]);
}
}

// Ensuring that partial name overlaps don't affect vacuum detection
{
dgm = std::make_shared<dagmcMetaData>(DAG.get());

dgm->set_vacuum_mat_str("Hydro");
dgm->load_property_data();
int num_vol = DAG->num_entities(3);
std::vector<int> vol_ids = {1, 2, 3, 4};

std::vector<std::string> vacuum_names = {"Hydrogen", "Hydrogen", "Hydrogen",
"Vacuum"};
for (int id : vol_ids) {
std::string mat_prop = dgm->get_volume_property("material", id, false);
EXPECT_EQ(mat_prop, vacuum_names[id - 1]);
}
}
}
//---------------------------------------------------------------------------//
// FIXTURE-BASED TESTS: Tests to make sure that all densities have successfully
// been assigned and successfully retrieved from the metadata class
// in this test there was no density data assigned, so it should be ""