Add submodule #27

Merged: 16 commits, Aug 8, 2024
21 changes: 18 additions & 3 deletions .github/workflows/ci-cd-pipeline.yaml
@@ -85,16 +85,31 @@ jobs:
git checkout main
git pull origin main --rebase

# Configure git to use the CICD_REPO_SECRET for fetching submodules
if ! git config --global --get url."https://${{ secrets.CICD_REPO_SECRET }}@github.com/".insteadOf; then
git config --global url."https://${{ secrets.CICD_REPO_SECRET }}@github.com/".insteadOf "https://github.com/"
fi
# The secret used for authenticated access to the repository is rotated periodically, so the git config on the VM will need to be updated.
# On the VM, run:
# git config --global --get-regexp url.*.insteadOf
# to see the current git config. Then remove the stale entry with:
# git config --global --unset-all url."https://OLD_SECRET@github.com/".insteadOf
# where OLD_SECRET is the old secret shown by the first command. Then add the new secret with:
# git config --global url."https://NEW_SECRET@github.com/".insteadOf "https://github.com/"
# where NEW_SECRET is a new token generated in the GitHub developer settings and added to the repository secrets.
# Alternatively, the config will be updated automatically the next time this action runs.

echo "Initializing and updating submodules..."
git submodule update --init --recursive --remote

echo "Checking out appropriate branch..."
if [ "${{ github.event_name }}" = "pull_request" ]; then
git fetch origin ${{ github.sha }}
git checkout ${{ github.sha }}
git pull origin ${{ github.sha }} --rebase
elif [ "${{ github.event_name }}" = "push" ] && [ "${{ github.ref }}" = "refs/heads/main" ]; then
git checkout main
git pull origin main --rebase
else
git checkout ${{ github.ref }}
git pull origin ${{ github.ref }} --rebase
fi
git lfs pull # Pull LFS files
rm -rf build # Remove the build directory; this prevents some false positives in tests
61 changes: 40 additions & 21 deletions .github/workflows/performance-pipeline.yaml
@@ -74,28 +74,47 @@ jobs:
if: ${{ github.event_name == 'workflow_dispatch' }}
run: |
ssh -T -o ConnectTimeout=10 ${{ secrets.VM_USERNAME }}@${{ secrets.VM_IP }} << 'EOF'
set -e # Exit on error
cd ~/aperi-mech
echo "Fetching git branches..."
git fetch --all

echo "Checking out main branch and pulling latest changes..."
git checkout main
git pull origin main

echo "Checking out appropriate branch..."
if [ "${{ github.event_name }}" = "pull_request" ]; then
git checkout ${{ github.sha }}
git pull origin ${{ github.sha }}
elif [ "${{ github.event_name }}" = "push" ] && [ "${{ github.ref }}" = "refs/heads/main" ]; then
set -e # Exit on error

cd ~/aperi-mech
echo "Fetching git branches..."
git fetch --all

echo "Stashing any unstaged changes..."
git stash --include-untracked

echo "Checking out main branch and pulling latest changes..."
git checkout main
git pull origin main
else
git checkout ${{ github.ref }}
git pull origin ${{ github.ref }}
fi
git lfs pull # Pull LFS files
rm -rf build # Remove the build directory; this prevents some false positives in tests
git pull origin main --rebase

# Configure git to use the CICD_REPO_SECRET for fetching submodules
if ! git config --global --get url."https://${{ secrets.CICD_REPO_SECRET }}@github.com/".insteadOf; then
git config --global url."https://${{ secrets.CICD_REPO_SECRET }}@github.com/".insteadOf "https://github.com/"
fi
# The secret used for authenticated access to the repository is rotated periodically, so the git config on the VM will need to be updated.
# On the VM, run:
# git config --global --get-regexp url.*.insteadOf
# to see the current git config. Then remove the stale entry with:
# git config --global --unset-all url."https://OLD_SECRET@github.com/".insteadOf
# where OLD_SECRET is the old secret shown by the first command. Then add the new secret with:
# git config --global url."https://NEW_SECRET@github.com/".insteadOf "https://github.com/"
# where NEW_SECRET is a new token generated in the GitHub developer settings and added to the repository secrets.
# Alternatively, the config will be updated automatically the next time this action runs.

echo "Initializing and updating submodules..."
git submodule update --init --recursive --remote

echo "Checking out appropriate branch..."
if [ "${{ github.event_name }}" = "pull_request" ]; then
git fetch origin ${{ github.sha }}
git checkout ${{ github.sha }}
elif [ "${{ github.event_name }}" = "push" ] && [ "${{ github.ref }}" = "refs/heads/main" ]; then
git checkout main
else
git checkout ${{ github.ref }}
fi
git lfs pull # Pull LFS files
rm -rf build # Remove the build directory; this prevents some false positives in tests
EOF

- name: Skipping Checkout Code on VM
3 changes: 3 additions & 0 deletions .gitmodules
@@ -0,0 +1,3 @@
[submodule "protego-mech"]
path = protego-mech
url = git@github.com:aperijake/protego-mech.git
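
Note: a .gitmodules entry like the one above is the kind produced by git submodule add; as a sketch (the exact commands used for this PR are not shown here), the submodule would be added and then fetched with:

    git submodule add git@github.com:aperijake/protego-mech.git protego-mech
    git submodule update --init --recursive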
1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -108,6 +108,7 @@
"mpirun",
"pamgen",
"pnetcdf",
"protego",
"SIDESET",
"spack",
"spyhis",
5 changes: 5 additions & 0 deletions CMakeLists.txt
@@ -1,6 +1,11 @@
cmake_minimum_required(VERSION 3.20)
project(aperi-mech)

# Add options
option(CHECK_CODE_COVERAGE "Enable code coverage" OFF)
option(USE_GPU "Enable GPU support" OFF)
option(USE_PROTEGO_MECH "Include protego-mech library" OFF)

# Set the flags for the RelWithDebInfo configuration
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "-O2 -g -pg")

2 changes: 1 addition & 1 deletion cmake/build_settings.cmake
@@ -5,7 +5,7 @@ set(CMAKE_CUDA_STANDARD 14)

# Set the project languages
enable_language(CXX)
IF(GPU)
IF(USE_GPU)
enable_language(CUDA)
set(CUDA_SEPARABLE_COMPILATION ON)
ENDIF()
2 changes: 1 addition & 1 deletion cmake/code_coverage.cmake
@@ -1,5 +1,5 @@
############# CODE COVERAGE #############
if (CODE_COVERAGE)
if (CHECK_CODE_COVERAGE)
list(APPEND CMAKE_PREFIX_PATH "${LCOV_BIN_DIR}")
include(CodeCoverage)
append_coverage_compiler_flags()
7 changes: 7 additions & 0 deletions cmake/targets.cmake
@@ -24,6 +24,13 @@ target_include_directories(aperimech PRIVATE
${MPI_INCLUDE_PATH}
)

# Conditionally include protego-mech
if(USE_PROTEGO_MECH)
add_subdirectory(protego-mech)
target_link_libraries(aperimech protegomech)
target_compile_definitions(aperimech PRIVATE USE_PROTEGO_MECH)
endif()

# Add the executable
set(MAIN_SOURCES
src/main.cpp;
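
As a usage note, the conditional block above only takes effect when the new option is enabled at configure time; a minimal sketch with a plain CMake invocation (paths and other options omitted) would be:

    cmake -D USE_PROTEGO_MECH:BOOL=ON <source-dir>

which adds the protego-mech subdirectory, links aperimech against protegomech, and defines USE_PROTEGO_MECH for the aperimech target.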
8 changes: 7 additions & 1 deletion cmake/version_info.cmake
@@ -42,12 +42,18 @@ set(BUILD_TYPE ${CMAKE_BUILD_TYPE})
string(TIMESTAMP BUILD_DATE "%Y-%m-%d")
string(TIMESTAMP BUILD_TIME "%H:%M:%S")

IF(GPU)
IF(USE_GPU)
set(GPU_OR_CPU "GPU")
else()
set(GPU_OR_CPU "CPU")
endif()

IF(USE_PROTEGO_MECH)
set(PROTEGO_MECH "ON")
else()
set(PROTEGO_MECH "OFF")
endif()

# Generate git_commit.h with the current hash
configure_file(
${CMAKE_SOURCE_DIR}/include/git_commit.h.in
63 changes: 30 additions & 33 deletions do_configure
@@ -3,9 +3,9 @@ set -e

# Initialize variables with default values
BUILD_TYPE="Release"
CODE_COVERAGE=false
GPU=false
USE_LOCAL_COMPADRE=false
CHECK_CODE_COVERAGE=OFF
USE_GPU=OFF
USE_PROTEGO_MECH=OFF

# Get the current directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
@@ -20,19 +20,19 @@ while [[ $# -gt 0 ]]; do
shift
;;
-c | --code-coverage)
CODE_COVERAGE=true
CHECK_CODE_COVERAGE=ON
BUILD_TYPE="Debug"
echo "Requested code coverage. Build type set to Debug"
shift
;;
-g | --gpu)
GPU=true
USE_GPU=ON
echo "Requested GPU support."
shift
;;
-l | --local-compadre)
USE_LOCAL_COMPADRE=true
echo "Using local compadre."
-p | --protego-mech)
USE_PROTEGO_MECH=ON
echo "Including protego-mech."
shift
;;
*)
@@ -42,22 +42,9 @@ while [[ $# -gt 0 ]]; do
esac
done

# Build directory base
BUILD_DIR="build/"

# Append to build directory based on build type
BUILD_DIR+="${BUILD_TYPE}"

# If some of the spack commands below fail, make sure the correct environment is activated, e.g.:
# spack env activate aperi-mech

CUDA_PATH=""
# Set build directory and GPU option
if ${GPU}; then
BUILD_DIR+="_gpu"
CUDA_PATH=$(spack location -i cuda)
fi

# Check for cmake in the spack environment, if not found, use the system cmake
if spack find -p cmake; then
cmake=$(spack location -i --first cmake)/bin/cmake
@@ -68,24 +55,19 @@ fi
# Configure CMake with specified build type and other options
cmake_command="${cmake}"

cmake_command+=" -D GPU:BOOL=${GPU}"
cmake_command+=" -D USE_GPU:BOOL=${USE_GPU}"
cmake_command+=" -D USE_PROTEGO_MECH:BOOL=${USE_PROTEGO_MECH}"
cmake_command+=" -D CMAKE_BUILD_TYPE:STRING=\"${BUILD_TYPE}\""
cmake_command+=" -D TRILINOS_PATH:FILEPATH=$(spack location -i trilinos)"
cmake_command+=" -D Kokkos_ROOT:FILEPATH=$(spack location -i kokkos)"
cmake_command+=" -D EIGEN_PATH:FILEPATH=$(spack location -i eigen)"
cmake_command+=" -D GTEST_PATH:FILEPATH=$(spack location -i googletest)"
cmake_command+=" -D YAML-CPP_PATH:FILEPATH=$(spack location -i yaml-cpp)"
cmake_command+=" -D OPENMPI_PATH:FILEPATH=$(spack location -i openmpi)"

if ${USE_LOCAL_COMPADRE}; then
COMPADRE_PATH="/home/azureuser/projects/compadre/${BUILD_DIR}/install"
else
COMPADRE_PATH=$(spack location -i compadre)
fi
cmake_command+=" -D COMPADRE_PATH:FILEPATH=${COMPADRE_PATH}"
cmake_command+=" -D COMPADRE_PATH:FILEPATH=$(spack location -i compadre)"

# Add the lcov path to the cmake command
if ${CODE_COVERAGE}; then
if [[ ${CHECK_CODE_COVERAGE} == "ON" ]]; then
# Verify that lcov is in /usr/bin
if [[ -f /usr/bin/lcov ]]; then
cmake_command+=" -D LCOV_BIN_DIR:FILEPATH=/usr/bin"
@@ -101,9 +83,11 @@ if ${CODE_COVERAGE}; then
fi
fi
fi
cmake_command+=" -D CODE_COVERAGE:BOOL=${CODE_COVERAGE}"
cmake_command+=" -D CHECK_CODE_COVERAGE:BOOL=${CHECK_CODE_COVERAGE}"

if ${GPU}; then
if [[ ${USE_GPU} == "ON" ]]; then
# Add the cuda path to the cmake command
CUDA_PATH=$(spack location -i cuda)
# Add the cuda compiler to the cmake command
cmake_command+=" -D CMAKE_CUDA_COMPILER:FILEPATH=${CUDA_PATH}/bin/nvcc"
# Add the cuda flags to the cmake command. There has got to be a better way to do this. Needed?
@@ -134,7 +118,20 @@ cmake_command+=" -D CMAKE_CXX_EXTENSIONS=Off"
cmake_command+=" ${SCRIPT_DIR}"

# Create build directory
[[ ${CODE_COVERAGE} == true ]] && BUILD_DIR+="_cov"
# Build directory base
BUILD_DIR="build/${BUILD_TYPE}"

# Append to build directory based on GPU usage
if [[ ${USE_GPU} == "ON" ]]; then
BUILD_DIR+="_gpu"
fi

# Append to build directory based on code coverage
if [[ ${CHECK_CODE_COVERAGE} == "ON" ]]; then
BUILD_DIR+="_cov"
fi

# Create the build directory and change to it
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}" || exit

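As a usage sketch of the reworked script (flag names taken from the option parsing above), a GPU build with protego-mech enabled would be configured with:

    ./do_configure --gpu --protego-mech

which sets USE_GPU=ON and USE_PROTEGO_MECH=ON and configures into build/Release_gpu, while

    ./do_configure --code-coverage

switches the build type to Debug and configures into build/Debug_cov.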
6 changes: 5 additions & 1 deletion include/BoundaryCondition.h
@@ -15,7 +15,7 @@ namespace aperi {

class BoundaryCondition {
public:
BoundaryCondition(std::vector<std::pair<size_t, double>> components_and_values, std::pair<std::function<double(double)>, std::function<double(double)>> time_functions, std::vector<std::string> sets, std::shared_ptr<aperi::MeshData> mesh_data) : m_components_and_values(components_and_values), m_velocity_time_function(time_functions.first), m_acceleration_time_function(time_functions.second) {
BoundaryCondition(std::vector<std::pair<size_t, double>> components_and_values, std::pair<std::function<double(double)>, std::function<double(double)>> time_functions, std::vector<std::string> sets, std::shared_ptr<aperi::MeshData> mesh_data) : m_components_and_values(components_and_values), m_velocity_time_function(time_functions.first), m_acceleration_time_function(time_functions.second), m_sets(sets) {
const std::array<FieldQueryData<double>, 1> velocity_field_query_data_vec = {FieldQueryData<double>{"velocity", FieldQueryState::NP1}};
const std::array<FieldQueryData<double>, 1> acceleration_field_query_data_vec = {FieldQueryData<double>{"acceleration", FieldQueryState::NP1}};
m_node_processor_velocity = std::make_shared<aperi::NodeProcessor<1>>(velocity_field_query_data_vec, mesh_data, sets);
Expand All @@ -30,10 +30,14 @@ class BoundaryCondition {
// Apply the acceleration boundary condition to the field
void ApplyAcceleration(double time);

// Get the set names
std::vector<std::string> GetSetNames() const { return m_sets; }

private:
std::vector<std::pair<size_t, double>> m_components_and_values;
std::function<double(double)> m_velocity_time_function;
std::function<double(double)> m_acceleration_time_function;
std::vector<std::string> m_sets;
std::shared_ptr<aperi::NodeProcessor<1>> m_node_processor_velocity;
std::shared_ptr<aperi::NodeProcessor<1>> m_node_processor_acceleration;
};
1 change: 1 addition & 0 deletions include/ElementReproducingKernel.h
@@ -10,6 +10,7 @@
#include "ElementProcessor.h"
#include "ElementUtils.h"
#include "FieldData.h"
#include "FunctionValueStorageProcessor.h"
#include "Kokkos_Core.hpp"
#include "Material.h"
#include "MeshData.h"
34 changes: 2 additions & 32 deletions include/FieldData.h
@@ -1,5 +1,6 @@
#pragma once

#include <stdexcept>
#include <string>
#include <variant>
#include <vector>
@@ -99,37 +100,6 @@ struct FieldQueryData {
* @brief Function to get default field data.
* @return A vector of default FieldData.
*/
inline std::vector<FieldData> GetFieldData(bool use_strain_smoothing = true) {
std::vector<FieldData> field_data;

// Node data
field_data.push_back(FieldData("velocity", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 2, std::vector<double>{})); // The velocity field, generalized
field_data.push_back(FieldData("displacement", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 2, std::vector<double>{})); // The displacement field, generalized
field_data.push_back(FieldData("acceleration", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 2, std::vector<double>{})); // The acceleration field, generalized
field_data.push_back(FieldData("force", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 2, std::vector<double>{})); // The force field, generalized
field_data.push_back(FieldData("mass_from_elements", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 1, std::vector<double>{})); // The mass as determined from the attached elements
field_data.push_back(FieldData("mass", FieldDataRank::VECTOR, FieldDataTopologyRank::NODE, 1, std::vector<double>{})); // The mass field (mass_from_elements as coefficients on the approximation functions)

// Element data
field_data.push_back(FieldData("volume", FieldDataRank::SCALAR, FieldDataTopologyRank::ELEMENT, 1, std::vector<double>{}));

// TODO(jake): Add ability to turn this on / off per part
if (use_strain_smoothing) {
// TODO(jake): Some of these fields are only really needed for RK, but NeighborSearchProcessor needs to be refactored to allow for this
// Node neighbor data.
field_data.push_back(FieldData("num_neighbors", FieldDataRank::SCALAR, FieldDataTopologyRank::NODE, 1, std::vector<uint64_t>{})); // The number of neighbors for the node
field_data.push_back(FieldData("neighbors", FieldDataRank::CUSTOM, FieldDataTopologyRank::NODE, 1, MAX_NODE_NUM_NEIGHBORS, std::vector<uint64_t>{})); // The neighbors of the node
field_data.push_back(FieldData("function_values", FieldDataRank::CUSTOM, FieldDataTopologyRank::NODE, 1, MAX_NODE_NUM_NEIGHBORS, std::vector<double>{})); // The function values of neighbors at the node
field_data.push_back(FieldData("kernel_radius", FieldDataRank::SCALAR, FieldDataTopologyRank::NODE, 1, std::vector<double>{})); // The kernel radius for the node

// Cell neighbor data.
field_data.push_back(FieldData("num_neighbors", FieldDataRank::SCALAR, FieldDataTopologyRank::ELEMENT, 1, std::vector<uint64_t>{})); // The number of neighbors for the cell
field_data.push_back(FieldData("neighbors", FieldDataRank::CUSTOM, FieldDataTopologyRank::ELEMENT, 1, MAX_CELL_NUM_NEIGHBORS, std::vector<uint64_t>{})); // The neighbors of the cell
field_data.push_back(FieldData("function_derivatives_x", FieldDataRank::CUSTOM, FieldDataTopologyRank::ELEMENT, 1, MAX_CELL_NUM_NEIGHBORS, std::vector<double>{})); // The function derivatives in x of neighbors at the cell
field_data.push_back(FieldData("function_derivatives_y", FieldDataRank::CUSTOM, FieldDataTopologyRank::ELEMENT, 1, MAX_CELL_NUM_NEIGHBORS, std::vector<double>{})); // The function derivatives in y of neighbors at the cell
field_data.push_back(FieldData("function_derivatives_z", FieldDataRank::CUSTOM, FieldDataTopologyRank::ELEMENT, 1, MAX_CELL_NUM_NEIGHBORS, std::vector<double>{})); // The function derivatives in z of neighbors at the cell
}
return field_data;
}
std::vector<FieldData> GetFieldData(bool use_strain_smoothing = true);

} // namespace aperi
1 change: 1 addition & 0 deletions include/ForceContribution.h
@@ -11,6 +11,7 @@ class ForceContribution {
public:
ForceContribution() = default;
~ForceContribution() = default;
virtual void Preprocess(){};
virtual void ComputeForce() = 0;
};
