diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 05a6388fc..4a8d7bbad 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -90,9 +90,9 @@ jobs: "$InstallDir/bin/jiminy_double_pendulum" - mkdir -p "$RootDir/examples/pip_extension/build" - cd "$RootDir/examples/pip_extension/build" - cmake "$RootDir/examples/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" \ + mkdir -p "$RootDir/examples/cpp/pip_extension/build" + cd "$RootDir/examples/cpp/pip_extension/build" + cmake "$RootDir/examples/cpp/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" \ -DCMAKE_PREFIX_PATH="$InstallDir" -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" \ -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" make install diff --git a/.github/workflows/manylinux.yml b/.github/workflows/manylinux.yml index 4ceab5afd..bafe0728b 100644 --- a/.github/workflows/manylinux.yml +++ b/.github/workflows/manylinux.yml @@ -15,7 +15,7 @@ jobs: strategy: matrix: - container: ['quay.io/pypa/manylinux2010_x86_64', + container: ['quay.io/pypa/manylinux2014_x86_64', 'quay.io/pypa/manylinux_2_24_x86_64'] PYTHON_VERSION: ['cp36', 'cp37', 'cp38', 'cp39'] legacy: [false] @@ -136,7 +136,7 @@ jobs: packages_dir: build/wheelhouse - name: Publish on PyPi the wheel of Gym Jiminy (Any platform / Any python3 version) if: >- - success() && matrix.container == 'manylinux2010_x86_64' && matrix.PYTHON_VERSION == 'cp36' && matrix.legacy == false && + success() && matrix.container == 'quay.io/pypa/manylinux2014_x86_64' && matrix.PYTHON_VERSION == 'cp36' && matrix.legacy == false && github.repository == 'duburcqa/jiminy' && github.event_name == 'push' && github.ref == 'refs/heads/master' uses: pypa/gh-action-pypi-publish@master with: diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml index 49eec3a1c..df6bb1678 100644 --- a/.github/workflows/ubuntu.yml +++ b/.github/workflows/ubuntu.yml @@ -22,9 +22,6 @@ jobs: run: shell: bash -ieo pipefail {0} # Using bash enables automatic sourcing `.bashrc` and fail-fast behavior - env: - BUILD_TYPE: "Release" - ##################################################################################### steps: @@ -49,8 +46,8 @@ jobs: - name: Installing requirements run: | sudo env "PATH=$PATH" "${GITHUB_WORKSPACE}/build_tools/easy_install_deps_ubuntu.sh" - "${PYTHON_EXECUTABLE}" -m pip install --upgrade numpy "${PYTHON_EXECUTABLE}" -m pip install tensorflow + "${PYTHON_EXECUTABLE}" -m pip install --upgrade numpy "${PYTHON_EXECUTABLE}" -m pip install "torch==1.8.0+cpu" -f https://download.pytorch.org/whl/torch_stable.html "${PYTHON_EXECUTABLE}" -m pip install --prefer-binary "gym>=0.18.3" "stable_baselines3>=0.10" "importlib-metadata>=3.3.0" "${PYTHON_EXECUTABLE}" -m pip install "ray[default,rllib]<=1.4.0" # Type checking is not working with 1.4.1 @@ -73,7 +70,9 @@ jobs: -DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF \ -DBoost_NO_SYSTEM_PATHS=OFF -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" \ -DBUILD_TESTING=ON -DBUILD_EXAMPLES=ON -DBUILD_PYTHON_INTERFACE=ON \ - -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" + -DCMAKE_BUILD_TYPE="Debug" + make install -j2 + cmake -DCMAKE_BUILD_TYPE="Release" . 
          make install -j2

    #####################################################################################

@@ -82,10 +81,10 @@ jobs:
       run: |
         "$InstallDir/bin/jiminy_double_pendulum"

-        mkdir -p "$RootDir/examples/pip_extension/build"
-        cd "$RootDir/examples/pip_extension/build"
-        cmake "$RootDir/examples/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" \
-              -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" -DCMAKE_BUILD_TYPE="${BUILD_TYPE}"
+        mkdir -p "$RootDir/examples/cpp/pip_extension/build"
+        cd "$RootDir/examples/cpp/pip_extension/build"
+        cmake "$RootDir/examples/cpp/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" \
+              -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" -DCMAKE_BUILD_TYPE="Release"
         make install
         "$InstallDir/bin/pip_double_pendulum"
diff --git a/.github/workflows/win.yml b/.github/workflows/win.yml
index 3ed4e40bb..a0b752976 100644
--- a/.github/workflows/win.yml
+++ b/.github/workflows/win.yml
@@ -136,13 +136,13 @@ jobs:

         & "$InstallDir/bin/jiminy_double_pendulum.exe"

-        mkdir -p "$RootDir/examples/pip_extension/build"
-        cd "$RootDir/examples/pip_extension/build"
+        mkdir -p "$RootDir/examples/cpp/pip_extension/build"
+        cd "$RootDir/examples/cpp/pip_extension/build"
         $JIMINY_LIB_DIR = (python -c "import os, jiminy_py ; print(os.path.dirname(jiminy_py.get_libraries()))")
         $env:Path += ";$JIMINY_LIB_DIR"

-        cmake "$RootDir/examples/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" `
+        cmake "$RootDir/examples/cpp/pip_extension" -DCMAKE_INSTALL_PREFIX="$InstallDir" `
               -DCMAKE_PREFIX_PATH="$InstallDir" -DPYTHON_REQUIRED_VERSION="${{ matrix.python-version }}" `
               -DCMAKE_BUILD_TYPE="${BUILD_TYPE}"
         cmake --build . --target INSTALL --config "${env:BUILD_TYPE}"
diff --git a/CMakeLists.txt b/CMakeLists.txt
index bb9781a17..6fc3691e6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,7 +2,7 @@
 cmake_minimum_required(VERSION 3.10)

 # Set the build version
-set(BUILD_VERSION 1.6.30)
+set(BUILD_VERSION 1.7.3)

 # Set compatibility
 if(CMAKE_VERSION VERSION_GREATER "3.11.0")
diff --git a/build_tools/build_install_deps_linux.sh b/build_tools/build_install_deps_linux.sh
index e0a6cda42..68cca48e7 100755
--- a/build_tools/build_install_deps_linux.sh
+++ b/build_tools/build_install_deps_linux.sh
@@ -10,9 +10,9 @@ if [ -z ${BUILD_TYPE} ]; then
 fi

 ### Set common CMAKE_C/CXX_FLAGS
-CMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -fPIC -s"
+CMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -fPIC"
 if [ "${BUILD_TYPE}" == "Release" ]; then
-  CMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -O3 -DNDEBUG"
+  CMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -O3 -s -DNDEBUG"
 else
   CMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -O0 -g"
 fi
diff --git a/core/include/jiminy/core/Types.h b/core/include/jiminy/core/Types.h
index 16dccdd1b..dfdb7b5d7 100644
--- a/core/include/jiminy/core/Types.h
+++ b/core/include/jiminy/core/Types.h
@@ -67,10 +67,11 @@ namespace jiminy
     using matrixN_t = Eigen::Matrix<float64_t, Eigen::Dynamic, Eigen::Dynamic>;
     using matrix2_t = Eigen::Matrix<float64_t, 2, 2>;
     using matrix3_t = Eigen::Matrix<float64_t, 3, 3>;
+    using matrix6N_t = Eigen::Matrix<float64_t, 6, Eigen::Dynamic>;
     using vectorN_t = Eigen::Matrix<float64_t, Eigen::Dynamic, 1>;
+    using vector2_t = Eigen::Matrix<float64_t, 2, 1>;
     using vector3_t = Eigen::Matrix<float64_t, 3, 1>;
     using vector6_t = Eigen::Matrix<float64_t, 6, 1>;
-    using rowN_t = Eigen::Matrix<float64_t, 1, Eigen::Dynamic>;

     using constMatrixBlock_t = Eigen::Block<matrixN_t const> const;
     using constVectorBlock_t = Eigen::VectorBlock<vectorN_t const> const;
@@ -120,7 +121,7 @@ namespace jiminy
     /* Ground profile signature.
        Note that it is impossible to use function pointer since it does not support functors.
     */
-    using heatMapFunctor_t = std::function<std::pair<float64_t, vector3_t>(vector3_t const & /*pos*/)>;
+    using heightmapFunctor_t = std::function<std::pair<float64_t, vector3_t>(vector3_t const & /*pos*/)>;

     // Flexible joints
     struct flexibleJointData_t
@@ -159,7 +160,7 @@ namespace jiminy
     // Configuration/option holder
     using configField_t = boost::make_recursive_variant<
-        bool_t, uint32_t, int32_t, float64_t, std::string, vectorN_t, matrixN_t, heatMapFunctor_t,
+        bool_t, uint32_t, int32_t, float64_t, std::string, vectorN_t, matrixN_t, heightmapFunctor_t,
         std::vector<std::string>, std::vector<vectorN_t>, std::vector<matrixN_t>, flexibilityConfig_t,
         std::unordered_map<std::string, boost::recursive_variant_>
     >::type;
diff --git a/core/include/jiminy/core/constraints/FixedFrameConstraint.h b/core/include/jiminy/core/constraints/FixedFrameConstraint.h
index 9840dfba4..2d7996f3a 100644
--- a/core/include/jiminy/core/constraints/FixedFrameConstraint.h
+++ b/core/include/jiminy/core/constraints/FixedFrameConstraint.h
@@ -39,8 +39,7 @@ namespace jiminy
        /// \param[in] frameName    Name of the frame on which the constraint is to be applied.
        ///////////////////////////////////////////////////////////////////////////////////////////////
        FixedFrameConstraint(std::string const & frameName,
-                             Eigen::Matrix<bool_t, 6, 1> const & maskFixed = Eigen::Matrix<bool_t, 6, 1>::Constant(true),
-                             pinocchio::ReferenceFrame const & frameRef = pinocchio::LOCAL_WORLD_ALIGNED);
+                             Eigen::Matrix<bool_t, 6, 1> const & maskFixed = Eigen::Matrix<bool_t, 6, 1>::Constant(true));
        virtual ~FixedFrameConstraint(void);

        std::string const & getFrameName(void) const;
@@ -48,10 +47,11 @@
        std::vector<uint32_t> const & getDofsFixed(void) const;

-        pinocchio::ReferenceFrame const & getReferenceFrame(void) const;
-
        void setReferenceTransform(pinocchio::SE3 const & transformRef);
-        pinocchio::SE3 & getReferenceTransform(void);
+        pinocchio::SE3 const & getReferenceTransform(void) const;
+
+        void setLocalFrame(matrix3_t const & frameRot);
+        matrix3_t const & getLocalFrame(void) const;

        virtual hresult_t reset(vectorN_t const & q,
                                vectorN_t const & v) override final;
@@ -62,11 +62,11 @@
    private:
        std::string const frameName_;         ///< Name of the frame on which the constraint operates.
        frameIndex_t frameIdx_;               ///< Corresponding frame index.
-        pinocchio::ReferenceFrame frameRef_;  ///< Reference frame.
        std::vector<uint32_t> dofsFixed_;     ///< Degrees of freedom to fix.
        pinocchio::SE3 transformRef_;         ///< Reference pose of the frame to enforce.
-        matrixN_t frameJacobian_;             ///< Stores full frame jacobian in reference frame.
-        vector6_t frameDrift_;                ///< Stores full frame drift in reference frame.
+        matrix3_t rotationLocal_;             ///< Rotation matrix of the local frame in which to apply masking
+        matrix6N_t frameJacobian_;            ///< Stores full frame jacobian in reference frame.
+        pinocchio::Motion frameDrift_;        ///< Stores full frame drift in reference frame.
}; } diff --git a/core/include/jiminy/core/constraints/JointConstraint.h b/core/include/jiminy/core/constraints/JointConstraint.h index 38c6dd330..e7384d4c3 100644 --- a/core/include/jiminy/core/constraints/JointConstraint.h +++ b/core/include/jiminy/core/constraints/JointConstraint.h @@ -34,12 +34,8 @@ namespace jiminy std::string const & getJointName(void) const; jointIndex_t const & getJointIdx(void) const; - template - void setReferenceConfiguration(Eigen::MatrixBase const & configurationRef) - { - configurationRef_ = configurationRef; - } - vectorN_t & getReferenceConfiguration(void); + void setReferenceConfiguration(vectorN_t const & configurationRef); + vectorN_t const & getReferenceConfiguration(void) const; virtual hresult_t reset(vectorN_t const & q, vectorN_t const & v) override final; diff --git a/core/include/jiminy/core/constraints/SphereConstraint.h b/core/include/jiminy/core/constraints/SphereConstraint.h index 046550cf0..360369c0d 100644 --- a/core/include/jiminy/core/constraints/SphereConstraint.h +++ b/core/include/jiminy/core/constraints/SphereConstraint.h @@ -43,14 +43,14 @@ namespace jiminy /////////////////////////////////////////////////////////////////////////////////////////////// SphereConstraint(std::string const & frameName, float64_t const & sphereRadius, - vector3_t const & groundNormal = (vector3_t() << 0.0, 0.0, 1.0).finished()); + vector3_t const & groundNormal = vector3_t::UnitZ()); virtual ~SphereConstraint(void); std::string const & getFrameName(void) const; frameIndex_t const & getFrameIdx(void) const; void setReferenceTransform(pinocchio::SE3 const & transformRef); - pinocchio::SE3 & getReferenceTransform(void); + pinocchio::SE3 const & getReferenceTransform(void) const; virtual hresult_t reset(vectorN_t const & /* q */, vectorN_t const & /* v */) override final; diff --git a/core/include/jiminy/core/constraints/WheelConstraint.h b/core/include/jiminy/core/constraints/WheelConstraint.h index 3dd72e8f7..00cd2b9a4 100644 --- a/core/include/jiminy/core/constraints/WheelConstraint.h +++ b/core/include/jiminy/core/constraints/WheelConstraint.h @@ -52,7 +52,7 @@ namespace jiminy frameIndex_t const & getFrameIdx(void) const; void setReferenceTransform(pinocchio::SE3 const & transformRef); - pinocchio::SE3 & getReferenceTransform(void); + pinocchio::SE3 const & getReferenceTransform(void) const; virtual hresult_t reset(vectorN_t const & /* q */, vectorN_t const & /* v */) override final; diff --git a/core/include/jiminy/core/engine/EngineMultiRobot.h b/core/include/jiminy/core/engine/EngineMultiRobot.h index 466daa5c7..8d46ac6e8 100644 --- a/core/include/jiminy/core/engine/EngineMultiRobot.h +++ b/core/include/jiminy/core/engine/EngineMultiRobot.h @@ -116,12 +116,15 @@ namespace jiminy configHolder_t config; config["model"] = std::string("spring_damper"); // ["spring_damper", "impulse"] config["solver"] = std::string("PGS"); // ["PGS",] + config["tolAbs"] = 1.0e-4; + config["tolRel"] = 1.0e-3; config["regularization"] = 0.0; // Relative inverse damping wrt. diagonal of J.Minv.J.t. 0.0 to enforce the minimum absolute regularizer. 
- config["stabilizationFreq"] = 0.0; // [s-1]: 0.0 to disable + config["stabilizationFreq"] = 20.0; // [s-1]: 0.0 to disable config["stiffness"] = 1.0e6; config["damping"] = 2.0e3; config["transitionEps"] = 1.0e-3; // [m] config["friction"] = 1.0; + config["torsion"] = 0.0; config["transitionVelocity"] = 1.0e-2; // [m.s-1] return config; @@ -140,10 +143,10 @@ namespace jiminy { configHolder_t config; config["gravity"] = (vectorN_t(6) << 0.0, 0.0, -9.81, 0.0, 0.0, 0.0).finished(); - config["groundProfile"] = heatMapFunctor_t( - [](vector3_t const & /* pos */) -> std::pair + config["groundProfile"] = heightmapFunctor_t( + [](vector3_t const & /* pos */) -> std::pair { - return {0.0, (vector3_t() << 0.0, 0.0, 1.0).finished()}; + return {0.0, vector3_t::UnitZ()}; }); return config; @@ -199,23 +202,29 @@ namespace jiminy { std::string const model; std::string const solver; + float64_t const tolAbs; + float64_t const tolRel; float64_t const regularization; float64_t const stabilizationFreq; float64_t const stiffness; float64_t const damping; float64_t const transitionEps; float64_t const friction; + float64_t const torsion; float64_t const transitionVelocity; contactOptions_t(configHolder_t const & options) : model(boost::get(options.at("model"))), solver(boost::get(options.at("solver"))), + tolAbs(boost::get(options.at("tolAbs"))), + tolRel(boost::get(options.at("tolRel"))), regularization(boost::get(options.at("regularization"))), stabilizationFreq(boost::get(options.at("stabilizationFreq"))), stiffness(boost::get(options.at("stiffness"))), damping(boost::get(options.at("damping"))), transitionEps(boost::get(options.at("transitionEps"))), friction(boost::get(options.at("friction"))), + torsion(boost::get(options.at("torsion"))), transitionVelocity(boost::get(options.at("transitionVelocity"))) { // Empty. @@ -238,11 +247,11 @@ namespace jiminy struct worldOptions_t { vectorN_t const gravity; - heatMapFunctor_t const groundProfile; + heightmapFunctor_t const groundProfile; worldOptions_t(configHolder_t const & options) : gravity(boost::get(options.at("gravity"))), - groundProfile(boost::get(options.at("groundProfile"))) + groundProfile(boost::get(options.at("groundProfile"))) { // Empty. } @@ -515,8 +524,6 @@ namespace jiminy /// \return Contact force, at parent joint, in the local frame. void computeContactDynamicsAtBody(systemHolder_t const & system, pairIndex_t const & collisionPairIdx, - vectorN_t const & q, - vectorN_t const & v, std::shared_ptr & contactConstraint, pinocchio::Force & fextLocal) const; @@ -527,8 +534,6 @@ namespace jiminy /// \return Contact force, at parent joint, in the local frame. 
void computeContactDynamicsAtFrame(systemHolder_t const & system, frameIndex_t const & frameIdx, - vectorN_t const & q, - vectorN_t const & v, std::shared_ptr & collisionConstraint, pinocchio::Force & fextLocal) const; @@ -550,8 +555,6 @@ namespace jiminy vectorN_t & uInternal) const; void computeCollisionForces(systemHolder_t const & system, systemDataHolder_t & systemData, - vectorN_t const & q, - vectorN_t const & v, forceVector_t & fext) const; void computeExternalForces(systemHolder_t const & system, systemDataHolder_t & systemData, diff --git a/core/include/jiminy/core/robot/AbstractMotor.h b/core/include/jiminy/core/robot/AbstractMotor.h index c978a86c4..ffd291716 100644 --- a/core/include/jiminy/core/robot/AbstractMotor.h +++ b/core/include/jiminy/core/robot/AbstractMotor.h @@ -101,9 +101,6 @@ namespace jiminy AbstractMotorBase(AbstractMotorBase const & abstractMotor) = delete; AbstractMotorBase & operator = (AbstractMotorBase const & other) = delete; - auto shared_from_this() { return shared_from(this); } - auto shared_from_this() const { return shared_from(this); } - /////////////////////////////////////////////////////////////////////////////////////////////// /// \brief Constructor /// diff --git a/core/include/jiminy/core/robot/Model.h b/core/include/jiminy/core/robot/Model.h index 693183ede..e3189edfe 100644 --- a/core/include/jiminy/core/robot/Model.h +++ b/core/include/jiminy/core/robot/Model.h @@ -251,9 +251,6 @@ namespace jiminy Model(void); ~Model(void) = default; - auto shared_from_this() { return shared_from(this); } - auto shared_from_this() const { return shared_from(this); } - hresult_t initialize(std::string const & urdfPath, bool_t const & hasFreeflyer = true, std::vector const & meshPackageDirs = {}); @@ -332,6 +329,7 @@ namespace jiminy virtual void reset(void); bool_t const & getIsInitialized(void) const; + std::string const & getName(void) const; std::string const & getUrdfPath(void) const; std::vector const & getMeshPackageDirs(void) const; bool_t const & getHasFreeflyer(void) const; @@ -372,7 +370,8 @@ namespace jiminy protected: hresult_t initialize(pinocchio::Model const & pncModel, - pinocchio::GeometryModel const & collisionModel); + pinocchio::GeometryModel const & collisionModel, + pinocchio::GeometryModel const & visualModel); hresult_t generateModelFlexible(void); hresult_t generateModelBiased(void); @@ -400,12 +399,13 @@ namespace jiminy virtual hresult_t refreshProxies(void); public: + pinocchio::Model pncModelRigidOrig_; pinocchio::Model pncModel_; - mutable pinocchio::Data pncData_; pinocchio::GeometryModel collisionModel_; - mutable std::unique_ptr pncGeometryData_; // Using smart ptr to avoid having to initialize it with an empty GeometryModel, which causes Pinocchio segfault at least up to v2.5.6 - pinocchio::Model pncModelRigidOrig_; + pinocchio::GeometryModel visualModel_; pinocchio::Data pncDataRigidOrig_; + mutable pinocchio::Data pncData_; + mutable std::unique_ptr pncCollisionData_; // Using smart ptr to avoid having to initialize it with an empty GeometryModel, which causes Pinocchio segfault at least up to v2.5.6 std::unique_ptr mdlOptions_; forceVector_t contactForces_; ///< Buffer storing the contact forces diff --git a/core/include/jiminy/core/robot/PinocchioOverloadAlgorithms.h b/core/include/jiminy/core/robot/PinocchioOverloadAlgorithms.h index f95cf4f43..71d3950c0 100644 --- a/core/include/jiminy/core/robot/PinocchioOverloadAlgorithms.h +++ b/core/include/jiminy/core/robot/PinocchioOverloadAlgorithms.h @@ -349,13 +349,15 @@ 
namespace pinocchio_overload pinocchio::cholesky::decompose(model, data); } - // Compute sqrt(D)^-1 * U^-1 * J.T + // Compute sDUiJt := sqrt(D)^-1 * U^-1 * J.T data.sDUiJt = J.transpose(); pinocchio::cholesky::Uiv(model, data, data.sDUiJt); data.sDUiJt.array().colwise() /= data.D.array().sqrt(); - // Compute JMinvJt - data.JMinvJt.noalias() = data.sDUiJt.transpose() * data.sDUiJt; + // Compute JMinvJt := sDUiJt.T * sDUiJt + data.JMinvJt = matrixN_t::Zero(J.rows(), J.rows()); + data.JMinvJt.selfadjointView().rankUpdate(data.sDUiJt.transpose()); + data.JMinvJt.triangularView() = data.JMinvJt.transpose(); return data.JMinvJt; } diff --git a/core/include/jiminy/core/robot/Robot.h b/core/include/jiminy/core/robot/Robot.h index ab203851a..fe1ee1117 100644 --- a/core/include/jiminy/core/robot/Robot.h +++ b/core/include/jiminy/core/robot/Robot.h @@ -39,6 +39,9 @@ namespace jiminy auto shared_from_this() { return shared_from(this); } auto shared_from_this() const { return shared_from(this); } + hresult_t initialize(pinocchio::Model const & pncModel, + pinocchio::GeometryModel const & collisionModel, + pinocchio::GeometryModel const & visualModel); hresult_t initialize(std::string const & urdfPath, bool_t const & hasFreeflyer = true, std::vector const & meshPackageDirs = {}); diff --git a/core/include/jiminy/core/solver/LCPSolvers.h b/core/include/jiminy/core/solver/LCPSolvers.h index 6d91b3280..55c70e32a 100644 --- a/core/include/jiminy/core/solver/LCPSolvers.h +++ b/core/include/jiminy/core/solver/LCPSolvers.h @@ -6,8 +6,6 @@ namespace jiminy { - float64_t const EPS_DIVISION = 1.0e-9; - class AbstractLCPSolver { public: @@ -17,7 +15,7 @@ namespace jiminy /// /// using boxed bounds lo < x < hi instead of 0 < x: /// s.t. if fIdx[i] < 0, lo[i] < x[i] < hi[i] - /// else, - hi[i] x[fIdx[i]] < x[i] < hi[i] x[fIdx[i]] + /// else, - hi[i] * x[fIdx[i]] < x[i] < hi[i] * x[fIdx[i]] /// /// The result x will be stored in data.lambda_c. 
virtual bool_t BoxedForwardDynamics(pinocchio::Model const & model, @@ -55,8 +53,6 @@ namespace jiminy vectorN_t const & lo, vectorN_t const & hi, std::vector const & fIdx, - bool_t const & checkAbs, - bool_t const & checkRel, vectorN_t & x); bool_t ProjectedGaussSeidelSolver(matrixN_t & A, vectorN_t & b, diff --git a/core/include/jiminy/core/stepper/LieGroup.h b/core/include/jiminy/core/stepper/LieGroup.h index 32b1e3939..705a70811 100644 --- a/core/include/jiminy/core/stepper/LieGroup.h +++ b/core/include/jiminy/core/stepper/LieGroup.h @@ -443,7 +443,7 @@ namespace Eigen StateBase & out) const { // 'Sum' q = q + v, remember q is part of a Lie group (dim(q) != dim(v)) - assert(robot() == velocity.robot() == out.robot()); + assert(robot() == velocity.robot() && robot() == out.robot()); pinocchio::integrate(robot()->pncModel_, q(), velocity.v(), out.q()); out.v() = v() + velocity.a(); return out; @@ -459,7 +459,7 @@ namespace Eigen StateDerivativeBase & difference(StateBase const & position, StateDerivativeBase & out) const { - assert(robot() == position.robot() == out.robot()); + assert(robot() == position.robot() && robot() == out.robot()); pinocchio::difference(robot()->pncModel_, q(), position.q(), out.v()); out.a() = v() - position.v(); return out; @@ -1257,7 +1257,7 @@ namespace Eigen { \ std::vector::ValueType> const & \ vectorIn = other.vector(); \ - assert(vector_.size() == vectorOut.size()); \ + assert(vector_.size() == vectorIn.size()); \ for (std::size_t i = 0; i < vector_.size(); ++i) \ { \ vector_[i] += scale * vectorIn[i]; \ @@ -1305,7 +1305,7 @@ namespace Eigen { \ std::vector::ValueType> const & \ vectorIn = other.vector(); \ - assert(vector_.size() == vectorOut.size()); \ + assert(vector_.size() == vectorIn.size()); \ for (std::size_t i = 0; i < vector_.size(); ++i) \ { \ vector_[i].sumInPlace(scale * vectorIn[i]); \ diff --git a/core/include/jiminy/core/stepper/RungeKuttaDOPRIStepper.h b/core/include/jiminy/core/stepper/RungeKuttaDOPRIStepper.h index f7d53c17c..ed576d7a7 100644 --- a/core/include/jiminy/core/stepper/RungeKuttaDOPRIStepper.h +++ b/core/include/jiminy/core/stepper/RungeKuttaDOPRIStepper.h @@ -68,9 +68,9 @@ namespace jiminy /// \param[in] solution Current solution computed by the main Runge-Kutta step. /// \param[in, out] dt Timestep to be scaled. /// \return True on step success, false otherwise. dt is updated in place. - virtual bool adjustStep(state_t const & initialState, - state_t const & solution, - float64_t & dt) override final; + virtual bool_t adjustStep(state_t const & initialState, + state_t const & solution, + float64_t & dt) override final; private: /// \brief Run error computation algorithm to return normalized error. diff --git a/core/include/jiminy/core/utilities/Helpers.h b/core/include/jiminy/core/utilities/Helpers.h index f03987b4e..7b45b2156 100644 --- a/core/include/jiminy/core/utilities/Helpers.h +++ b/core/include/jiminy/core/utilities/Helpers.h @@ -107,20 +107,16 @@ namespace jiminy template typename std::common_type_t min(T0 && val1, T1 && val2, Ts &&... 
        vs);

-    template
-    auto clamp(Eigen::MatrixBase const & data,
-               float64_t const & minThr = -INF,
-               float64_t const & maxThr = +INF);
+    template<typename ScalarType>
+    constexpr ScalarType const & clamp(ScalarType const & data,
+                                       ScalarType const & minThr,
+                                       ScalarType const & maxThr);

     template
     Eigen::MatrixBase clamp(Eigen::MatrixBase const & data,
                             Eigen::MatrixBase const & minThr,
                             Eigen::MatrixBase const & maxThr);

-    float64_t clamp(float64_t const & data,
-                    float64_t const & minThr = -INF,
-                    float64_t const & maxThr = +INF);
-
     template<typename ... Args>
     float64_t minClipped(float64_t val1, float64_t val2, Args ... vs);
diff --git a/core/include/jiminy/core/utilities/Helpers.tpp b/core/include/jiminy/core/utilities/Helpers.tpp
index bbd8c537f..d161a02cf 100644
--- a/core/include/jiminy/core/utilities/Helpers.tpp
+++ b/core/include/jiminy/core/utilities/Helpers.tpp
@@ -18,6 +18,22 @@ namespace jiminy
         return min(std::min(val1, val2), std::forward<Ts>(vs)...);
     }

+    template<typename ScalarType>
+    constexpr ScalarType const & clamp(ScalarType const & data,
+                                       ScalarType const & minThr,
+                                       ScalarType const & maxThr)
+    {
+        if (data < minThr)
+        {
+            return minThr;
+        }
+        if (maxThr < data)
+        {
+            return maxThr;
+        }
+        return data;
+    }
+
     template
     auto clamp(Eigen::MatrixBase const & data,
                float64_t const & minThr,
diff --git a/core/include/jiminy/core/utilities/Json.tpp b/core/include/jiminy/core/utilities/Json.tpp
index 0cb2a6343..b8f28b4c4 100644
--- a/core/include/jiminy/core/utilities/Json.tpp
+++ b/core/include/jiminy/core/utilities/Json.tpp
@@ -25,7 +25,7 @@ namespace jiminy
     Json::Value convertToJson(flexibleJointData_t const & value);

     template<>
-    Json::Value convertToJson(heatMapFunctor_t const & value);
+    Json::Value convertToJson(heightmapFunctor_t const & value);

     template
     constexpr std::enable_if_t::value
@@ -107,7 +107,7 @@ namespace jiminy
     flexibleJointData_t convertFromJson(Json::Value const & value);

     template<>
-    heatMapFunctor_t convertFromJson(Json::Value const & value);
+    heightmapFunctor_t convertFromJson(Json::Value const & value);

     template
     std::enable_if_t, T>
diff --git a/core/include/jiminy/core/utilities/Random.h b/core/include/jiminy/core/utilities/Random.h
index f0f8712af..c9c76eaae 100644
--- a/core/include/jiminy/core/utilities/Random.h
+++ b/core/include/jiminy/core/utilities/Random.h
@@ -33,6 +33,8 @@ namespace jiminy

     void shuffleIndices(std::vector & vector);

+    // ************ Continuous 1D Perlin processes ***************
+
     class PeriodicGaussianProcess
     {
     public:
@@ -185,11 +187,11 @@ namespace jiminy
     /* \brief Sum of Perlin noise octaves.
        \details The original implementation uses fixed size permutation table to generate
-           random gradient directions. As a result, the generated process is inherently
-           periodic, which must be avoided. To circumvent this limitation, MurmurHash3
-           algorithm is used to get random gradients at every point in time, without any
-           periodicity, but deterministically for a given seed. It is computationally more
-           depending but not critically slower.
+                random gradient directions. As a result, the generated process is inherently
+                periodic, which must be avoided. To circumvent this limitation, MurmurHash3
+                algorithm is used to get random gradients at every point in time, without any
+                periodicity, but deterministically for a given seed. It is computationally more
+                demanding but not critically slower.
/sa For technical references: - https://www.scratchapixel.com/lessons/procedural-generation-virtual-worlds/perlin-noise-part-2 @@ -262,6 +264,22 @@ namespace jiminy private: float64_t const period_; }; + + // ************ Random terrain generators *************** + + heightmapFunctor_t randomTileGround(vector2_t const & size, + float64_t const & heightMax, + vector2_t const & interpDelta, + uint32_t const & sparsity, + float64_t const & orientation, + uint32_t const & seed); + + heightmapFunctor_t sumHeightmap(std::vector const & heightmaps); + heightmapFunctor_t mergeHeightmap(std::vector const & heightmaps); + + matrixN_t discretizeHeightmap(heightmapFunctor_t const & heightmap, + float64_t const & gridSize, + float64_t const & gridUnit); } #endif // JIMINY_RANDOM_H diff --git a/core/src/Constants.cc b/core/src/Constants.cc index 2b460f94d..0924def51 100644 --- a/core/src/Constants.cc +++ b/core/src/Constants.cc @@ -19,7 +19,7 @@ namespace jiminy float64_t const SIMULATION_MIN_TIMESTEP = 1e-6; float64_t const SIMULATION_MAX_TIMESTEP = 0.02; - uint32_t const PGS_MAX_ITERATIONS = 100U; + uint32_t const PGS_MAX_ITERATIONS = 50U; uint32_t const PGS_RANDOM_PERMUTATION_PERIOD = 0U; // 0 to disable float64_t const PGS_MIN_REGULARIZER = 1.0e-12; } diff --git a/core/src/constraints/FixedFrameConstraint.cc b/core/src/constraints/FixedFrameConstraint.cc index 33f9cce98..d7f81e0da 100644 --- a/core/src/constraints/FixedFrameConstraint.cc +++ b/core/src/constraints/FixedFrameConstraint.cc @@ -12,14 +12,13 @@ namespace jiminy std::string const AbstractConstraintTpl::type_("FixedFrameConstraint"); FixedFrameConstraint::FixedFrameConstraint(std::string const & frameName, - Eigen::Matrix const & maskFixed, - pinocchio::ReferenceFrame const & frameRef) : + Eigen::Matrix const & maskFixed) : AbstractConstraintTpl(), frameName_(frameName), frameIdx_(0), - frameRef_(frameRef), dofsFixed_(), transformRef_(), + rotationLocal_(), frameJacobian_(), frameDrift_() { @@ -50,11 +49,6 @@ namespace jiminy return frameIdx_; } - pinocchio::ReferenceFrame const & FixedFrameConstraint::getReferenceFrame(void) const - { - return frameRef_; - } - std::vector const & FixedFrameConstraint::getDofsFixed(void) const { return dofsFixed_; @@ -65,11 +59,21 @@ namespace jiminy transformRef_ = transformRef; } - pinocchio::SE3 & FixedFrameConstraint::getReferenceTransform(void) + pinocchio::SE3 const & FixedFrameConstraint::getReferenceTransform(void) const { return transformRef_; } + void FixedFrameConstraint::setLocalFrame(matrix3_t const & frameRot) + { + rotationLocal_ = frameRot; + } + + matrix3_t const & FixedFrameConstraint::getLocalFrame(void) const + { + return rotationLocal_; + } + hresult_t FixedFrameConstraint::reset(vectorN_t const & /* q */, vectorN_t const & /* v */) { @@ -92,7 +96,7 @@ namespace jiminy if (returnCode == hresult_t::SUCCESS) { // Initialize jacobian, drift and multipliers - frameJacobian_ = matrixN_t::Zero(6, model->pncModel_.nv); + frameJacobian_ = matrix6N_t::Zero(6, model->pncModel_.nv); frameDrift_ = vector6_t::Zero(); uint64_t const dim = dofsFixed_.size(); jacobian_ = matrixN_t::Zero(dim, model->pncModel_.nv); @@ -101,6 +105,9 @@ namespace jiminy // Get the current frame position and use it as reference transformRef_ = model->pncData_.oMf[frameIdx_]; + + // Set local frame to world by default + rotationLocal_.setIdentity(); } return returnCode; @@ -118,40 +125,53 @@ namespace jiminy // Assuming the model still exists. 
auto model = model_.lock(); - // Get jacobian + // Define inverse rotation matrix of local frame + auto rotInvLocal = rotationLocal_.transpose(); + + // Get jacobian in local frame getFrameJacobian(model->pncModel_, model->pncData_, frameIdx_, - frameRef_, + pinocchio::LOCAL_WORLD_ALIGNED, frameJacobian_); - // Get drift + pinocchio::Frame const & frame = model->pncModel_.frames[frameIdx_]; + pinocchio::JointModel const & joint = model->pncModel_.joints[frame.parent]; + int32_t const colRef = joint.nv() + joint.idx_v() - 1; + for(Eigen::DenseIndex j=colRef; j>=0; j=model->pncData_.parents_fromRow[j]) + { + pinocchio::MotionRef J_col(frameJacobian_.col(j)); + J_col.linear() = rotInvLocal * J_col.linear(); + J_col.angular() = rotInvLocal * J_col.angular(); + } + + // Get drift in world frame frameDrift_ = getFrameAcceleration(model->pncModel_, model->pncData_, frameIdx_, - frameRef_).toVector(); - - // Add Baumgarte stabilization drift - if (frameRef_ == pinocchio::LOCAL_WORLD_ALIGNED || frameRef_ == pinocchio::WORLD) - { - auto deltaPosition = model->pncData_.oMf[frameIdx_].translation() - transformRef_.translation(); - frameDrift_.head<3>() += kp_ * deltaPosition; - auto deltaRotation = transformRef_.rotation().transpose() * model->pncData_.oMf[frameIdx_].rotation(); - vectorN_t const axis = pinocchio::log3(deltaRotation); - frameDrift_.tail<3>() += kp_ * axis; - } - vector6_t const velocity = getFrameVelocity(model->pncModel_, - model->pncData_, - frameIdx_, - frameRef_).toVector(); + pinocchio::LOCAL_WORLD_ALIGNED); + + // Add Baumgarte stabilization to drift in world frame + vector3_t const deltaPosition = model->pncData_.oMf[frameIdx_].translation() - transformRef_.translation(); + matrix3_t const deltaRotation = transformRef_.rotation().transpose() * model->pncData_.oMf[frameIdx_].rotation(); + frameDrift_.linear() += kp_ * deltaPosition; + frameDrift_.angular() += kp_ * pinocchio::log3(deltaRotation); + pinocchio::Motion const velocity = getFrameVelocity(model->pncModel_, + model->pncData_, + frameIdx_, + pinocchio::LOCAL_WORLD_ALIGNED); frameDrift_ += kd_ * velocity; + // Compute drift in local frame + frameDrift_.linear() = rotInvLocal * frameDrift_.linear(); + frameDrift_.angular() = rotInvLocal * frameDrift_.angular(); + // Extract masked jacobian and drift, only containing fixed dofs for (uint32_t i=0; i < dofsFixed_.size(); ++i) { uint32_t const & dofIndex = dofsFixed_[i]; jacobian_.row(i) = frameJacobian_.row(dofIndex); - drift_[i] = frameDrift_[dofIndex]; + drift_[i] = frameDrift_.toVector()[dofIndex]; } return hresult_t::SUCCESS; diff --git a/core/src/constraints/JointConstraint.cc b/core/src/constraints/JointConstraint.cc index 5c9169830..d9df66ef2 100644 --- a/core/src/constraints/JointConstraint.cc +++ b/core/src/constraints/JointConstraint.cc @@ -34,7 +34,12 @@ namespace jiminy return jointIdx_; } - vectorN_t & JointConstraint::getReferenceConfiguration(void) + void JointConstraint::setReferenceConfiguration(vectorN_t const & configurationRef) + { + configurationRef_ = configurationRef; + } + + vectorN_t const & JointConstraint::getReferenceConfiguration(void) const { return configurationRef_; } diff --git a/core/src/constraints/SphereConstraint.cc b/core/src/constraints/SphereConstraint.cc index 60386894a..d37cc1627 100644 --- a/core/src/constraints/SphereConstraint.cc +++ b/core/src/constraints/SphereConstraint.cc @@ -46,7 +46,7 @@ namespace jiminy transformRef_ = transformRef; } - pinocchio::SE3 & SphereConstraint::getReferenceTransform(void) + pinocchio::SE3 const & 
SphereConstraint::getReferenceTransform(void) const { return transformRef_; } diff --git a/core/src/constraints/WheelConstraint.cc b/core/src/constraints/WheelConstraint.cc index 564b2aded..3c06e7369 100644 --- a/core/src/constraints/WheelConstraint.cc +++ b/core/src/constraints/WheelConstraint.cc @@ -50,7 +50,7 @@ namespace jiminy transformRef_ = transformRef; } - pinocchio::SE3 & WheelConstraint::getReferenceTransform(void) + pinocchio::SE3 const & WheelConstraint::getReferenceTransform(void) const { return transformRef_; } diff --git a/core/src/control/AbstractController.cc b/core/src/control/AbstractController.cc index b6823af8e..5e3d78de6 100644 --- a/core/src/control/AbstractController.cc +++ b/core/src/control/AbstractController.cc @@ -47,13 +47,15 @@ namespace jiminy // Backup robot robot_ = robotIn; + /* Set initialization flag to true temporarily to enable calling + 'reset', 'computeCommand' and 'internalDynamics' methods. */ + isInitialized_ = true; + // Reset the controller completely - reset(true); + reset(true); // It cannot fail at this point try { - // isInitialized_ must be true to execute the 'computeCommand' and 'internalDynamics' methods - isInitialized_ = true; float64_t t = 0.0; vectorN_t q = pinocchio::neutral(robot->pncModel_); vectorN_t v = vectorN_t::Zero(robot->nv()); @@ -80,6 +82,8 @@ namespace jiminy catch (std::exception const & e) { isInitialized_ = false; + robot_.reset(); + sensorsData_.clear(); PRINT_ERROR("Something is wrong, probably because of 'commandFct'.\n" "Raised from exception: ", e.what()); return hresult_t::ERROR_GENERIC; @@ -90,6 +94,12 @@ namespace jiminy hresult_t AbstractController::reset(bool_t const & resetDynamicTelemetry) { + if (!isInitialized_) + { + PRINT_ERROR("The controller is not initialized."); + return hresult_t::ERROR_INIT_FAILED; + } + // Reset the telemetry buffer of dynamically registered quantities if (resetDynamicTelemetry) { diff --git a/core/src/engine/EngineMultiRobot.cc b/core/src/engine/EngineMultiRobot.cc index c428b7914..c6a660c72 100644 --- a/core/src/engine/EngineMultiRobot.cc +++ b/core/src/engine/EngineMultiRobot.cc @@ -880,9 +880,21 @@ namespace jiminy systemData.state.clear(); systemData.statePrev.clear(); } + + // Instantiate desired LCP solver + std::string const & contactSolver = engineOptions_->contacts.solver; + if (CONTACT_SOLVERS_MAP.at(contactSolver) == contactSolver_t::PGS) + { + contactSolver_ = std::make_unique( + PGS_MAX_ITERATIONS, + PGS_RANDOM_PERMUTATION_PERIOD, + engineOptions_->contacts.tolAbs, + engineOptions_->contacts.tolRel); + } } - void computeExtraTerms(systemHolder_t & system) + void computeExtraTerms(systemHolder_t & system, + systemDataHolder_t const & systemData) { /// This method is optimized to avoid redundant computations. 
/// See `pinocchio::computeAllTerms` for reference: @@ -931,8 +943,9 @@ namespace jiminy for (int32_t i = 1; i < model.njoints; ++i) { data.h[i] = model.inertias[i] * data.v[i]; - data.f[i] = model.inertias[i] * data.a[i] + data.v[i].cross(data.h[i]); + data.f[i] = model.inertias[i] * data.a[i] + data.v[i].cross(data.h[i]) - systemData.state.fExternal[i]; } + for (int32_t i = model.njoints - 1; i > 0; --i) { jointIndex_t const & parentIdx = model.parents[i]; @@ -960,11 +973,14 @@ namespace jiminy data.dhg.angular() += data.dhg.linear().cross(data.com[0]); } - void computeAllExtraTerms(std::vector & systems) + void computeAllExtraTerms(std::vector & systems, + vector_aligned_t const & systemsDataHolder) { - for (auto & system : systems) + auto systemIt = systems.begin(); + auto systemDataIt = systemsDataHolder.begin(); + for ( ; systemIt != systems.end(); ++systemIt, ++systemDataIt) { - computeExtraTerms(system); + computeExtraTerms(*systemIt, *systemDataIt); } } @@ -1283,17 +1299,24 @@ namespace jiminy collisionPairsIdx[i].size(), pinocchio::Force::Zero()); } - // Set Baumgarte stabilization natural frequency for every constraints - systemDataIt->constraintsHolder.foreach( - [freq = engineOptions_->contacts.stabilizationFreq]( // by-copy to avoid compilation failure for gcc<7.3 - std::shared_ptr const & constraint, - constraintsHolderType_t const & /* holderType */) - { - if (constraint) + // Set Baumgarte stabilization natural frequency for contact constraints + std::array holderTypes {{ + constraintsHolderType_t::BOUNDS_JOINTS, + constraintsHolderType_t::CONTACT_FRAMES, + constraintsHolderType_t::COLLISION_BODIES}}; + for (constraintsHolderType_t holderType : holderTypes) + { + systemDataIt->constraintsHolder.foreach(holderType, + [freq = engineOptions_->contacts.stabilizationFreq]( // by-copy to avoid compilation failure for gcc<7.3 + std::shared_ptr const & constraint, + constraintsHolderType_t const & /* holderType */) { - constraint->setBaumgarteFreq(freq); // It cannot fail at this point - } - }); + if (constraint) + { + constraint->setBaumgarteFreq(freq); // It cannot fail at this point + } + }); + } // Initialize some addition buffers used by impulse contact solver systemDataIt->jointJacobian = matrixN_t::Zero(6, systemIt->robot->pncModel_.nv); @@ -1311,7 +1334,7 @@ namespace jiminy auto & constraint = systemDataIt->constraintsHolder.contactFrames[i].second; pinocchio::Force & fextLocal = systemDataIt->contactFramesForces[i]; computeContactDynamicsAtFrame( - *systemIt, contactFramesIdx[i], q, v, constraint, fextLocal); + *systemIt, contactFramesIdx[i], constraint, fextLocal); forceMax = std::max(forceMax, fextLocal.linear().norm()); } @@ -1323,7 +1346,7 @@ namespace jiminy auto & constraint = systemDataIt->constraintsHolder.collisionBodies[i][j].second; pinocchio::Force & fextLocal = systemDataIt->collisionBodiesForces[i][j]; computeContactDynamicsAtBody( - *systemIt, collisionPairIdx, q, v, constraint, fextLocal); + *systemIt, collisionPairIdx, constraint, fextLocal); forceMax = std::max(forceMax, fextLocal.linear().norm()); } } @@ -1400,7 +1423,7 @@ namespace jiminy a = computeAcceleration(*systemIt, *systemDataIt, q, v, u, fext); // Compute joints accelerations and forces - computeExtraTerms(*systemIt); + computeExtraTerms(*systemIt, *systemDataIt); syncAccelerationsAndForces(*systemIt, *fPrevIt, *aPrevIt); // Update the sensor data once again, with the updated effort and acceleration @@ -1825,7 +1848,7 @@ namespace jiminy */ if (!std::isfinite(stepperUpdatePeriod_) || 
!engineOptions_->stepper.logInternalStepperSteps) { - bool mustUpdateTelemetry = !std::isfinite(stepperUpdatePeriod_); + bool_t mustUpdateTelemetry = !std::isfinite(stepperUpdatePeriod_); if (!mustUpdateTelemetry) { float64_t dtNextStepperUpdatePeriod = stepperUpdatePeriod_ - std::fmod(t, stepperUpdatePeriod_); @@ -1842,7 +1865,7 @@ namespace jiminy if (!std::isfinite(stepperUpdatePeriod_) && hasDynamicsChanged) { computeSystemsDynamics(t, qSplit, vSplit, aSplit); - computeAllExtraTerms(systems_); + computeAllExtraTerms(systems_, systemsDataHolder_); syncAllAccelerationsAndForces(systems_, fPrev_, aPrev_); syncSystemsStateWithStepper(true); hasDynamicsChanged = false; @@ -1891,7 +1914,7 @@ namespace jiminy if (hasDynamicsChanged) { computeSystemsDynamics(t, qSplit, vSplit, aSplit); - computeAllExtraTerms(systems_); + computeAllExtraTerms(systems_, systemsDataHolder_); syncAllAccelerationsAndForces(systems_, fPrev_, aPrev_); syncSystemsStateWithStepper(true); hasDynamicsChanged = false; @@ -1956,7 +1979,7 @@ namespace jiminy /* Compute the actual joint acceleration and forces, based on up-to-date pinocchio::Data. */ - computeAllExtraTerms(systems_); + computeAllExtraTerms(systems_, systemsDataHolder_); // Synchronize the individual system states syncAllAccelerationsAndForces(systems_, fPrev_, aPrev_); @@ -2047,7 +2070,7 @@ namespace jiminy /* Compute the actual joint acceleration and forces, based on up-to-date pinocchio::Data. */ - computeAllExtraTerms(systems_); + computeAllExtraTerms(systems_, systemsDataHolder_); // Synchronize the individual system states syncAllAccelerationsAndForces(systems_, fPrev_, aPrev_); @@ -2089,7 +2112,7 @@ namespace jiminy // Update sensors data if necessary, namely if time-continuous or breakpoint float64_t const & sensorsUpdatePeriod = engineOptions_->stepper.sensorsUpdatePeriod; - bool mustUpdateSensors = sensorsUpdatePeriod < EPS; + bool_t mustUpdateSensors = sensorsUpdatePeriod < EPS; float64_t dtNextSensorsUpdatePeriod = sensorsUpdatePeriod - std::fmod(t, sensorsUpdatePeriod); if (!mustUpdateSensors) { @@ -2610,16 +2633,6 @@ namespace jiminy // Backup contact model as enum for fast check contactModel_ = contactModelIt->second; - // Instantiate desired LCP solver - if (contactSolverIt->second == contactSolver_t::PGS) - { - contactSolver_ = std::make_unique( - PGS_MAX_ITERATIONS, - PGS_RANDOM_PERMUTATION_PERIOD, - engineOptions_->stepper.tolAbs, - engineOptions_->stepper.tolRel); - } - // Set breakpoint period during the integration loop stepperUpdatePeriod_ = minUpdatePeriod; @@ -2782,7 +2795,7 @@ namespace jiminy pinocchio::Model const & model = system.robot->pncModel_; pinocchio::Data & data = system.robot->pncData_; pinocchio::GeometryModel const & geomModel = system.robot->collisionModel_; - pinocchio::GeometryData & geomData = *system.robot->pncGeometryData_; + pinocchio::GeometryData & geomData = *system.robot->pncCollisionData_; // Update forward kinematics pinocchio::forwardKinematics(model, data, q, v, a); @@ -2849,8 +2862,6 @@ namespace jiminy void EngineMultiRobot::computeContactDynamicsAtBody(systemHolder_t const & system, pairIndex_t const & collisionPairIdx, - vectorN_t const & q, - vectorN_t const & v, std::shared_ptr & constraint, pinocchio::Force & fextLocal) const { @@ -2863,11 +2874,17 @@ namespace jiminy jointIndex_t const & parentJointIdx = system.robot->collisionModel_.geometryObjects[geometryIdx].parentJoint; // Extract collision and distance results - hpp::fcl::CollisionResult const & collisionResult = 
system.robot->pncGeometryData_->collisionResults[collisionPairIdx]; + hpp::fcl::CollisionResult const & collisionResult = system.robot->pncCollisionData_->collisionResults[collisionPairIdx]; fextLocal.setZero(); - bool_t isColliding = false; + // There is no way to get access to the distance from the ground at this point, + // so it is not possible to disable the constraint only if depth > transitionEps. + if (constraint) + { + constraint->disable(); + } + for (uint32_t i = 0; i < collisionResult.numContacts(); ++i) { /* Extract the contact information. @@ -2886,7 +2903,6 @@ namespace jiminy { continue; } - isColliding = true; // Make sure the normal is always pointing upward, and the penetration depth is negative if (nGround[2] < 0.0) @@ -2916,45 +2932,29 @@ namespace jiminy else { // Some geometry shapes are not supported for now, if so the constraint is not initialized - if (constraint && !constraint->getIsEnabled()) + if (constraint) { - constraint->reset(q, v); + // In case of slippage the contact point has actually moved and must be updated constraint->enable(); auto & frameConstraint = static_cast(*constraint.get()); - vector3_t & positionRef = frameConstraint.getReferenceTransform().translation(); - positionRef.noalias() -= depth * nGround; - } - } - } + frameIndex_t const & frameIdx = frameConstraint.getFrameIdx(); + frameConstraint.setReferenceTransform({ + system.robot->pncData_.oMf[frameIdx].rotation(), + system.robot->pncData_.oMf[frameIdx].translation() - depth * nGround + }); + frameConstraint.setLocalFrame( + quaternion_t::FromTwoVectors(vector3_t::UnitZ(), nGround).toRotationMatrix() + ); - if (!isColliding) - { - if (constraint) - { - // There is no way to get access to the distance from the ground at this point, - // so it is not possible to disable the constraint only if depth > transitionEps. - constraint->disable(); + // Only one contact constraint per collision body is supported for now + break; + } } } - - /* One must update tangential reference position with the current one, - because in case of slippage the contact point has actually moved. */ - if (constraint && constraint->getIsEnabled()) - { - // auto & collisionConstraint = static_cast(*constraint.get()); - auto & collisionConstraint = static_cast(*constraint.get()); - frameIndex_t const & frameIdx = collisionConstraint.getFrameIdx(); - vector3_t const & posFrame = system.robot->pncData_.oMf[frameIdx].translation(); - vector3_t & positionRef = collisionConstraint.getReferenceTransform().translation(); - vector3_t const nGround = (vector3_t() << 0.0, 0.0, 1.0).finished(); //TODO assuming normal ground contact for now - positionRef = (posFrame + (positionRef - posFrame).dot(nGround) * nGround).eval(); - } } void EngineMultiRobot::computeContactDynamicsAtFrame(systemHolder_t const & system, frameIndex_t const & frameIdx, - vectorN_t const & q, - vectorN_t const & v, std::shared_ptr & constraint, pinocchio::Force & fextLocal) const { @@ -2999,15 +2999,10 @@ namespace jiminy } else { - // Enable fixed frame constraint and reset it if it was disable, - // then move the reference position at the surface of the ground. 
+ // Enable fixed frame constraint if (!constraint->getIsEnabled()) { - constraint->reset(q, v); constraint->enable(); - auto & frameConstraint = static_cast(*constraint.get()); - vector3_t & positionRef = frameConstraint.getReferenceTransform().translation(); - positionRef.noalias() -= depth * nGround; } } } @@ -3026,13 +3021,19 @@ namespace jiminy } } - /* One must update tangential reference position with the current one, + /* Move the reference position at the surface of the ground. + Note that it is must done systematically as long as the constraint is enabled because in case of slippage the contact point has actually moved. */ if (constraint->getIsEnabled()) { auto & frameConstraint = static_cast(*constraint.get()); - vector3_t & positionRef = frameConstraint.getReferenceTransform().translation(); - positionRef = (posFrame + (positionRef - posFrame).dot(nGround) * nGround).eval(); + frameConstraint.setReferenceTransform({ + system.robot->pncData_.oMf[frameIdx].rotation(), + (vector3_t() << posFrame.head<2>(), zGround).finished() + }); + frameConstraint.setLocalFrame( + quaternion_t::FromTwoVectors(vector3_t::UnitZ(), nGround).toRotationMatrix() + ); } } @@ -3196,8 +3197,9 @@ namespace jiminy // Enable fixed joint constraint and reset it if it was disable if (!constraint->getIsEnabled()) { - constraint->reset(q, v); constraint->enable(); + auto & jointConstraint = static_cast(*constraint.get()); + jointConstraint.setReferenceConfiguration(joint.jointConfigSelector(q)); } } else /* if (qJoint < qJointMax - engineOptions_->contacts.transitionEps */ @@ -3395,8 +3397,6 @@ namespace jiminy void EngineMultiRobot::computeCollisionForces(systemHolder_t const & system, systemDataHolder_t & systemData, - vectorN_t const & q, - vectorN_t const & v, forceVector_t & fext) const { // Compute the forces at contact points @@ -3407,7 +3407,7 @@ namespace jiminy frameIndex_t const & frameIdx = contactFramesIdx[i]; auto & constraint = systemData.constraintsHolder.contactFrames[i].second; pinocchio::Force & fextLocal = systemData.contactFramesForces[i]; - computeContactDynamicsAtFrame(system, frameIdx, q, v, constraint, fextLocal); + computeContactDynamicsAtFrame(system, frameIdx, constraint, fextLocal); // Apply the force at the origin of the parent joint frame, in local joint frame jointIndex_t const & parentJointIdx = system.robot->pncModel_.frames[frameIdx].parent; @@ -3432,7 +3432,7 @@ namespace jiminy pairIndex_t const & collisionPairIdx = collisionPairsIdx[i][j]; auto & constraint = systemData.constraintsHolder.collisionBodies[i][j].second; pinocchio::Force & fextLocal = systemData.collisionBodiesForces[i][j]; - computeContactDynamicsAtBody(system, collisionPairIdx, q, v, constraint, fextLocal); + computeContactDynamicsAtBody(system, collisionPairIdx, constraint, fextLocal); // Apply the force at the origin of the parent joint frame, in local joint frame fext[parentJointIdx] += fextLocal; @@ -3555,7 +3555,7 @@ namespace jiminy /* Compute the collision forces and estimated time at which the contact state will changed (Take-off / Touch-down). */ - computeCollisionForces(*systemIt, *systemDataIt, *qIt, *vIt, fext); + computeCollisionForces(*systemIt, *systemDataIt, fext); // Compute the external contact forces. 
computeExternalForces(*systemIt, *systemDataIt, t, *qIt, *vIt, fext); @@ -3731,7 +3731,8 @@ namespace jiminy for (constraintsHolderType_t holderType : holderTypes) { systemData.constraintsHolder.foreach(holderType, - [&lo, &hi, &fIdx, &constraintIdx, &contactOptions = const_cast(engineOptions_->contacts)]( + [&lo, &hi, &fIdx, &constraintIdx, + &contactOptions = const_cast(engineOptions_->contacts)]( std::shared_ptr const & constraint, constraintsHolderType_t const & /* holderType */) { @@ -3739,16 +3740,18 @@ namespace jiminy { return; } - // auto const & frameConstraint = static_cast(*constraint.get()); - // if (frameConstraint.getIsTranslationFixed()) - // { - // Enforce friction pyramid - hi[constraintIdx] = contactOptions.friction; // Friction along x-axis - fIdx[constraintIdx] = static_cast(constraintIdx + 2); - hi[constraintIdx + 1] = contactOptions.friction; // Friction along y-axis - fIdx[constraintIdx + 1] = static_cast(constraintIdx + 2); - lo[constraintIdx + 2] = 0.0; - // } + + // Enforce tangential friction pyramid and torsional friction + hi[constraintIdx] = contactOptions.friction; // Friction along x-axis + fIdx[constraintIdx] = static_cast(constraintIdx + 2); + hi[constraintIdx + 1] = contactOptions.friction; // Friction along y-axis + fIdx[constraintIdx + 1] = static_cast(constraintIdx + 2); + hi[constraintIdx + 3] = contactOptions.torsion; // Friction around z-axis + fIdx[constraintIdx + 3] = static_cast(constraintIdx + 2); + + // Vertical force cannot be negative + lo[constraintIdx + 2] = 0.0; + constraintIdx += constraint->getDim(); }); } @@ -3783,13 +3786,27 @@ namespace jiminy hi, fIdx); - // Restore contact frame forces and bounds internal efforts + // Update lagrangian multipliers associated with the constraint constraintIdx = 0U; + systemData.constraintsHolder.foreach( + [&lambda_c = const_cast(data.lambda_c), // std::as_const is not supported by gcc<7.3 + &constraintIdx]( + std::shared_ptr const & constraint, + constraintsHolderType_t const & /* holderType */) + { + if (!constraint->getIsEnabled()) + { + return; + } + uint64_t const constraintDim = constraint->getDim(); + constraint->lambda_ = lambda_c.segment(constraintIdx, constraintDim); + constraintIdx += constraintDim; + }); + + // Restore contact frame forces and bounds internal efforts systemData.constraintsHolder.foreach( constraintsHolderType_t::BOUNDS_JOINTS, - [&constraintIdx, - &lambda_c = const_cast(data.lambda_c), // std::as_const is not supported by gcc<7.3 - &u = systemData.state.u, + [&u = systemData.state.u, &uInternal = systemData.state.uInternal, &joints = const_cast(model.joints)]( std::shared_ptr & constraint, @@ -3801,13 +3818,11 @@ namespace jiminy } vectorN_t & uJoint = constraint->lambda_; - uJoint = lambda_c.segment(constraintIdx, constraint->getDim()); auto const & jointConstraint = static_cast(*constraint.get()); auto const & jointModel = joints[jointConstraint.getJointIdx()]; jointModel.jointVelocitySelector(uInternal) += uJoint; jointModel.jointVelocitySelector(u) += uJoint; - constraintIdx += constraint->getDim(); }); constraintIt = systemData.constraintsHolder.contactFrames.begin(); @@ -3821,28 +3836,33 @@ namespace jiminy continue; } auto const & frameConstraint = static_cast(constraint); - // if (frameConstraint.getIsTranslationFixed()) - // { - // Extract part of the lagrangian multipliers associated with the constraint - vectorN_t & fextWorld = constraint.lambda_; - fextWorld = data.lambda_c.segment<3>(constraintIdx); - // Convert the force from local world aligned to local 
frame - frameIndex_t const & frameIdx = frameConstraint.getFrameIdx(); - pinocchio::SE3 const & transformContactInWorld = data.oMf[frameIdx]; - forceIt->linear().noalias() = transformContactInWorld.rotation().transpose() * fextWorld; + // Extract force in local reference-frame-aligned from lagrangian multipliers + pinocchio::Force fextInLocal( + frameConstraint.lambda_.head<3>(), + frameConstraint.lambda_[3] * vector3_t::UnitZ()); - // Convert the force from local world aligned to local parent joint - jointIndex_t const & jointIdx = model.frames[frameIdx].parent; - fext[jointIdx] += convertForceGlobalFrameToJoint( - model, data, frameIdx, {fextWorld, vector3_t::Zero()}); - // } - constraintIdx += constraint.getDim(); + // Compute force in local world aligned frame + matrix3_t const & rotationLocal = frameConstraint.getLocalFrame(); + pinocchio::Force const fextInWorld({ + rotationLocal * fextInLocal.linear(), + rotationLocal * fextInLocal.angular(), + }); + + // Convert the force from local world aligned frame to local frame + frameIndex_t const & frameIdx = frameConstraint.getFrameIdx(); + auto rotationWorldInContact = data.oMf[frameIdx].rotation().transpose(); + forceIt->linear().noalias() = rotationWorldInContact * fextInWorld.linear(); + forceIt->angular().noalias() = rotationWorldInContact * fextInWorld.angular(); + + // Convert the force from local world aligned to local parent joint + jointIndex_t const & jointIdx = model.frames[frameIdx].parent; + fext[jointIdx] += convertForceGlobalFrameToJoint(model, data, frameIdx, fextInWorld); } systemData.constraintsHolder.foreach( constraintsHolderType_t::COLLISION_BODIES, - [&constraintIdx, &fext, &model, &data]( + [&fext, &model, &data]( std::shared_ptr & constraint, constraintsHolderType_t const & /* holderType */) { @@ -3850,21 +3870,24 @@ namespace jiminy { return; } - - // auto const & collisionConstraint = static_cast(*constraint.get()); - auto const & collisionConstraint = static_cast(*constraint.get()); - - // Extract part of the lagrangian multipliers associated with the constraint - vectorN_t & fextWorld = constraint->lambda_; - fextWorld = data.lambda_c.segment<3>(constraintIdx); + auto const & frameConstraint = static_cast(*constraint.get()); + + // Extract force in world frame from lagrangian multipliers + pinocchio::Force fextInLocal( + frameConstraint.lambda_.head<3>(), + frameConstraint.lambda_[3] * vector3_t::UnitZ()); + + // Compute force in world frame + matrix3_t const & rotationLocal = frameConstraint.getLocalFrame(); + pinocchio::Force const fextInWorld({ + rotationLocal * fextInLocal.linear(), + rotationLocal * fextInLocal.angular(), + }); // Convert the force from local world aligned to local parent joint - frameIndex_t const & frameIdx = collisionConstraint.getFrameIdx(); + frameIndex_t const & frameIdx = frameConstraint.getFrameIdx(); jointIndex_t const & jointIdx = model.frames[frameIdx].parent; - fext[jointIdx] += convertForceGlobalFrameToJoint( - model, data, frameIdx, {fextWorld, vector3_t::Zero()}); - - constraintIdx += constraint->getDim(); + fext[jointIdx] += convertForceGlobalFrameToJoint(model, data, frameIdx, fextInWorld); }); return data.ddq; diff --git a/core/src/robot/Model.cc b/core/src/robot/Model.cc index b48e983dd..ba4dee805 100644 --- a/core/src/robot/Model.cc +++ b/core/src/robot/Model.cc @@ -130,8 +130,8 @@ namespace jiminy return false; } - std::shared_ptr constraintsHolder_t::get(std::string const & key, - constraintsHolderType_t const & holderType) + std::shared_ptr 
constraintsHolder_t::get(std::string const & key, + constraintsHolderType_t const & holderType) { constraintsMap_t * constraintsMapPtr; constraintsMap_t::iterator constraintIt; std::tie(constraintsMapPtr, constraintIt) = find(key, holderType); @@ -156,7 +156,7 @@ namespace jiminy return {}; } - void constraintsHolder_t::insert(constraintsMap_t const & constraintsMap, + void constraintsHolder_t::insert(constraintsMap_t const & constraintsMap, constraintsHolderType_t const & holderType) { switch (holderType) @@ -176,7 +176,7 @@ namespace jiminy } } - constraintsMap_t::iterator constraintsHolder_t::erase(std::string const & key, + constraintsMap_t::iterator constraintsHolder_t::erase(std::string const & key, constraintsHolderType_t const & holderType) { constraintsMap_t * constraintsMapPtr; constraintsMap_t::iterator constraintIt; @@ -189,12 +189,13 @@ namespace jiminy } Model::Model(void) : + pncModelRigidOrig_(), pncModel_(), - pncData_(), collisionModel_(), - pncGeometryData_(nullptr), - pncModelRigidOrig_(), + visualModel_(), pncDataRigidOrig_(), + pncData_(), + pncCollisionData_(nullptr), mdlOptions_(nullptr), contactForces_(), isInitialized_(false), @@ -235,7 +236,8 @@ namespace jiminy } hresult_t Model::initialize(pinocchio::Model const & pncModel, - pinocchio::GeometryModel const & collisionModel) + pinocchio::GeometryModel const & collisionModel, + pinocchio::GeometryModel const & visualModel) { hresult_t returnCode = hresult_t::SUCCESS; @@ -255,7 +257,7 @@ namespace jiminy constraintsLambda_.resize(0); jointsAcceleration_.clear(); - // Initialize URDF info + // Reset URDF info joint_t rootJointType; getJointTypeFromIdx(pncModel, 1, rootJointType); // It cannot fail. urdfPath_ = ""; @@ -265,6 +267,7 @@ namespace jiminy // Set the models pncModelRigidOrig_ = pncModel; collisionModel_ = collisionModel; + visualModel_ = visualModel; // Add ground geometry object to collision model is not already available if (!collisionModel_.existGeometryName("ground")) @@ -279,7 +282,7 @@ namespace jiminy // Its parent frame and parent joint are the universe. It is aligned with world frame, // and the top face is the actual ground surface. 
pinocchio::SE3 groundPose = pinocchio::SE3::Identity(); - groundPose.translation() = (vector3_t() << 0.0, 0.0, -1.0).finished(); + groundPose.translation() = - vector3_t::UnitZ(); pinocchio::GeometryObject groundPlane("ground", 0, 0, groudBox, groundPose); // Add the ground plane pinocchio to the robot model @@ -352,14 +355,15 @@ namespace jiminy // Load new robot and collision models pinocchio::Model pncModel; - pinocchio::GeometryModel pncGeometryModel; + pinocchio::GeometryModel pncCollisionModel; + pinocchio::GeometryModel pncVisualModel; returnCode = buildModelsFromUrdf( - urdfPath, hasFreeflyer, meshPackageDirs, pncModel, pncGeometryModel); + urdfPath, hasFreeflyer, meshPackageDirs, pncModel, pncCollisionModel, pncVisualModel); // Initialize jiminy model if (returnCode == hresult_t::SUCCESS) { - returnCode = initialize(pncModel, pncGeometryModel); + returnCode = initialize(pncModel, pncCollisionModel, pncVisualModel); } // Backup URDF info @@ -613,7 +617,7 @@ namespace jiminy // collisionConstraintsMap.emplace_back(geom.name, std::make_shared( // geom.name, sphere.radius)); collisionConstraintsMap.emplace_back(geom.name, std::make_shared( - geom.name, (Eigen::Matrix() << true, true, true, false, false, false).finished())); + geom.name, (Eigen::Matrix() << true, true, true, false, false, true).finished())); } else { @@ -760,7 +764,7 @@ namespace jiminy for (std::string const & frameName : frameNames) { frameConstraintsMap.emplace_back(frameName, std::make_shared( - frameName, (Eigen::Matrix() << true, true, true, false, false, false).finished())); + frameName, (Eigen::Matrix() << true, true, true, false, false, true).finished())); } returnCode = addConstraints(frameConstraintsMap, constraintsHolderType_t::CONTACT_FRAMES); @@ -1453,24 +1457,24 @@ namespace jiminy if (returnCode == hresult_t::SUCCESS) { // Update geometry data object after changing the collision pairs - if (pncGeometryData_.get()) + if (pncCollisionData_.get()) { // No object stored at this point, so created a new one - *pncGeometryData_ = pinocchio::GeometryData(collisionModel_); + *pncCollisionData_ = pinocchio::GeometryData(collisionModel_); } else { /* Use copy assignment to avoid changing memory pointers, to avoid dangling reference at Python-side. 
*/ - pncGeometryData_ = std::make_unique(collisionModel_); + pncCollisionData_ = std::make_unique(collisionModel_); } pinocchio::updateGeometryPlacements(pncModel_, pncData_, collisionModel_, - *pncGeometryData_); + *pncCollisionData_); // Set the max number of contact points per collision pairs - for (hpp::fcl::CollisionRequest & collisionRequest : pncGeometryData_->collisionRequests) + for (hpp::fcl::CollisionRequest & collisionRequest : pncCollisionData_->collisionRequests) { collisionRequest.num_max_contacts = mdlOptions_->collisions.maxContactPointsPerBody; } @@ -1734,6 +1738,11 @@ namespace jiminy return isInitialized_; } + std::string const & Model::getName(void) const + { + return pncModelRigidOrig_.name; + } + std::string const & Model::getUrdfPath(void) const { return urdfPath_; diff --git a/core/src/robot/Robot.cc b/core/src/robot/Robot.cc index 61c2159e1..e459fce2f 100644 --- a/core/src/robot/Robot.cc +++ b/core/src/robot/Robot.cc @@ -58,6 +58,19 @@ namespace jiminy return Model::initialize(urdfPath, hasFreeflyer, meshPackageDirs); } + hresult_t Robot::initialize(pinocchio::Model const & pncModel, + pinocchio::GeometryModel const & collisionModel, + pinocchio::GeometryModel const & visualModel) + { + // Detach all the motors and sensors + detachSensors({}); + detachMotors({}); + + /* Delete the current model and generate a new one. + Note that is also refresh all proxies automatically. */ + return Model::initialize(pncModel, collisionModel, visualModel); + } + void Robot::reset(void) { // Reset the model diff --git a/core/src/solver/LCPSolvers.cc b/core/src/solver/LCPSolvers.cc index db05aa836..44d43593e 100644 --- a/core/src/solver/LCPSolvers.cc +++ b/core/src/solver/LCPSolvers.cc @@ -32,8 +32,6 @@ namespace jiminy vectorN_t const & lo, vectorN_t const & hi, std::vector const & fIdx, - bool_t const & checkAbs, - bool_t const & checkRel, vectorN_t & x) { bool_t isSuccess = true; @@ -47,32 +45,28 @@ namespace jiminy ++lastShuffle_; // Update every coefficients sequentially - for (int32_t const & i : indices_) + for (uint32_t const & i : indices_) { + // Extract single coefficient + float64_t & e = x[i]; + float64_t const ePrev = e; + // Update a single coefficient - float64_t const xPrev = x[i]; - x[i] += (b[i] - A.row(i).dot(x)) / A(i, i); + e += (b[i] - A.col(i).dot(x)) / A(i, i); // Project the coefficient between lower and upper bounds if (fIdx[i] < 0) { - x[i] = clamp(x[i], lo[i], hi[i]); + e = clamp(e, lo[i], hi[i]); } else { float64_t const hiTmp = hi[i] * x[fIdx[i]]; - x[i] = clamp(x[i], - hiTmp, hiTmp); + e = clamp(e, -hiTmp, hiTmp); } // Check if still possible to terminate after complete update - if (checkAbs) - { - isSuccess = isSuccess && (std::abs(x[i] - xPrev) < tolAbs_); - } - if (checkRel && std::abs(x[i]) > EPS_DIVISION) - { - isSuccess = isSuccess && (std::abs((x[i] - xPrev) / x[i]) < tolRel_); - } + isSuccess = isSuccess && (std::abs(e - ePrev) < tolAbs_ || std::abs((e - ePrev) / e) < tolRel_); } return isSuccess; @@ -92,7 +86,7 @@ namespace jiminy /* Adapt shuffling indices if the number of indices has changed. Note that it may converge faster to enforce constraints in reverse order, since usually constraints bounds dependending on others have lower indices - by design. For instance, for friction, x and y */ + by design, aka. the linear friction pyramid. 
*/ size_t const nIndicesOrig = indices_.size(); size_t const nIndices = b.size(); if (nIndicesOrig < nIndices) @@ -122,17 +116,10 @@ namespace jiminy indices_.resize(nIndices); } - // Normalizing - for (Eigen::Index i = 0; i < b.size(); ++i) - { - b[i] /= A(i, i); - A.row(i).array() /= A(i, i); - } - // Perform multiple PGS loop until convergence or max iter reached for (uint32_t iter = 0; iter < maxIter_; ++iter) { - bool_t isSuccess = ProjectedGaussSeidelIter(A, b, lo, hi, fIdx, false, true, x); + bool_t isSuccess = ProjectedGaussSeidelIter(A, b, lo, hi, fIdx, x); if (isSuccess) { // Do NOT shuffle indices unless necessary to avoid discontinuities diff --git a/core/src/utilities/Helpers.cc b/core/src/utilities/Helpers.cc index b019eba3a..402dde4c4 100644 --- a/core/src/utilities/Helpers.cc +++ b/core/src/utilities/Helpers.cc @@ -169,20 +169,4 @@ namespace jiminy auto start = std::find(header.begin(), header.end(), "StartColumns"); return logData.col(std::distance(start, iterator) - 1); } - - // ********************** Math utilities ************************* - - float64_t clamp(float64_t const & data, - float64_t const & minThr, - float64_t const & maxThr) - { - if (!isnan(data)) - { - return std::min(std::max(data, minThr), maxThr); - } - else - { - return 0.0; - } - } } diff --git a/core/src/utilities/Json.cc b/core/src/utilities/Json.cc index 03d63487c..f6cd58974 100644 --- a/core/src/utilities/Json.cc +++ b/core/src/utilities/Json.cc @@ -58,7 +58,7 @@ namespace jiminy } template<> - Json::Value convertToJson(heatMapFunctor_t const & /* value */) + Json::Value convertToJson(heightmapFunctor_t const & /* value */) { return {"not supported"}; } @@ -201,13 +201,13 @@ namespace jiminy } template<> - heatMapFunctor_t convertFromJson(Json::Value const & /* value */) + heightmapFunctor_t convertFromJson(Json::Value const & /* value */) { return { - heatMapFunctor_t( - [](vector3_t const & /* pos */) -> std::pair + heightmapFunctor_t( + [](vector3_t const & /* pos */) -> std::pair { - return {0.0, (vector3_t() << 0.0, 0.0, 1.0).finished()}; + return {0.0, vector3_t::UnitZ()}; }) }; } diff --git a/core/src/utilities/Random.cc b/core/src/utilities/Random.cc index 3b96f3e50..f7605036b 100644 --- a/core/src/utilities/Random.cc +++ b/core/src/utilities/Random.cc @@ -796,4 +796,241 @@ namespace jiminy { return period_; } -} \ No newline at end of file + + template + std::enable_if_t, float64_t> + randomDouble(Eigen::MatrixBase const & key, + int64_t const & sparsity, + float64_t const & scale, + uint32_t const & seed) + { + int32_t const keyLen = static_cast(sizeof(typename VectorLike::Scalar)) * + static_cast(key.size()); + uint32_t const hash = MurmurHash3(key.derived().data(), keyLen, seed); + if (hash % sparsity == 0) + { + float64_t encoding(hash); + encoding /= std::numeric_limits::max(); + return scale * encoding; + } + return 0.0; + } + + std::pair tile2dInterp1d(Eigen::Matrix & posIdx, + vector2_t const & posRel, + uint32_t const & dim, + vector2_t const & size, + int64_t const & sparsity, + float64_t const & heightMax, + vector2_t const & interpThreshold, + uint32_t const & seed) + { + float64_t const z = randomDouble(posIdx, sparsity, heightMax, seed); + float64_t height, dheight; + if (posRel[dim] < interpThreshold[dim]) + { + posIdx[dim] -= 1; + float64_t const z_m = randomDouble(posIdx, sparsity, heightMax, seed); + posIdx[dim] += 1; + + float64_t const ratio = (1.0 - posRel[dim] / interpThreshold[dim]) / 2.0; + height = z + (z_m - z) * ratio; + dheight = (z - z_m) / (2.0 * size[dim] * 
interpThreshold[dim]); + } + else if (1.0 - posRel[dim] < interpThreshold[dim]) + { + posIdx[dim] += 1; + float64_t const z_p = randomDouble(posIdx, sparsity, heightMax, seed); + posIdx[dim] -= 1; + + float64_t const ratio = (1.0 + (posRel[dim] - 1.0) / interpThreshold[dim]) / 2.0; + height = z + (z_p - z) * ratio; + dheight = (z_p - z) / (2.0 * size[dim] * interpThreshold[dim]); + } + else + { + height = z; + dheight = 0.0; + } + + return {height, dheight}; + } + + heightmapFunctor_t randomTileGround(vector2_t const & size, + float64_t const & heightMax, + vector2_t const & interpDelta, + uint32_t const & sparsity, + float64_t const & orientation, + uint32_t const & seed) + { + if ((0.01 <= interpDelta.array()).all() + && (interpDelta.array() <= size.array() / 2.0).all()) + { + PRINT_WARNING("'interpDelta' must be in range [0.01, 'size'/2.0]."); + } + + vector2_t interpThreshold = interpDelta.cwiseMax(0.01).cwiseMin(size / 2.0); + interpThreshold.array() /= size.array(); + + vector2_t const offset = vector2_t::NullaryExpr( + [&size, &seed] (vectorN_t::Index const & i) -> float64_t + { + Eigen::Matrix key; + key << i; + return randomDouble(key, 1, size[i], seed); + }); + + Eigen::Rotation2D rotationMat(orientation); + + return [size, heightMax, interpDelta, rotationMat, sparsity, interpThreshold, offset, seed]( + vector3_t const & pos3) -> std::pair + { + // Compute the tile index and relative coordinate + vector2_t pos = rotationMat * (pos3.head<2>() + offset); + vector2_t posRel = pos.array() / size.array(); + Eigen::Matrix posIdx = posRel.array().floor().cast(); + posRel -= posIdx.cast(); + + // Interpolate height based on nearby tiles if necessary + Eigen::Matrix isEdge = + (posRel.array() < interpThreshold.array()) || + (1.0 - posRel.array() < interpThreshold.array()); + float64_t height, dheight_x, dheight_y; + if (isEdge[0] && !isEdge[1]) + { + auto result = tile2dInterp1d( + posIdx, posRel, 0, size, sparsity, heightMax, + interpThreshold, seed); + height = std::get<0>(result); + dheight_x = std::get<1>(result); + dheight_y = 0.0; + } + else if (!isEdge[0] && isEdge[1]) + { + auto result = tile2dInterp1d( + posIdx, posRel, 1, size, sparsity, heightMax, + interpThreshold, seed); + height = std::get<0>(result); + dheight_y = std::get<1>(result); + dheight_x = 0.0; + } + else if (isEdge[0] && isEdge[1]) + { + auto result_0 = tile2dInterp1d( + posIdx, posRel, 0, size, sparsity, heightMax, + interpThreshold, seed); + float64_t height_0 = std::get<0>(result_0); + float64_t dheight_x_0 = std::get<1>(result_0); + if (posRel[1] < interpThreshold[1]) + { + posIdx[1] -= 1; + auto result_m = tile2dInterp1d( + posIdx, posRel, 0, size, sparsity, + heightMax, interpThreshold, seed); + float64_t height_m = std::get<0>(result_m); + float64_t dheight_x_m = std::get<1>(result_m); + + float64_t ratio = (1.0 - posRel[1] / interpThreshold[1]) / 2.0; + height = height_0 + (height_m - height_0) * ratio; + dheight_x = dheight_x_0 + (dheight_x_m - dheight_x_0) * ratio; + dheight_y = (height_0 - height_m) / (2.0 * size[1] * interpThreshold[1]); + } + else + { + posIdx[1] += 1; + auto result_p = tile2dInterp1d( + posIdx, posRel, 0, size, sparsity, + heightMax, interpThreshold, seed); + float64_t height_p = std::get<0>(result_p); + float64_t dheight_x_p = std::get<1>(result_p); + + float64_t ratio = (1.0 + (posRel[1] - 1.0) / interpThreshold[1]) / 2.0; + height = height_0 + (height_p - height_0) * ratio; + dheight_x = dheight_x_0 + (dheight_x_p - dheight_x_0) * ratio; + dheight_y = (height_p - height_0) / (2.0 
* size[1] * interpThreshold[1]); + } + } + else + { + height = randomDouble(posIdx, sparsity, heightMax, seed); + dheight_x = 0.0; + dheight_y = 0.0; + } + + // Compute the resulting normal to the surface: (-d.height/d.x, -d.height/d.y, 1.0) + vector3_t normal; + normal << -dheight_x, -dheight_y, 1.0; + normal.normalize(); + + return {height, normal}; + }; + } + + heightmapFunctor_t sumHeightmap(std::vector const & heightmaps) + { + return [heightmaps](vector3_t const & pos3) -> std::pair + { + float64_t height = 0.0; + vector3_t normal = vector3_t::Zero(); + for (heightmapFunctor_t const & heightmap : heightmaps) + { + auto result = heightmap(pos3); + height += std::get(result); + normal += std::get(result); + } + normal.normalize(); + return {height, normal}; + }; + } + + heightmapFunctor_t mergeHeightmap(std::vector const & heightmaps) + { + return [heightmaps](vector3_t const & pos3) -> std::pair + { + float64_t heightmax = -INF; + vector3_t normal = vector3_t::UnitZ(); + for (heightmapFunctor_t const & heightmap : heightmaps) + { + auto result = heightmap(pos3); + float64_t height = std::get(result); + if (std::abs(height - heightmax) < EPS) + { + normal += std::get(result); + } + else if (height > heightmax) + { + heightmax = height; + normal = std::get(result); + } + } + normal.normalize(); + return {heightmax, normal}; + }; + } + + matrixN_t discretizeHeightmap(heightmapFunctor_t const & heightmap, + float64_t const & gridSize, + float64_t const & gridUnit) + { + // Allocate empty discrete grid + uint32_t gridDim = static_cast(std::ceil(gridSize / gridUnit)) + 1U; + matrixN_t heightGrid(gridDim * gridDim, 6); + + // Fill x and y discrete grid coordinates + vectorN_t const values = ( + vectorN_t::LinSpaced(gridDim, 0, gridDim - 1) * gridUnit + ).array() - (gridDim - 1) * (gridUnit / 2.0); + Eigen::Map(heightGrid.col(0).data(), gridDim, gridDim).colwise() = values; + Eigen::Map(heightGrid.col(1).data(), gridDim, gridDim).rowwise() = values.transpose(); + + // Fill discrete grid + for (uint32_t i=0; i < heightGrid.rows(); ++i) + { + auto result = heightmap(heightGrid.block<1, 3>(i, 0)); + heightGrid(i, 2) = std::get(result); + heightGrid.block<1, 3>(i, 3) = std::get(result); + } + + return heightGrid; + } +} diff --git a/data/bipedal_robots/atlas/atlas_v4_hardware.toml b/data/bipedal_robots/atlas/atlas_v4_hardware.toml new file mode 100644 index 000000000..1a1577143 --- /dev/null +++ b/data/bipedal_robots/atlas/atlas_v4_hardware.toml @@ -0,0 +1,382 @@ +[Global] +collisionBodiesNames = ["l_foot", "r_foot"] +contactFramesNames = [] + +# ================== Motors ================== + +[Motor.SimpleMotor.neck_ry] +joint_name = "neck_ry" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.back_bkz] +joint_name = "back_bkz" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.back_bky] +joint_name = "back_bky" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.back_bkx] +joint_name = "back_bkx" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_arm_shz] +joint_name = "l_arm_shz" +armature = 0.2 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_arm_shx] +joint_name = "l_arm_shx" +armature = 0.2 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_arm_ely] +joint_name = "l_arm_ely" +armature = 0.2 +frictionViscousPositive 
= -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.l_arm_elx] +joint_name = "l_arm_elx" +armature = 0.2 +frictionViscousPositive = -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.l_arm_wry] +joint_name = "l_arm_wry" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_arm_wrx] +joint_name = "l_arm_wrx" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_arm_wry2] +joint_name = "l_arm_wry2" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_arm_shz] +joint_name = "r_arm_shz" +armature = 0.2 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_arm_shx] +joint_name = "r_arm_shx" +armature = 0.2 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_arm_ely] +joint_name = "r_arm_ely" +armature = 0.2 +frictionViscousPositive = -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.r_arm_elx] +joint_name = "r_arm_elx" +armature = 0.2 +frictionViscousPositive = -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.r_arm_wry] +joint_name = "r_arm_wry" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_arm_wrx] +joint_name = "r_arm_wrx" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_arm_wry2] +joint_name = "r_arm_wry2" +armature = 0.1 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_leg_hpx] +joint_name = "l_leg_hpx" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_leg_hpz] +joint_name = "l_leg_hpz" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_leg_hpy] +joint_name = "l_leg_hpy" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_leg_kny] +joint_name = "l_leg_kny" +armature = 0.6 +frictionViscousPositive = -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.l_leg_aky] +joint_name = "l_leg_aky" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.l_leg_akx] +joint_name = "l_leg_akx" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_leg_hpx] +joint_name = "r_leg_hpx" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_leg_hpz] +joint_name = "r_leg_hpz" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_leg_hpy] +joint_name = "r_leg_hpy" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_leg_kny] +joint_name = "r_leg_kny" +armature = 0.6 +frictionViscousPositive = -0.2 +frictionViscousNegative = -0.2 + +[Motor.SimpleMotor.r_leg_aky] +joint_name = "r_leg_aky" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +[Motor.SimpleMotor.r_leg_akx] +joint_name = "r_leg_akx" +armature = 0.6 +frictionViscousPositive = -0.1 +frictionViscousNegative = -0.1 + +# =============== IMU sensors ================= + +[Sensor.ImuSensor.pelvis] +frame_name = "pelvis" + +# ============= Force sensors =============== + +[Sensor.ForceSensor.l_foot] +frame_name = "l_foot" + +[Sensor.ForceSensor.r_foot] +frame_name = "r_foot" + +# ============= Effort sensors =============== + 
+[Sensor.EffortSensor.neck_ry] +motor_name = "neck_ry" + +[Sensor.EffortSensor.back_bkz] +motor_name = "back_bkz" + +[Sensor.EffortSensor.back_bky] +motor_name = "back_bky" + +[Sensor.EffortSensor.back_bkx] +motor_name = "back_bkx" + +[Sensor.EffortSensor.l_arm_shz] +motor_name = "l_arm_shz" + +[Sensor.EffortSensor.l_arm_shx] +motor_name = "l_arm_shx" + +[Sensor.EffortSensor.l_arm_ely] +motor_name = "l_arm_ely" + +[Sensor.EffortSensor.l_arm_elx] +motor_name = "l_arm_elx" + +[Sensor.EffortSensor.l_arm_wry] +motor_name = "l_arm_wry" + +[Sensor.EffortSensor.l_arm_wrx] +motor_name = "l_arm_wrx" + +[Sensor.EffortSensor.l_arm_wry2] +motor_name = "l_arm_wry2" + +[Sensor.EffortSensor.r_arm_shz] +motor_name = "r_arm_shz" + +[Sensor.EffortSensor.r_arm_shx] +motor_name = "r_arm_shx" + +[Sensor.EffortSensor.r_arm_ely] +motor_name = "r_arm_ely" + +[Sensor.EffortSensor.r_arm_elx] +motor_name = "r_arm_elx" + +[Sensor.EffortSensor.r_arm_wry] +motor_name = "r_arm_wry" + +[Sensor.EffortSensor.r_arm_wrx] +motor_name = "r_arm_wrx" + +[Sensor.EffortSensor.r_arm_wry2] +motor_name = "r_arm_wry2" + +[Sensor.EffortSensor.l_leg_hpx] +motor_name = "l_leg_hpx" + +[Sensor.EffortSensor.l_leg_hpz] +motor_name = "l_leg_hpz" + +[Sensor.EffortSensor.l_leg_hpy] +motor_name = "l_leg_hpy" + +[Sensor.EffortSensor.l_leg_kny] +motor_name = "l_leg_kny" + +[Sensor.EffortSensor.l_leg_aky] +motor_name = "l_leg_aky" + +[Sensor.EffortSensor.l_leg_akx] +motor_name = "l_leg_akx" + +[Sensor.EffortSensor.r_leg_hpx] +motor_name = "r_leg_hpx" + +[Sensor.EffortSensor.r_leg_hpz] +motor_name = "r_leg_hpz" + +[Sensor.EffortSensor.r_leg_hpy] +motor_name = "r_leg_hpy" + +[Sensor.EffortSensor.r_leg_kny] +motor_name = "r_leg_kny" + +[Sensor.EffortSensor.r_leg_aky] +motor_name = "r_leg_aky" + +[Sensor.EffortSensor.r_leg_akx] +motor_name = "r_leg_akx" + +# ============= Encoder sensors =============== + +[Sensor.EncoderSensor.neck_ry] +joint_name = "neck_ry" + +[Sensor.EncoderSensor.back_bkz] +joint_name = "back_bkz" + +[Sensor.EncoderSensor.back_bky] +joint_name = "back_bky" + +[Sensor.EncoderSensor.back_bkx] +joint_name = "back_bkx" + +[Sensor.EncoderSensor.l_arm_shz] +joint_name = "l_arm_shz" + +[Sensor.EncoderSensor.l_arm_shx] +joint_name = "l_arm_shx" + +[Sensor.EncoderSensor.l_arm_ely] +joint_name = "l_arm_ely" + +[Sensor.EncoderSensor.l_arm_elx] +joint_name = "l_arm_elx" + +[Sensor.EncoderSensor.l_arm_wry] +joint_name = "l_arm_wry" + +[Sensor.EncoderSensor.l_arm_wrx] +joint_name = "l_arm_wrx" + +[Sensor.EncoderSensor.l_arm_wry2] +joint_name = "l_arm_wry2" + +[Sensor.EncoderSensor.r_arm_shz] +joint_name = "r_arm_shz" + +[Sensor.EncoderSensor.r_arm_shx] +joint_name = "r_arm_shx" + +[Sensor.EncoderSensor.r_arm_ely] +joint_name = "r_arm_ely" + +[Sensor.EncoderSensor.r_arm_elx] +joint_name = "r_arm_elx" + +[Sensor.EncoderSensor.r_arm_wry] +joint_name = "r_arm_wry" + +[Sensor.EncoderSensor.r_arm_wrx] +joint_name = "r_arm_wrx" + +[Sensor.EncoderSensor.r_arm_wry2] +joint_name = "r_arm_wry2" + +[Sensor.EncoderSensor.l_leg_hpx] +joint_name = "l_leg_hpx" + +[Sensor.EncoderSensor.l_leg_hpz] +joint_name = "l_leg_hpz" + +[Sensor.EncoderSensor.l_leg_hpy] +joint_name = "l_leg_hpy" + +[Sensor.EncoderSensor.l_leg_kny] +joint_name = "l_leg_kny" + +[Sensor.EncoderSensor.l_leg_aky] +joint_name = "l_leg_aky" + +[Sensor.EncoderSensor.l_leg_akx] +joint_name = "l_leg_akx" + +[Sensor.EncoderSensor.r_leg_hpx] +joint_name = "r_leg_hpx" + +[Sensor.EncoderSensor.r_leg_hpz] +joint_name = "r_leg_hpz" + +[Sensor.EncoderSensor.r_leg_hpy] +joint_name = "r_leg_hpy" + 
+[Sensor.EncoderSensor.r_leg_kny] +joint_name = "r_leg_kny" + +[Sensor.EncoderSensor.r_leg_aky] +joint_name = "r_leg_aky" + +[Sensor.EncoderSensor.r_leg_akx] +joint_name = "r_leg_akx" \ No newline at end of file diff --git a/data/bipedal_robots/atlas/atlas_v4_options.toml b/data/bipedal_robots/atlas/atlas_v4_options.toml new file mode 100644 index 000000000..a9084fe43 --- /dev/null +++ b/data/bipedal_robots/atlas/atlas_v4_options.toml @@ -0,0 +1,27 @@ +# ============= Engine stepper ================= + +[engine.stepper] +verbose = false +odeSolver = "runge_kutta_4" +sensorsUpdatePeriod = 0.01 +controllerUpdatePeriod = 0.01 +logInternalStepperSteps = false +randomSeed = 0 + +# ============== Ground dynamics =============== + +[engine.contacts] +model = "impulse" +solver = "PGS" +tolAbs = 1.0e-4 +tolRel = 1.0e-3 +regularization = 2.0e-3 +stabilizationFreq = 20.0 +transitionEps = 5.0e-3 +friction = 1.0 + +# ======== Joints bounds configuration ======== + +[system.robot.model.joints] +enablePositionLimit = true +enableVelocityLimit = true diff --git a/data/bipedal_robots/cassie/cassie_hardware.toml b/data/bipedal_robots/cassie/cassie_hardware.toml index 4accfaa4c..6daef4fa2 100644 --- a/data/bipedal_robots/cassie/cassie_hardware.toml +++ b/data/bipedal_robots/cassie/cassie_hardware.toml @@ -7,52 +7,52 @@ contactFramesNames = [] [Motor.SimpleMotor.hip_abduction_motor_left] joint_name = "hip_abduction_left" mechanicalReduction = 25.0 -armature = 0.038125 +armature = 0.2 [Motor.SimpleMotor.hip_rotation_motor_left] joint_name = "hip_rotation_left" mechanicalReduction = 25.0 -armature = 0.038125 +armature = 0.2 [Motor.SimpleMotor.hip_flexion_motor_left] joint_name = "hip_flexion_left" mechanicalReduction = 16.0 -armature = 0.09344 +armature = 0.2 [Motor.SimpleMotor.knee_joint_motor_left] joint_name = "knee_joint_left" mechanicalReduction = 16.0 -armature = 0.09344 +armature = 0.2 [Motor.SimpleMotor.toe_joint_motor_left] joint_name = "toe_joint_left" mechanicalReduction = 50.0 -armature = 0.01225 +armature = 0.2 [Motor.SimpleMotor.hip_abduction_motor_right] joint_name = "hip_abduction_right" mechanicalReduction = 25.0 -armature = 0.038125 +armature = 0.2 [Motor.SimpleMotor.hip_rotation_motor_right] joint_name = "hip_rotation_right" mechanicalReduction = 25.0 -armature = 0.038125 +armature = 0.2 [Motor.SimpleMotor.hip_flexion_motor_right] joint_name = "hip_flexion_right" mechanicalReduction = 16.0 -armature = 0.09344 +armature = 0.2 [Motor.SimpleMotor.knee_joint_motor_right] joint_name = "knee_joint_right" mechanicalReduction = 16.0 -armature = 0.09344 +armature = 0.2 [Motor.SimpleMotor.toe_joint_motor_right] joint_name = "toe_joint_right" mechanicalReduction = 50.0 -armature = 0.01225 +armature = 0.2 # =============== IMU sensors ================= diff --git a/data/bipedal_robots/cassie/cassie_options.toml b/data/bipedal_robots/cassie/cassie_options.toml index 8c88b2ea7..af14d7b05 100644 --- a/data/bipedal_robots/cassie/cassie_options.toml +++ b/data/bipedal_robots/cassie/cassie_options.toml @@ -3,49 +3,25 @@ [engine.stepper] verbose = false odeSolver = "runge_kutta_4" -tolRel = 1.0e-9 -tolAbs = 1.0e-8 -# dtMax = 0.01 -dtRestoreThresholdRel = 0.2 -iterMax = 100000 -timeout = -1 sensorsUpdatePeriod = 0.01 controllerUpdatePeriod = 0.01 -randomSeed = 0 logInternalStepperSteps = false -successiveIterFailedMax = 1000 - -# ================= World ====================== - -[engine.world] -gravity = [0.0, 0.0, -9.81, 0.0, 0.0, 0.0] +randomSeed = 0 # ============== Ground dynamics =============== 
[engine.contacts] model = "impulse" solver = "PGS" +tolAbs = 1.0e-4 +tolRel = 1.0e-3 regularization = 2.0e-3 stabilizationFreq = 20.0 transitionEps = 2.0e-3 friction = 0.5 -# ======== Model dynamic configuration ========= - -[system.robot.model.dynamics] -inertiaBodiesBiasStd = 0.0 -massBodiesBiasStd = 0.0 -centerOfMassPositionBodiesBiasStd = 0.0 -relativePositionBodiesBiasStd = 0.0 -enableFlexibleModel = false - # ======== Joints bounds configuration ======== [system.robot.model.joints] enablePositionLimit = true -positionLimitFromUrdf = true -positionLimitMax = [] -positionLimitMin = [] enableVelocityLimit = true -velocityLimitFromUrdf = true -velocityLimit = [] diff --git a/data/toys_models/ant/ant_options.toml b/data/toys_models/ant/ant_options.toml index 0f824cc66..83724b4c7 100644 --- a/data/toys_models/ant/ant_options.toml +++ b/data/toys_models/ant/ant_options.toml @@ -2,9 +2,7 @@ [engine.stepper] verbose = false -odeSolver = "runge_kutta_4" # ["runge_kutta_4", "runge_kutta_dopri5"] -tolAbs = 1.0e-6 -tolRel = 1.0e-5 +odeSolver = "runge_kutta_4" sensorsUpdatePeriod = 0.01 controllerUpdatePeriod = 0.01 logInternalStepperSteps = false @@ -13,19 +11,14 @@ randomSeed = 0 # ============== Contact dynamics =============== [engine.contacts] -model = "impulse" # ["impulse", "spring_damper"] +model = "impulse" solver = "PGS" +tolAbs = 1.0e-4 +tolRel = 1.0e-3 regularization = 1.0e-1 stabilizationFreq = 5.0 -stiffness = 1.0e5 -damping = 1.0e3 transitionEps = 1.0e-2 friction = 1.0 -transitionVelocity = 1.0e-2 - -[engine.joints] -boundStiffness = 1.0e7 -boundDamping = 1.0e4 # ======== Joints bounds configuration ======== diff --git a/docs/conf.py b/docs/conf.py index 2886d109f..5e4617eb3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -192,11 +192,15 @@ # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'jiminy', u'Jiminy Documentation', - author, 'jiminy', 'Fast and light weight Python/C++ simulator of rigid poly-articulated systems focused on reinforcement learning.', - 'Miscellaneous'), -] +texinfo_documents = [( + master_doc, + 'jiminy', + 'Jiminy Documentation', + author, + 'jiminy', + 'Fast and light weight Python/C++ simulator of rigid poly-articulated systems focused on reinforcement learning.', + 'Miscellaneous' +)] # -- Options for Epub output ------------------------------------------------- diff --git a/docs/tutorial.ipynb b/docs/tutorial.ipynb index 8a2010644..d9753c49f 120000 --- a/docs/tutorial.ipynb +++ b/docs/tutorial.ipynb @@ -1 +1 @@ -../examples/tutorial.ipynb \ No newline at end of file +../examples/python/tutorial.ipynb \ No newline at end of file diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 6c7e91bd0..8c6729f43 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -5,4 +5,4 @@ cmake_minimum_required(VERSION 3.10) project(${LIBRARY_NAME}_examples VERSION ${BUILD_VERSION}) # Sub-projects -add_subdirectory(double_pendulum) \ No newline at end of file +add_subdirectory("cpp/double_pendulum") diff --git a/examples/double_pendulum/CMakeLists.txt b/examples/cpp/double_pendulum/CMakeLists.txt similarity index 100% rename from examples/double_pendulum/CMakeLists.txt rename to examples/cpp/double_pendulum/CMakeLists.txt diff --git a/examples/double_pendulum/double_pendulum.cc b/examples/cpp/double_pendulum/double_pendulum.cc similarity index 99% rename from examples/double_pendulum/double_pendulum.cc rename to examples/cpp/double_pendulum/double_pendulum.cc index acfa020dc..9285f7b8a 100644 --- a/examples/double_pendulum/double_pendulum.cc +++ b/examples/cpp/double_pendulum/double_pendulum.cc @@ -50,7 +50,7 @@ int main(int /* argc */, char_t * /* argv */[]) // Set URDF and log output. 
boost::filesystem::path const filePath(__FILE__); - auto const jiminySrcPath = filePath.parent_path().parent_path().parent_path(); + auto const jiminySrcPath = filePath.parent_path().parent_path().parent_path().parent_path(); auto const dataPath = jiminySrcPath / "data/toys_models"; auto const urdfPath = dataPath / "double_pendulum/double_pendulum.urdf"; auto const outputDirPath = boost::filesystem::temp_directory_path(); diff --git a/examples/pip_extension/CMakeLists.txt b/examples/cpp/pip_extension/CMakeLists.txt similarity index 100% rename from examples/pip_extension/CMakeLists.txt rename to examples/cpp/pip_extension/CMakeLists.txt diff --git a/examples/pip_extension/double_pendulum.cc b/examples/cpp/pip_extension/double_pendulum.cc similarity index 100% rename from examples/pip_extension/double_pendulum.cc rename to examples/cpp/pip_extension/double_pendulum.cc diff --git a/examples/pip_extension/double_pendulum.urdf b/examples/cpp/pip_extension/double_pendulum.urdf similarity index 100% rename from examples/pip_extension/double_pendulum.urdf rename to examples/cpp/pip_extension/double_pendulum.urdf diff --git a/examples/python/box_uneven_ground_impulse_contact.py b/examples/python/box_uneven_ground_impulse_contact.py new file mode 100644 index 000000000..10fbec4ad --- /dev/null +++ b/examples/python/box_uneven_ground_impulse_contact.py @@ -0,0 +1,63 @@ +import os +from itertools import starmap + +import numpy as np + +from jiminy_py.simulator import Simulator +from jiminy_py.core import random_tile_ground, sum_heightmap, merge_heightmap +from gym_jiminy.common.envs import BaseJiminyEnv + +# Set hyperpaameters of the ground profile +TILE_SIZE = [np.array([4.0, 4.0]), + np.array([100.0, 0.05]), + np.array([0.05, 100.0])] +TILE_HEIGHT_MAX = [1.0, 0.05, 0.05] +TILE_INTERP_DELTA = [np.array([0.5, 1.0]), + np.array([0.01, 0.01]), + np.array([0.01, 0.01])] +TILE_SPARSITY = [1, 8, 8] +TILE_ORIENTATION = [0.0, np.pi / 4.0, 0.0] +TILE_SEED = [np.random.randint(0, 2 ** 32, dtype=np.uint32) for _ in range(3)] + + +# Create a gym environment for a simple cube +urdf_path = f"{os.environ['HOME']}/wdc_workspace/src/jiminy/unit_py/data/box_collision_mesh.urdf" +env = BaseJiminyEnv(Simulator.build( + urdf_path, has_freeflyer=True), step_dt=0.01) + +# Enable impulse contact model +engine_options = env.engine.get_options() +engine_options['contacts']['model'] = 'impulse' +env.engine.set_options(engine_options) + +# Generate random ground profile +ground_params = list(starmap(random_tile_ground, zip( + TILE_SIZE, TILE_HEIGHT_MAX, TILE_INTERP_DELTA, TILE_SPARSITY, + TILE_ORIENTATION, TILE_SEED))) +engine_options["world"]["groundProfile"] = sum_heightmap([ + ground_params[0], merge_heightmap(ground_params[1:])]) +env.engine.set_options(engine_options) + +# Monkey-patch the initial state sampling function +sample_state_orig = env._sample_state + +def sample_state(): + qpos, qvel = sample_state_orig() + qpos[2] += 1.0 + qvel[-1] = 1.0 + return qpos, qvel + +env._sample_state = sample_state + +# Run a simulation +engine_options['contacts']['friction'] = 1.0 +engine_options['contacts']['torsion'] = 0.0 +env.engine.set_options(engine_options) + +env.reset() +for _ in range(300): + env.step() +env.stop() + +# Replay the simulation +env.replay(enable_travelling=False, display_contact_frames=True) diff --git a/examples/double_pendulum_py/test_simulation.py b/examples/python/double_pendulum.py similarity index 100% rename from examples/double_pendulum_py/test_simulation.py rename to 
examples/python/double_pendulum.py diff --git a/examples/tutorial.ipynb b/examples/python/tutorial.ipynb similarity index 100% rename from examples/tutorial.ipynb rename to examples/python/tutorial.ipynb diff --git a/python/gym_jiminy/common/gym_jiminy/common/bases/block_bases.py b/python/gym_jiminy/common/gym_jiminy/common/bases/block_bases.py index 96f4a6818..3068a2786 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/bases/block_bases.py +++ b/python/gym_jiminy/common/gym_jiminy/common/bases/block_bases.py @@ -12,7 +12,7 @@ import gym -from ..utils import FieldDictNested, SpaceDictNested, get_fieldnames +from ..utils import FieldNested, DataNested, get_fieldnames from ..envs import BaseJiminyEnv from .generic_bases import ControllerInterface, ObserverInterface @@ -176,7 +176,7 @@ def _setup(self) -> None: "environment simulation timestep.") def refresh_observation(self, # type: ignore[override] - measure: SpaceDictNested) -> None: + measure: DataNested) -> None: """Compute observed features based on the current simulation state and lower-level measure. @@ -257,7 +257,7 @@ def _setup(self) -> None: "The controller update period must be lower than or equal to the " "environment simulation timestep.") - def get_fieldnames(self) -> FieldDictNested: + def get_fieldnames(self) -> FieldNested: """Get mapping between each scalar element of the action space of the controller and the associated fieldname for logging. @@ -316,5 +316,5 @@ def get_fieldnames(self) -> FieldDictNested: :param measure: Observation of the environment. :param action: Target to achieve. - :returns: Action to perform + :returns: Action to perform. """ diff --git a/python/gym_jiminy/common/gym_jiminy/common/bases/generic_bases.py b/python/gym_jiminy/common/gym_jiminy/common/bases/generic_bases.py index 9ddbd9394..c3d1dcad0 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/bases/generic_bases.py +++ b/python/gym_jiminy/common/gym_jiminy/common/bases/generic_bases.py @@ -10,7 +10,7 @@ import jiminy_py.core as jiminy from jiminy_py.simulator import Simulator -from ..utils import SpaceDictNested, is_breakpoint +from ..utils import DataNested, is_breakpoint class ObserverInterface: @@ -18,7 +18,7 @@ class ObserverInterface: """ observe_dt: float observation_space: Optional[gym.Space] - _observation: Optional[SpaceDictNested] + _observation: Optional[DataNested] def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize the observation interface. @@ -37,7 +37,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # Call super to allow mixing interfaces through multiple inheritance super().__init__(*args, **kwargs) # type: ignore[call-arg] - def get_observation(self) -> SpaceDictNested: + def get_observation(self) -> DataNested: """Get post-processed observation. By default, it does not perform any post-processing. One is responsible @@ -83,7 +83,7 @@ class ControllerInterface: """ control_dt: float action_space: Optional[gym.Space] - _action: Optional[SpaceDictNested] + _action: Optional[DataNested] def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize the control interface. @@ -115,8 +115,8 @@ def _refresh_action_space(self) -> None: raise NotImplementedError def compute_command(self, - measure: SpaceDictNested, - action: SpaceDictNested) -> SpaceDictNested: + measure: DataNested, + action: DataNested) -> DataNested: """Compute the command to send to the subsequent block, based on the current target and observation of the environment. 
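A concrete block implementing the `ControllerInterface`/`BaseControllerBlock` contract above only needs an action space, telemetry fieldnames, and a `compute_command` body. The sketch below is purely illustrative and not part of this changeset; it assumes the block exposes its wrapped environment as `self.env` and the robot as `self.robot`, as the PD controller block touched later in this diff does.

```python
import numpy as np

from gym_jiminy.common.bases import BaseControllerBlock
from gym_jiminy.common.utils import DataNested, FieldNested


class PassthroughControl(BaseControllerBlock):
    """Hypothetical block forwarding its target directly as motor efforts."""

    def _refresh_action_space(self) -> None:
        # Mirror the environment action space: one scalar effort per motor
        self.action_space = self.env.action_space

    def get_fieldnames(self) -> FieldNested:
        # One telemetry fieldname per motor
        return [f"targetEffort{name}" for name in self.robot.motors_names]

    def compute_command(self,
                        measure: DataNested,
                        action: np.ndarray) -> np.ndarray:
        # No feedback here: the action is already an effort vector
        return action
```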
diff --git a/python/gym_jiminy/common/gym_jiminy/common/bases/pipeline_bases.py b/python/gym_jiminy/common/gym_jiminy/common/bases/pipeline_bases.py index bc1e46d7d..db869b5b4 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/bases/pipeline_bases.py +++ b/python/gym_jiminy/common/gym_jiminy/common/bases/pipeline_bases.py @@ -21,13 +21,12 @@ import jiminy_py.core as jiminy from jiminy_py.simulator import Simulator -from ..utils import (SpaceDictNested, +from ..utils import (DataNested, is_breakpoint, zeros, fill, copy, - set_value, - register_variables) + set_value) from ..envs import ObserverHandleType, ControllerHandleType, BaseJiminyEnv from .block_bases import BaseControllerBlock, BaseObserverBlock @@ -106,7 +105,7 @@ def _get_block_index(self) -> int: block = block.env return i - def get_observation(self) -> SpaceDictNested: + def get_observation(self) -> DataNested: """Get post-processed observation. It performs a recursive shallow copy of the observation. @@ -120,7 +119,7 @@ def reset(self, controller_hook: Optional[Callable[[], Optional[Tuple[ Optional[ObserverHandleType], Optional[ControllerHandleType]]]]] = None, - **kwargs: Any) -> SpaceDictNested: + **kwargs: Any) -> DataNested: """Reset the unified environment. In practice, it resets the environment and initializes the generic @@ -168,8 +167,8 @@ def register() -> Tuple[ObserverHandleType, ControllerHandleType]: return self.get_observation() def step(self, - action: Optional[SpaceDictNested] = None - ) -> Tuple[SpaceDictNested, float, bool, Dict[str, Any]]: + action: Optional[DataNested] = None + ) -> Tuple[DataNested, float, bool, Dict[str, Any]]: """Run a simulation step for a given action. :param action: Next action to perform. `None` to not update it. @@ -226,8 +225,8 @@ def refresh_observation(self) -> None: # type: ignore[override] self.env.refresh_observation() def compute_command(self, - measure: SpaceDictNested, - action: SpaceDictNested) -> SpaceDictNested: + measure: DataNested, + action: DataNested) -> DataNested: """Compute the motors efforts to apply on the robot. By default, it forwards the command computed by the environment. 
@@ -325,9 +324,12 @@ def __init__(self, # Update the observation space if self.augment_observation: self.observation_space = deepcopy(self.env.observation_space) + if not isinstance(self.observation_space, gym.spaces.Dict): + self.observation_space = gym.spaces.Dict(OrderedDict( + measures=self.observation_space)) self.observation_space.spaces.setdefault( - 'features', gym.spaces.Dict()).spaces[self.observer_name] = \ - self.observer.observation_space + 'features', gym.spaces.Dict()).spaces[ + self.observer_name] = self.observer.observation_space else: self.observation_space = self.observer.observation_space @@ -378,7 +380,13 @@ def refresh_observation(self) -> None: # type: ignore[override] if not self.simulator.is_simulation_running: features = self.observer.get_observation() if self.augment_observation: - self._observation = obs + # Assertion for type checker + assert isinstance(self._observation, dict) + # Make sure to store references + if isinstance(obs, gym.spaces.Dict): + self._observation = obs + else: + self._observation['measures'] = obs self._observation.setdefault('features', OrderedDict())[ self.observer_name] = features else: @@ -503,15 +511,24 @@ def __init__(self, # Append the controller's target to the observation if requested self.observation_space = deepcopy(self.env.observation_space) if self.augment_observation: + if not isinstance(self.observation_space, gym.spaces.Dict): + self.observation_space = gym.spaces.Dict(OrderedDict( + measures=self.observation_space)) self.observation_space.spaces.setdefault( - 'targets', gym.spaces.Dict()).spaces[self.controller_name] = \ - self.controller.action_space + 'targets', gym.spaces.Dict()).spaces[ + self.controller_name] = self.controller.action_space # Initialize some internal buffers self._action = zeros(self.action_space, dtype=np.float64) self._target = zeros(self.env.action_space, dtype=np.float64) self._observation = zeros(self.observation_space) + # Register the controller target to the telemetry + self.env.register_variable("action", + self._action, + self.controller.get_fieldnames(), + self.controller_name) + def _setup(self) -> None: """Configure the wrapper. @@ -531,17 +548,10 @@ def _setup(self) -> None: self.observe_dt = self.env.observe_dt self.control_dt = self.controller.control_dt - # Register the controller target to the telemetry. - # It may be useful for computing the terminal reward or debugging. - register_variables(self.simulator.controller, - self.controller.get_fieldnames(), - self._action, - self.controller_name) - def compute_command(self, - measure: SpaceDictNested, - action: SpaceDictNested - ) -> SpaceDictNested: + measure: DataNested, + action: DataNested + ) -> DataNested: """Compute the motors efforts to apply on the robot. 
In practice, it updates, whenever it is necessary: @@ -572,7 +582,7 @@ def compute_command(self, # action is undefined at this point set_value(self.env._action, self._target) set_value(self._command, self.env.compute_command( - self._observation, deepcopy(self._target))) + self._observation, self._target)) return self._command @@ -597,10 +607,19 @@ def refresh_observation(self) -> None: # type: ignore[override] # Add target to observation if requested if not self.simulator.is_simulation_running: - self._observation = self.env.get_observation() + obs = self.env.get_observation() if self.augment_observation: + # Assertion for type checker + assert isinstance(self._observation, dict) + # Make sure to store references + if isinstance(obs, dict): + self._observation = copy(obs) + else: + self._observation['measures'] = obs self._observation.setdefault('targets', OrderedDict())[ self.controller_name] = self._action + else: + self._observation = obs def compute_reward(self, *args: Any, **kwargs: Any) -> float: return self.controller.compute_reward(*args, **kwargs) diff --git a/python/gym_jiminy/common/gym_jiminy/common/controllers/proportional_derivative.py b/python/gym_jiminy/common/gym_jiminy/common/controllers/proportional_derivative.py index e945212f3..55661d56a 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/controllers/proportional_derivative.py +++ b/python/gym_jiminy/common/gym_jiminy/common/controllers/proportional_derivative.py @@ -12,7 +12,7 @@ from ..bases import BaseControllerBlock from ..envs import BaseJiminyEnv -from ..utils import SpaceDictNested, FieldDictNested +from ..utils import DataNested, FieldNested @nb.jit(nopython=True, nogil=True) @@ -116,7 +116,7 @@ def _refresh_action_space(self) -> None: V=gym.spaces.Box( low=vel_low, high=vel_high, dtype=np.float64))) - def get_fieldnames(self) -> FieldDictNested: + def get_fieldnames(self) -> FieldNested: pos_fieldnames = [f"targetPosition{name}" for name in self.robot.motors_names] vel_fieldnames = [f"targetVelocity{name}" @@ -124,8 +124,8 @@ def get_fieldnames(self) -> FieldDictNested: return OrderedDict(Q=pos_fieldnames, V=vel_fieldnames) def compute_command(self, - measure: SpaceDictNested, - action: SpaceDictNested + measure: DataNested, + action: gym.spaces.Dict ) -> np.ndarray: """Compute the motor torques using a PD controller. 
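The hunk below switches the encoder source of this block from the observation dict to `self.env.sensors_data`, still forwarded to the numba-jitted `_compute_command_impl` helper. For readers unfamiliar with that helper, here is a plain-NumPy sketch of the PD law it implements, for illustration only; the two-row layout of `encoders_data` (positions first, velocities second) and the role of `motor_to_encoder` as an index map are assumptions, not statements of this diff.

```python
import numpy as np


def pd_command_sketch(q_target: np.ndarray,
                      v_target: np.ndarray,
                      encoders_data: np.ndarray,
                      motor_to_encoder: np.ndarray,
                      pid_kp: np.ndarray,
                      pid_kd: np.ndarray) -> np.ndarray:
    # Assumed layout: row 0 stores encoder positions, row 1 encoder velocities
    q_measured = encoders_data[0, motor_to_encoder]
    v_measured = encoders_data[1, motor_to_encoder]
    # Classical PD law on the tracking errors, one effort per motor
    return (pid_kp * (q_target - q_measured)
            + pid_kd * (v_target - v_measured))
```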
@@ -137,6 +137,6 @@ def compute_command(self, """ return _compute_command_impl( q_target=action['Q'], v_target=action['V'], - encoders_data=measure['sensors'][encoder.type], + encoders_data=self.env.sensors_data[encoder.type], motor_to_encoder=self.motor_to_encoder, pid_kp=self.pid_kp, pid_kd=self.pid_kd) diff --git a/python/gym_jiminy/common/gym_jiminy/common/envs/env_generic.py b/python/gym_jiminy/common/gym_jiminy/common/envs/env_generic.py index e85f073ad..950010ae2 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/envs/env_generic.py +++ b/python/gym_jiminy/common/gym_jiminy/common/envs/env_generic.py @@ -6,8 +6,12 @@ import tempfile from copy import deepcopy from collections import OrderedDict -from typing import Optional, Tuple, Dict, Any, Callable, List, Union +from collections.abc import Mapping +from typing import ( + Optional, Tuple, Dict, Any, Callable, List, Union, Iterator, + Mapping as MappingT, MutableMapping as MutableMappingT) +import tree import numpy as np import gym from gym import logger, spaces @@ -19,11 +23,12 @@ ContactSensor as contact, ForceSensor as force, ImuSensor as imu) -from jiminy_py.viewer.viewer import DEFAULT_CAMERA_XYZRPY_REL +from jiminy_py.viewer.viewer import DEFAULT_CAMERA_XYZRPY_REL, Viewer from jiminy_py.dynamics import compute_freeflyer_state_from_fixed_body from jiminy_py.simulator import Simulator +from jiminy_py.log import extract_data_from_log -from pinocchio import neutral, framesForwardKinematics +from pinocchio import neutral, normalize, framesForwardKinematics from ..utils import (zeros, fill, @@ -31,8 +36,8 @@ clip, get_fieldnames, register_variables, - FieldDictNested, - SpaceDictNested) + FieldNested, + DataNested) from ..bases import ObserverControllerInterface from .internal import (ObserverHandleType, @@ -55,6 +60,23 @@ SENSOR_ACCEL_MAX = 10000.0 +class _LazyDictItemFilter(Mapping): + def __init__(self, + dict_packed: MappingT[str, Tuple[Any, ...]], + item_index: int) -> None: + self.dict_packed = dict_packed + self.item_index = item_index + + def __getitem__(self, name: str) -> Any: + return self.dict_packed[name][self.item_index] + + def __iter__(self) -> Iterator[str]: + return iter(self.dict_packed) + + def __len__(self) -> int: + return len(self.dict_packed) + + class BaseJiminyEnv(ObserverControllerInterface, gym.Env): """Base class to train a robot in Gym OpenAI using a user-specified Python Jiminy engine for physics computations. 
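The `_LazyDictItemFilter` mapping added in the hunk above is what lets `log_headers` expose only the fieldnames part of `_registered_variables` without copying anything. A small illustration with made-up values follows; the class is module-private, so the snippet is assumed to run inside the same module.

```python
import numpy as np

# Made-up packed storage: variable name -> (fieldnames, reference to the data)
packed = {"action": (["action.q1", "action.q2"], np.zeros(2)),
          "com_target": (["x", "y", "z"], np.zeros(3))}

headers = _LazyDictItemFilter(packed, 0)  # read-only view over the fieldnames
buffers = _LazyDictItemFilter(packed, 1)  # read-only view over the data

assert headers["action"] == ["action.q1", "action.q2"]
assert buffers["com_target"] is packed["com_target"][1]
assert len(headers) == 2 and list(headers) == list(packed)
```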
@@ -118,11 +140,16 @@ def __init__(self, self.system_state: jiminy.SystemState = self.engine.system_state self.sensors_data: jiminy.sensorsData = dict(self.robot.sensors_data) + # Store references to the variables to register to the telemetry + self._registered_variables: MutableMappingT[ + str, Tuple[FieldNested, DataNested]] = {} + self.log_headers: MappingT[str, FieldNested] = _LazyDictItemFilter( + self._registered_variables, 0) + # Internal buffers for physics computations - self.rg = np.random.Generator(np.random.Philox()) self._seed: List[np.uint32] = [] + self.rg = np.random.Generator(np.random.Philox()) self.log_path: Optional[str] = None - self.logfile_action_headers: Optional[FieldDictNested] = None # Whether or not evaluation mode is active self.is_training = True @@ -166,6 +193,14 @@ def __init__(self, self._action = zeros(self.action_space, dtype=np.float64) self._observation = zeros(self.observation_space) + # Register the action to the telemetry + if isinstance(self._action, np.ndarray) and ( + self._action.size == self.robot.nmotors): + # Default case: assuming there is one scalar action per motor + action_headers = [ + ".".join(("action", e)) for e in self.robot.motors_names] + self.register_variable("action", self._action, action_headers) + def __getattr__(self, name: str) -> Any: """Fallback attribute getter. @@ -437,11 +472,56 @@ def _refresh_action_space(self) -> None: self.action_space = spaces.Box( low=-action_scale, high=action_scale, dtype=np.float64) + def register_variable(self, + name: str, + value: DataNested, + fieldnames: Optional[ + Union[str, FieldNested]] = None, + namespace: Optional[str] = None) -> None: + """ TODO: Write documentation. + """ + # Create default fieldnames if not specified + if fieldnames is None: + fieldnames = get_fieldnames(value, name) + + # Store string in a list + if isinstance(fieldnames, str): + fieldnames = [fieldnames] + + # Prepend with namespace if requested + if namespace: + fieldnames = tree.map_structure( + lambda key: ".".join(filter(None, (namespace, key))), + fieldnames) + + # Early return with a warning is fieldnames is empty + if not fieldnames: + logger.warn("'value' or 'fieldnames' cannot be empty.") + return + + # Check if variable can be registered successfully to the telemetry. + # Note that a dummy controller must be created to avoid using the + # actual one to keep control of when registering will take place. + try: + is_success = register_variables( + jiminy.BaseController(), fieldnames, value) + except ValueError as e: + raise ValueError( + f"'fieldnames' ({fieldnames})' if not consistent with the " + f"'value' ({value})") from e + + # Combine namespace and variable name if provided + name = ".".join(filter(None, (namespace, name))) + + # Store the header and a reference to the variable if successful + if is_success: + self._registered_variables[name] = (fieldnames, value) + def reset(self, controller_hook: Optional[Callable[[], Optional[Tuple[ Optional[ObserverHandleType], Optional[ControllerHandleType]]]]] = None - ) -> SpaceDictNested: + ) -> DataNested: """Reset the environment. In practice, it resets the backend simulator and set the initial state @@ -454,19 +534,10 @@ def reset(self, possible to change the robot (included options), nor to register log variable. The only way to do so is via 'controller_hook'. - :param controller_hook: Custom controller hook. It will be executed - right after initialization of the environment, - and just before actually starting the - simulation. 
It is a callable that optionally - returns observer and/or controller handles. If - defined, it will be used to initialize the - low-level jiminy controller. It is useful to - override partially the configuration of the - low-level engine, set a custom low-level - observer/controller handle, or to register - custom variables to the telemetry. Set to - `None` if unused. - Optional: Disabled by default. + :param controller_hook: Used internally for chaining multiple + `BasePipelineWrapper`. It is not meant to be + defined manually. + Optional: None by default. :returns: Initial observation of the episode. """ @@ -491,23 +562,24 @@ "The memory address of the low-level has changed.") # Enforce the low-level controller. - # The robot may have changed, for example if it is randomly generated - # based on different URDF files. As a result, it is necessary to - # instantiate a new low-level controller. - # Note that `BaseJiminyObserverController` is used in place of - # `jiminy.BaseControllerFunctor`. Although it is less efficient because - # it adds an extra layer of indirection, it makes it possible to update - # the controller handle without instantiating a new controller, which - # is necessary to allow registering telemetry variables before knowing - # the controller handle in advance. + # The robot may have changed, for example it could be randomly + # generated, which would corrupt the old controller. As a result, it is + # necessary to either instantiate a new low-level controller or + # re-initialize the existing one by calling the `controller.initialize` + # method BEFORE calling `reset`, because otherwise it would + # cause a segfault. In practice, `BaseJiminyObserverController` must be + # used because it makes it possible to define observer and controller + # handles separately, while dealing with all the logic internally. This + # extra layer of indirection makes it computationally less efficient than + # `jiminy.BaseControllerFunctor` but it is a small price to pay. controller = BaseJiminyObserverController() controller.initialize(self.robot) self.simulator.set_controller(controller) # Reset the simulator. - # Note that the controller must be set BEFORE calling 'reset', because - # otherwise the controller would be corrupted if the robot has changed. - self.simulator.reset() + # Do NOT remove all forces since it has already been done before, and + # because it would make it impossible to register forces in `_setup`. + self.simulator.reset(remove_all_forces=False) # Re-initialize some shared memories. # It must be done because the robot may have changed.
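The next hunk replaces the former ad-hoc action logging with a generic loop over `self._registered_variables`, which users populate through the new `register_variable` method added earlier in this file. A hedged usage sketch follows; the variable name, fieldnames and namespace are purely illustrative, and `env` is assumed to be an already-constructed `BaseJiminyEnv`.

```python
import numpy as np

# env: BaseJiminyEnv built elsewhere; buffer kept up to date by custom code
com_target = np.zeros(3)

# The reference is stored, not a copy, so later in-place updates of
# `com_target` are what end up in the log. The variable is actually wired
# to the low-level telemetry at the next `reset()`, by the loop below.
env.register_variable("com_target",
                      com_target,
                      fieldnames=["x", "y", "z"],
                      namespace="debug")
```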
@@ -554,19 +626,9 @@ def reset(self, self.max_steps = int( self.simulator.simulation_duration_max / self.step_dt) - # Register the action to the telemetry - if isinstance(self._action, np.ndarray) and ( - self._action.size == self.robot.nmotors): - # Default case: assuming there is one scalar action per motor - self.logfile_action_headers = [ - ".".join(("action", e)) for e in self.robot.motors_names] - else: - # Fallback: Get generic fieldnames otherwise - self.logfile_action_headers = get_fieldnames( - self.action_space, "action") - register_variables(self.simulator.controller, - self.logfile_action_headers, - self._action) + # Register user-specified variables to the telemetry + for header, value in self._registered_variables.values(): + register_variables(self.simulator.controller, header, value) # Sample the initial state and reset the low-level engine qpos, qvel = self._sample_state() @@ -653,8 +715,8 @@ def close(self) -> None: self.simulator.close() def step(self, - action: Optional[SpaceDictNested] = None - ) -> Tuple[SpaceDictNested, float, bool, Dict[str, Any]]: + action: Optional[DataNested] = None + ) -> Tuple[DataNested, float, bool, Dict[str, Any]]: """Run a simulation step for a given action. :param action: Action to perform. `None` to not update the action. @@ -797,24 +859,25 @@ def plot(self, **kwargs: Any) -> None: # individual scalar data over time to be displayed to the same subplot. t = log_data["Global.Time"] tab_data = {} - if self.logfile_action_headers is None: + action_headers = self.log_headers.get("action", None) + if action_headers is None: # It was impossible to register the action to the telemetry, likely # because of incompatible dtype. Early return without adding tab. return - if isinstance(self.logfile_action_headers, dict): - for field, subfields in self.logfile_action_headers.items(): - if not isinstance(subfields, list): - logger.error("Action space not supported.") + if isinstance(action_headers, dict): + for group, fieldnames in action_headers.items(): + if not isinstance(fieldnames, list): + logger.error("Action space not supported by this method.") return - tab_data[field] = { - field.split(".", 1)[1]: log_data[ - ".".join(("HighLevelController", field))] - for field in subfields} - elif isinstance(self.logfile_action_headers, list): + tab_data[group] = { + ".".join(key.split(".")[1:]): value + for key, value in extract_data_from_log( + log_data, fieldnames, as_dict=True).items()} + elif isinstance(action_headers, list): tab_data.update({ - field.split(".", 1)[1]: log_data[ - ".".join(("HighLevelController", field))] - for field in self.logfile_action_headers}) + ".".join(key.split(".")[1:]): value + for key, value in extract_data_from_log( + log_data, action_headers, as_dict=True).items()}) # Add action tab self.figure.add_tab("Action", t, tab_data) @@ -843,7 +906,8 @@ def replay(self, enable_travelling: bool = True, **kwargs: Any) -> None: self.simulator.stop() # Set default camera pose if viewer not already available - if not self.simulator.is_viewer_available and self.robot.has_freeflyer: + if not self.simulator.is_viewer_available and \ + self.robot.has_freeflyer and not Viewer.has_gui(): # Get root frame name. 
# The first and second frames are respectively "universe" no matter # if the robot has a freeflyer or not, and the second one is the @@ -922,6 +986,11 @@ def play_interactive(env: Union["BaseJiminyEnv", gym.Wrapper], obs = env.reset() reward = None + # Refresh the ground profile + engine_options = self.engine.get_options() + ground_profile = engine_options["world"]["groundProfile"] + self.viewer.update_floor(ground_profile, show_meshes=False) + # Enable travelling if enable_travelling is None: enable_travelling = \ @@ -990,6 +1059,13 @@ def _setup(self) -> None: By default, it enforces some options of the engine. + .. warning:: + Beware this method is called BEFORE `observe_dt` and + `controller_dt` are properly set, so one cannot rely on it at this + point. Yet, `step_dt` is available and should always be. One can + still access the low-level controller update period through + `engine_options['stepper']['controllerUpdatePeriod']`. + .. note:: The user must overload this method to enforce custom observer update period, otherwise it will be the same of the controller. @@ -1079,6 +1155,16 @@ def _sample_state(self) -> Tuple[np.ndarray, np.ndarray]: # Get the neutral configuration qpos = self._neutral() + # Make sure the configuration is not out-of-bound + pinocchio_model = self.robot.pinocchio_model + position_limit_lower = pinocchio_model.lowerPositionLimit + position_limit_upper = pinocchio_model.upperPositionLimit + qpos = np.minimum(np.maximum( + qpos, position_limit_lower), position_limit_upper) + + # Make sure the configuration is normalized + qpos = normalize(pinocchio_model, qpos) + # Make sure the robot impacts the ground if self.robot.has_freeflyer: engine_options = self.simulator.engine.get_options() @@ -1142,7 +1228,7 @@ def refresh_observation(self) -> None: # type: ignore[override] self._observation['state']['V'][:] = velocity def compute_command(self, - measure: SpaceDictNested, + measure: DataNested, action: np.ndarray ) -> np.ndarray: """Compute the motors efforts to apply on the robot. @@ -1184,9 +1270,9 @@ def is_done(self, *args: Any, **kwargs: Any) -> bool: def _key_to_action(self, key: Optional[str], - obs: SpaceDictNested, + obs: DataNested, reward: Optional[float], - **kwargs: Any) -> SpaceDictNested: + **kwargs: Any) -> DataNested: """Mapping from input keyboard keys to actions. .. note:: @@ -1261,7 +1347,7 @@ def __init__(self, # Define some internal buffers self._desired_goal = zeros(goal_space) - def get_observation(self) -> SpaceDictNested: + def get_observation(self) -> DataNested: """Get post-processed observation. It gathers the original observation from the environment with the @@ -1277,7 +1363,7 @@ def reset(self, controller_hook: Optional[Callable[[], Optional[Tuple[ Optional[ObserverHandleType], Optional[ControllerHandleType]]]]] = None - ) -> SpaceDictNested: + ) -> DataNested: self._desired_goal = self._sample_goal() return super().reset(controller_hook) @@ -1295,7 +1381,7 @@ def _get_goal_space(self) -> gym.Space: """ raise NotImplementedError - def _sample_goal(self) -> SpaceDictNested: + def _sample_goal(self) -> DataNested: """Sample a goal randomly. .. note:: @@ -1307,7 +1393,7 @@ def _sample_goal(self) -> SpaceDictNested: """ raise NotImplementedError - def _get_achieved_goal(self) -> SpaceDictNested: + def _get_achieved_goal(self) -> DataNested: """Compute the achieved goal based on current state of the robot. .. 
note:: @@ -1320,8 +1406,8 @@ def _get_achieved_goal(self) -> SpaceDictNested: raise NotImplementedError def is_done(self, # type: ignore[override] - achieved_goal: Optional[SpaceDictNested] = None, - desired_goal: Optional[SpaceDictNested] = None) -> bool: + achieved_goal: Optional[DataNested] = None, + desired_goal: Optional[DataNested] = None) -> bool: """Determine whether a termination condition has been reached. By default, it uses the termination condition inherited from normal @@ -1344,8 +1430,8 @@ def is_done(self, # type: ignore[override] raise NotImplementedError def compute_reward(self, # type: ignore[override] - achieved_goal: Optional[SpaceDictNested] = None, - desired_goal: Optional[SpaceDictNested] = None, + achieved_goal: Optional[DataNested] = None, + desired_goal: Optional[DataNested] = None, *, info: Dict[str, Any]) -> float: """Compute the reward for any given episode state. diff --git a/python/gym_jiminy/common/gym_jiminy/common/utils/__init__.py b/python/gym_jiminy/common/gym_jiminy/common/utils/__init__.py index 133be826a..e280c3422 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/utils/__init__.py +++ b/python/gym_jiminy/common/gym_jiminy/common/utils/__init__.py @@ -1,7 +1,7 @@ # pylint: disable=missing-module-docstring -from .spaces import (SpaceDictNested, - FieldDictNested, +from .spaces import (DataNested, + FieldNested, sample, zeros, fill, @@ -14,8 +14,8 @@ __all__ = [ - 'SpaceDictNested', - 'FieldDictNested', + 'DataNested', + 'FieldNested', 'sample', 'zeros', 'fill', diff --git a/python/gym_jiminy/common/gym_jiminy/common/utils/helpers.py b/python/gym_jiminy/common/gym_jiminy/common/utils/helpers.py index ef8ede40c..b1e3167e8 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/utils/helpers.py +++ b/python/gym_jiminy/common/gym_jiminy/common/utils/helpers.py @@ -1,14 +1,19 @@ """ TODO: Write documentation. """ -from typing import Union, Dict, List, ValuesView +import logging +from typing import Union, ValuesView +import gym +import tree import numpy as np import numba as nb -from gym import spaces import jiminy_py.core as jiminy -from .spaces import FieldDictNested, SpaceDictNested +from .spaces import FieldNested, DataNested, zeros + + +logger = logging.getLogger(__name__) @nb.jit(nopython=True, nogil=True) @@ -29,9 +34,9 @@ def is_breakpoint(t: float, dt: float, eps: float) -> bool: return False -def get_fieldnames(space: spaces.Space, - namespace: str = "") -> FieldDictNested: - """Get generic fieldnames from `gym.spaces.Space`, so that it can be used +def get_fieldnames(structure: Union[FieldNested, DataNested], + namespace: str = "") -> FieldNested: + """Generate generic fieldnames from `gym..Space`, so that it can be used in conjunction with `register_variables`, to register any value from gym Space to the telemetry conveniently. @@ -40,34 +45,34 @@ def get_fieldnames(space: spaces.Space, Empty string to disable. Optional: Disabled by default. """ - # Fancy action space: Trying to be clever and infer meaningful - # telemetry names based on action space - if isinstance(space, (spaces.Box, spaces.Tuple, spaces.MultiBinary)): - # No information: fallback to basic numbering - return [".".join(filter(None, (namespace, str(i)))) - for i in range(len(space))] - if isinstance(space, ( - spaces.Discrete, spaces.MultiDiscrete)): - # The space is already scalar. Namespace alone as fieldname is enough. - return [namespace] - if isinstance(space, spaces.Dict): - assert space.spaces, "Dict space cannot be empty." 
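Editor's note: since the goal-oriented API above is only declared abstract, a minimal subclass sketch may help. The method names and signatures are the ones visible in these hunks; the goal space, the distance-based success test and the sparse reward are purely illustrative assumptions, and the `BaseJiminyEnv` constructor arguments (simulator, step period, ...) are omitted.

from typing import Optional, Dict, Any

import numpy as np
import gym

from gym_jiminy.common.envs import BaseJiminyGoalEnv


class DummyGoalEnv(BaseJiminyGoalEnv):
    """Illustrative only: goal space, success test and reward are made up."""

    def _get_goal_space(self) -> gym.Space:
        return gym.spaces.Box(low=-1.0, high=1.0, shape=(2,), dtype=np.float64)

    def _sample_goal(self) -> np.ndarray:
        return self._get_goal_space().sample()

    def _get_achieved_goal(self) -> np.ndarray:
        # Would typically be computed from the robot state (e.g. an end-effector position)
        return np.zeros((2,))

    def is_done(self,
                achieved_goal: Optional[np.ndarray] = None,
                desired_goal: Optional[np.ndarray] = None) -> bool:
        achieved = achieved_goal if achieved_goal is not None else self._get_achieved_goal()
        desired = desired_goal if desired_goal is not None else self._desired_goal
        return bool(np.linalg.norm(achieved - desired) < 1e-2)

    def compute_reward(self,
                       achieved_goal: Optional[np.ndarray] = None,
                       desired_goal: Optional[np.ndarray] = None,
                       *, info: Dict[str, Any]) -> float:
        # Sparse reward: 0.0 on success, -1.0 otherwise
        return -float(not self.is_done(achieved_goal, desired_goal))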
- out: List[Union[Dict[str, FieldDictNested], str]] = [] - for field, subspace in dict.items(space.spaces): - if isinstance(subspace, spaces.Dict): - out.append({field: get_fieldnames(subspace, namespace)}) - else: - out.append(field) - return out - raise NotImplementedError( - f"Gym.Space of type {type(space)} is not supported.") + # Create dummy data structure if gym.Space is provided + if isinstance(structure, gym.Space): + structure = zeros(structure) + + fieldnames = [] + for fieldname_path, data in tree.flatten_with_path(structure): + assert isinstance(data, np.ndarray), ( + "'structure' ({structure}) must have leaves of type `np.ndarray`.") + if data.size < 1: + # Empty: return empty list + fieldname = [] + elif data.size == 1: + # Scalar: fieldname path is enough + fieldname = [".".join(filter(None, (namespace, *fieldname_path)))] + else: + # Tensor: basic numbering + fieldname = np.array([ + ".".join(filter(None, (namespace, *fieldname_path, str(i)))) + for i in range(data.size)]).reshape(data.shape).tolist() + fieldnames.append(fieldname) + + return tree.unflatten_as(structure, fieldnames) def register_variables(controller: jiminy.AbstractController, - fields: Union[ - ValuesView[FieldDictNested], FieldDictNested], - data: SpaceDictNested, - namespace: str = "") -> bool: + fieldnames: Union[ + ValuesView[FieldNested], FieldNested], + data: DataNested) -> bool: """Register data from `Gym.Space` to the telemetry of a controller. .. warning:: @@ -78,49 +83,29 @@ def register_variables(controller: jiminy.AbstractController, :param controller: Robot's controller of the simulator used to register variables to the telemetry. - :param field: Nested variable names, as returned by `get_fieldnames` - method. It can be a nested list or/and dict. The leaf are - str corresponding to the name of each scalar data. - :param data: Data from `Gym.spaces.Space` to register. Note that the + :param fieldnames: Nested variable names, as returned by `get_fieldnames` + method. It can be a nested list or/and dict. The leaves + are str corresponding to the name of each scalar data. + :param data: Data from `gym.spaces.Space` to register. Note that the telemetry stores pointers to the underlying memory, so it only supports np.float64, and make sure to reassign data using `np.copyto` or `[:]` operator (faster). - :param namespace: Namespace used to prepend fields, using '.' delimiter. - Empty string to disable. - Optional: Disabled by default. :returns: Whether or not the registration has been successful. """ - # Make sure data is at least 1d if numpy array, to avoid undefined `len` - if isinstance(data, np.ndarray): - data = np.atleast_1d(data) # By reference - - # Make sure fields and data are consistent - assert fields and len(fields) == len(data), ( - "fields and data are inconsistent.") - - # Default case: data is already a numpy array. Can be registered directly. 
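Editor's note: the rewritten `get_fieldnames` infers one telemetry name per scalar by flattening the structure with `dm-tree`. A small usage sketch; the output shown in the comments is indicative and assumes an alphabetically ordered `gym.spaces.Dict`, as in gym <= 0.21.

import gym
import numpy as np

from gym_jiminy.common.utils.helpers import get_fieldnames

space = gym.spaces.Dict(
    mode=gym.spaces.Discrete(3),
    q=gym.spaces.Box(low=-1.0, high=1.0, shape=(2,), dtype=np.float64))

fieldnames = get_fieldnames(space, namespace="action")
# Expected result (indicative):
# OrderedDict([('mode', ['action.mode']),
#              ('q', ['action.q.0', 'action.q.1'])])
print(fieldnames)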
- if isinstance(data, np.ndarray): - if np.issubsctype(data, np.float64): - assert isinstance(fields, list) - for i, field in enumerate(fields): - if isinstance(fields[i], list): - fields[i] = [".".join(filter(None, (namespace, subfield))) - for subfield in field] - else: - fields[i] = ".".join(filter(None, (namespace, field))) - hresult = controller.register_variables(fields, data) - return hresult == jiminy.hresult_t.SUCCESS - return False - - # Fallback to looping over fields and data iterators - is_success = True - if isinstance(fields, dict): - fields = fields.values() - for subfields, value in zip(fields, data.values()): - assert isinstance(subfields, (dict, list)) - is_success = register_variables( - controller, subfields, value, namespace) - if not is_success: - break - return is_success + # pylint: disable=cell-var-from-loop + for fieldname, value in zip( + tree.flatten_up_to(data, fieldnames), + tree.flatten(data)): + if np.issubsctype(value, np.float64): + assert isinstance(fieldname, list), ( + "'fieldname' ({fieldname}) should be a list of strings.") + hresult = controller.register_variables(fieldname, value) + if hresult != jiminy.hresult_t.SUCCESS: + return False + else: + logger.warning( + f"Variable of dtype '{value.dtype}' cannot be registered to " + "the telemetry and must have dtype 'np.float64' instead.") + return False + return True diff --git a/python/gym_jiminy/common/gym_jiminy/common/utils/spaces.py b/python/gym_jiminy/common/gym_jiminy/common/utils/spaces.py index 3c8b6dece..d56e517d6 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/utils/spaces.py +++ b/python/gym_jiminy/common/gym_jiminy/common/utils/spaces.py @@ -1,18 +1,34 @@ """ TODO: Write documentation. """ -from collections import OrderedDict -from typing import Optional, Union, Dict, Sequence, List +from typing import Optional, Union, Dict, Sequence, TypeVar import numpy as np +import tree import gym -from gym import spaces -SpaceDictNested = Union[ # type: ignore - Dict[str, 'SpaceDictNested'], np.ndarray] # type: ignore -ListStrRecursive = List[Union[str, 'ListStrRecursive']] # type: ignore -FieldDictNested = Union[ # type: ignore - Dict[str, 'FieldDictNested'], ListStrRecursive] # type: ignore +ValueType = TypeVar('ValueType') +StructNested = Union[Dict[str, 'StructNested'], # type: ignore + Sequence['StructNested'], # type: ignore + ValueType] +FieldNested = StructNested[str] # type: ignore +DataNested = StructNested[np.ndarray] # type: ignore + + +def _space_nested_raw(space_nested: gym.Space) -> StructNested[gym.Space]: + """Replace any `gym.spaces.Dict` by the raw `OrderedDict` dict it contains. + + .. note:: + It is necessary because it does not inherit from + `collection.abc.Mapping`, which is necessary for `dm-tree` to operate + properly on it. 
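Editor's note: combined with `get_fieldnames`, the new `register_variables` registers any `np.float64` buffer to the telemetry by pointer. A hedged usage sketch follows; it assumes `env` is an already instantiated `BaseJiminyEnv` whose low-level controller exists, and keep in mind that the controller is re-created at every `reset`, which is why the environment itself re-registers its own variables there.

import numpy as np

from gym_jiminy.common.utils.helpers import get_fieldnames, register_variables

# `env` is assumed to be an instantiated `BaseJiminyEnv` (not constructed here)
buffer = np.zeros((3,), dtype=np.float64)                 # telemetry only supports np.float64
fieldnames = get_fieldnames(buffer, namespace="debug")    # ['debug.0', 'debug.1', 'debug.2']
success = register_variables(env.simulator.controller, fieldnames, buffer)  # True on success

# The telemetry stores a pointer to `buffer`: update it in-place, never rebind the name
buffer[:] = (1.0, 2.0, 3.0)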
+ # TODO: remove this patch after release of gym==0.22.0 (hopefully) + """ + return tree.traverse( + lambda space: + _space_nested_raw(space.spaces) + if isinstance(space, gym.spaces.Dict) else None, + space_nested) def sample(low: Union[float, np.ndarray] = -1.0, @@ -80,87 +96,74 @@ def sample(low: Union[float, np.ndarray] = -1.0, rg = np.random distrib_fn = getattr(rg, dist) if dist == 'uniform': - val = distrib_fn(low=-1.0, high=1.0, size=shape) + value = distrib_fn(low=-1.0, high=1.0, size=shape) else: - val = distrib_fn(size=shape) + value = distrib_fn(size=shape) # Set mean and deviation - val = mean + dev * val + value = mean + dev * value # Revert log scale if appropriate if enable_log_scale: - val = 10 ** val + value = 10 ** value - return val + return value -def is_bounded(space: gym.Space) -> bool: +def is_bounded(space_nested: gym.Space) -> bool: """Check wether `gym.spaces.Space` has finite bounds. :param space: Gym.Space on which to operate. """ - if isinstance(space, spaces.Box): - return space.is_bounded() - if isinstance(space, spaces.Dict): - return any(not is_bounded(subspace) for subspace in space.values()) - if isinstance(space, spaces.Tuple): - return any(not is_bounded(subspace) for subspace in space) - if isinstance(space, ( - spaces.Discrete, spaces.MultiDiscrete, spaces.MultiBinary)): - return True - raise NotImplementedError( - f"Space of type {type(space)} is not supported.") - - -def zeros(space: gym.Space, - dtype: Optional[type] = None) -> Union[SpaceDictNested, int]: + for space in tree.flatten(_space_nested_raw(space_nested)): + is_bounded_fn = getattr(space, "is_bounded", None) + if is_bounded_fn is not None and not is_bounded_fn(): + return False + return True + + +def zeros(space_nested: gym.Space, + dtype: Optional[type] = None) -> Union[DataNested, int]: """Allocate data structure from `gym.space.Space` and initialize it to zero. :param space: Gym.Space on which to operate. :param dtype: Must be specified to overwrite original space dtype. 
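Editor's note: the `tree`-based `is_bounded` above returns False as soon as one leaf space reports unbounded limits; leaf spaces without an `is_bounded` method (e.g. `Discrete`) are treated as bounded. A quick illustrative check:

import gym
import numpy as np

from gym_jiminy.common.utils.spaces import is_bounded

bounded_space = gym.spaces.Dict(
    pos=gym.spaces.Box(low=-1.0, high=1.0, shape=(2,), dtype=np.float64),
    mode=gym.spaces.Discrete(4))
unbounded_space = gym.spaces.Dict(
    vel=gym.spaces.Box(low=-np.inf, high=np.inf, shape=(2,), dtype=np.float64))

assert is_bounded(bounded_space)        # every leaf has finite bounds (or none to check)
assert not is_bounded(unbounded_space)  # the Box leaf reports unbounded limits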
""" - if isinstance(space, spaces.Box): - return np.zeros(space.shape, dtype=dtype or space.dtype) - if isinstance(space, spaces.Dict): - value = OrderedDict() - for field, subspace in dict.items(space.spaces): - value[field] = zeros(subspace, dtype=dtype) - return value - if isinstance(space, spaces.Tuple): - return tuple(zeros(subspace, dtype=dtype) for subspace in space.spaces) - if isinstance(space, (spaces.Discrete, spaces.MultiDiscrete)): - # Note that np.array of 0 dim is returned in order to be mutable - return np.array(0, dtype=dtype or np.int64) - if isinstance(space, spaces.MultiBinary): - return np.zeros(space.n, dtype=dtype or np.int8) - raise NotImplementedError( - f"Space of type {type(space)} is not supported.") - - -def fill(data: SpaceDictNested, - fill_value: float) -> None: + space_nested_raw = _space_nested_raw(space_nested) + values = [] + for space in tree.flatten(space_nested_raw): + if isinstance(space, gym.spaces.Box): + value = np.zeros(space.shape, dtype=dtype or space.dtype) + elif isinstance(space, gym.spaces.Discrete): + # Note that np.array of 0 dim is returned in order to be mutable + value = np.array(0, dtype=dtype or np.int64) + elif isinstance(space, gym.spaces.MultiDiscrete): + value = np.zeros_like(space.nvec, dtype=dtype or np.int64) + elif isinstance(space, gym.spaces.MultiBinary): + value = np.zeros(space.n, dtype=dtype or np.int8) + else: + raise NotImplementedError( + f"Space of type {type(space)} is not supported.") + values.append(value) + return tree.unflatten_as(space_nested_raw, values) + + +def fill(data: DataNested, fill_value: float) -> None: """Set every element of 'data' from `Gym.Space` to scalar 'fill_value'. :param data: Data structure to update. :param fill_value: Value used to fill any scalar from the leaves. """ - if isinstance(data, np.ndarray): - data.fill(fill_value) - elif isinstance(data, dict): - for subdata in dict.values(data): - fill(subdata, fill_value) - elif isinstance(data, (tuple, list)): - for subdata in data: - fill(subdata, fill_value) - else: - if hasattr(data, '__dict__') or hasattr(data, '__slots__'): - raise NotImplementedError( - f"Data of type {type(data)} is not supported.") - raise ValueError("Data of immutable type is not supported.") + for value in tree.flatten(data): + try: + value.fill(fill_value) + except AttributeError as e: + raise ValueError( + "Leaves of 'data' structure must have type `np.ndarray`." + ) from e -def set_value(data: SpaceDictNested, - value: SpaceDictNested) -> None: +def set_value(data: DataNested, value: DataNested) -> None: """Partially set 'data' from `Gym.Space` to 'value'. It avoids memory allocation, so that memory pointers of 'data' remains @@ -169,59 +172,38 @@ def set_value(data: SpaceDictNested, .. note:: If 'data' is a dictionary, 'value' must be a subtree of 'data', - whose leaf values must be broadcastable with the ones of 'data'. + whose leaves must be broadcastable with the ones of 'data'. :param data: Data structure to partially update. :param value: Unset of data only containing fields to update. 
""" - if isinstance(data, np.ndarray): + for data_i, value_i in zip(tree.flatten(data), tree.flatten(value)): try: - data.flat[:] = value - except TypeError as e: - raise TypeError(f"Cannot cast '{data}' to '{value}'.") from e - elif isinstance(data, dict): - for field, subval in dict.items(value): - set_value(data[field], subval) - elif isinstance(data, (tuple, list)): - for subdata, subval in zip(data, value): - fill(subdata, subval) - else: - raise NotImplementedError( - f"Data of type {type(data)} is not supported.") + data_i.flat[:] = value_i + except AttributeError as e: + raise ValueError( + "Leaves of 'data' structure must have type `np.ndarray`." + ) from e -def copy(data: SpaceDictNested) -> SpaceDictNested: +def copy(data: DataNested) -> DataNested: """Shallow copy recursively 'data' from `Gym.Space`, so that only leaves are still references. :param data: Hierarchical data structure to copy without allocation. """ - if isinstance(data, dict): - return data.__class__(zip( - dict.keys(data), map(copy, dict.values(data)))) - if isinstance(data, (tuple, list)): - return data.__class__(map(copy, data)) - return data + return tree.unflatten_as(data, tree.flatten(data)) -def clip(space: gym.Space, value: SpaceDictNested) -> SpaceDictNested: +def clip(space_nested: gym.Space, + data: DataNested) -> DataNested: """Clamp value from Gym.Space to make sure it is within bounds. :param space: Gym.Space on which to operate. - :param value: Value to clamp. + :param data: Data to clamp. """ - if isinstance(space, spaces.Box): - return np.minimum(np.maximum(value, space.low), space.high) - if isinstance(space, spaces.Dict): - out = OrderedDict() - for field, subspace in dict.items(space.spaces): - out[field] = clip(subspace, value[field]) - return out - if isinstance(space, spaces.Tuple): - return (clip(subspace, subvalue) - for subspace, subvalue in zip(space, value)) - if isinstance(space, ( - spaces.Discrete, spaces.MultiDiscrete, spaces.MultiBinary)): - return value # No need to clip those spaces. - raise NotImplementedError( - f"Gym.Space of type {type(space)} is not supported.") + return tree.map_structure( + lambda space, value: + np.minimum(np.maximum(value, space.low), space.high) + if isinstance(space, gym.spaces.Box) else value, + _space_nested_raw(space_nested), data) diff --git a/python/gym_jiminy/common/gym_jiminy/common/wrappers/frame_stack.py b/python/gym_jiminy/common/gym_jiminy/common/wrappers/frame_stack.py index 03cc006d1..3d44378aa 100644 --- a/python/gym_jiminy/common/gym_jiminy/common/wrappers/frame_stack.py +++ b/python/gym_jiminy/common/gym_jiminy/common/wrappers/frame_stack.py @@ -9,7 +9,7 @@ import gym -from ..utils import SpaceDictNested, is_breakpoint, zeros +from ..utils import DataNested, is_breakpoint, zeros from ..bases import BasePipelineWrapper @@ -102,36 +102,37 @@ def _setup(self) -> None: for _ in range(self.num_stack): frames.append(zeros(leaf_space)) - def observation(self, observation: SpaceDictNested) -> SpaceDictNested: + def observation(self, observation: DataNested) -> DataNested: """ TODO: Write documentation. 
""" # Replace nested fields of original observation by the stacked ones for fields, frames in zip(self.leaf_fields_list, self._frames): - root_obs = reduce(lambda d, key: d[key], fields[:-1], - observation) + root_obs = reduce(lambda d, key: d[key], fields[:-1], observation) + assert isinstance(root_obs, dict) # Assert for type checker root_obs[fields[-1]] = np.stack(frames) # Return the stacked observation return observation - def compute_observation(self, measure: SpaceDictNested) -> SpaceDictNested: + def compute_observation(self, measure: DataNested) -> DataNested: """ TODO: Write documentation. """ # Backup the nested observation fields to stack for fields, frames in zip(self.leaf_fields_list, self._frames): leaf_obs = reduce(lambda d, key: d[key], fields, measure) + assert isinstance(leaf_obs, np.ndarray) # Assert for type checker frames.append(leaf_obs.copy()) # Copy to make sure not altered # Return the stacked observation return self.observation(measure) def step(self, - action: SpaceDictNested - ) -> Tuple[SpaceDictNested, float, bool, Dict[str, Any]]: + action: DataNested + ) -> Tuple[DataNested, float, bool, Dict[str, Any]]: observation, reward, done, info = self.env.step(action) return self.compute_observation(observation), reward, done, info - def reset(self, **kwargs: Any) -> SpaceDictNested: + def reset(self, **kwargs: Any) -> DataNested: observation = self.env.reset(**kwargs) self._setup() return self.compute_observation(observation) diff --git a/python/gym_jiminy/common/setup.py b/python/gym_jiminy/common/setup.py index 32410d91e..3f241b366 100644 --- a/python/gym_jiminy/common/setup.py +++ b/python/gym_jiminy/common/setup.py @@ -1,19 +1,19 @@ from itertools import chain +from pkg_resources import get_distribution from setuptools import setup, find_namespace_packages -version = __import__("jiminy_py").__version__ -version_required = ".".join(version.split(".")[:2]) +version = get_distribution('jiminy_py').version extras = { "zoo": [ - f"gym_jiminy_zoo~={version_required}", + f"gym_jiminy_zoo=={version}", ], "toolbox": [ - f"gym_jiminy_toolbox~={version_required}" + f"gym_jiminy_toolbox=={version}" ], "rllib": [ - f"gym_jiminy_rllib~={version_required}" + f"gym_jiminy_rllib=={version}" ] } extras["all"] = list(set(chain.from_iterable(extras.values()))) @@ -46,12 +46,12 @@ keywords="reinforcement-learning robotics gym jiminy", packages=find_namespace_packages(), install_requires=[ - f"jiminy-py~={version_required}", + f"jiminy-py=={version}", # Use to perform linear algebra computation. # 1.16 introduces new array function dispatcher which had significant # overhead if not handle carefully. "numpy>=1.16", - # Use internally to speedup computation of simple methods. + # Use internally to speedup computation of math methods. # Disable automatic forward compatibility with newer versions because # numba relies on llvmlite, for which wheels take some time before # being available on Pypi, making the whole installation process fail. 
diff --git a/python/gym_jiminy/envs/gym_jiminy/envs/__init__.py b/python/gym_jiminy/envs/gym_jiminy/envs/__init__.py index 01154ff5f..9bc0da938 100644 --- a/python/gym_jiminy/envs/gym_jiminy/envs/__init__.py +++ b/python/gym_jiminy/envs/gym_jiminy/envs/__init__.py @@ -6,7 +6,10 @@ from .spotmicro import SpotmicroJiminyEnv from .cassie import CassieJiminyEnv, CassiePDControlJiminyEnv from .anymal import ANYmalJiminyEnv, ANYmalPDControlJiminyEnv -from .atlas import AtlasJiminyEnv, AtlasPDControlJiminyEnv +from .atlas import (AtlasJiminyEnv, + AtlasReducedJiminyEnv, + AtlasPDControlJiminyEnv, + AtlasReducedPDControlJiminyEnv) __all__ = [ @@ -20,7 +23,9 @@ 'ANYmalJiminyEnv', 'ANYmalPDControlJiminyEnv', 'AtlasJiminyEnv', - 'AtlasPDControlJiminyEnv' + 'AtlasReducedJiminyEnv', + 'AtlasPDControlJiminyEnv', + 'AtlasReducedPDControlJiminyEnv' ] register( @@ -65,7 +70,15 @@ id='atlas-v0', entry_point='gym_jiminy.envs:AtlasJiminyEnv' ) +register( + id='atlas-reduced-v0', + entry_point='gym_jiminy.envs:AtlasReducedJiminyEnv' +) register( id='atlas-pid-v0', entry_point='gym_jiminy.envs:AtlasPDControlJiminyEnv' ) +register( + id='atlas-reduced-pid-v0', + entry_point='gym_jiminy.envs:AtlasReducedPDControlJiminyEnv' +) diff --git a/python/gym_jiminy/envs/gym_jiminy/envs/acrobot.py b/python/gym_jiminy/envs/gym_jiminy/envs/acrobot.py index b4148b936..a15f11836 100644 --- a/python/gym_jiminy/envs/gym_jiminy/envs/acrobot.py +++ b/python/gym_jiminy/envs/gym_jiminy/envs/acrobot.py @@ -9,7 +9,7 @@ import jiminy_py.core as jiminy from jiminy_py.simulator import Simulator -from gym_jiminy.common.utils import sample, SpaceDictNested +from gym_jiminy.common.utils import sample, DataNested from gym_jiminy.common.envs import BaseJiminyEnv, BaseJiminyGoalEnv @@ -198,7 +198,7 @@ def is_done(self) -> bool: # type: ignore[override] return bool(achieved_goal > desired_goal) def compute_command(self, - measure: SpaceDictNested, + measure: DataNested, action: np.ndarray ) -> np.ndarray: """Compute the motors efforts to apply on the robot. 
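Editor's note: the new environment ids become available through the standard gym registry once `gym_jiminy.envs` is imported. Hedged usage sketch; it requires the package, its robot data and a working jiminy installation.

import gym
import gym_jiminy.envs  # noqa: F401  (importing the package triggers the `register` calls above)

env = gym.make("atlas-reduced-pid-v0")
obs = env.reset()
for _ in range(5):
    obs, reward, done, info = env.step(env.action_space.sample())
    if done:
        obs = env.reset()
env.close()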
diff --git a/python/gym_jiminy/envs/gym_jiminy/envs/atlas.py b/python/gym_jiminy/envs/gym_jiminy/envs/atlas.py index 850a86464..cd111b96c 100644 --- a/python/gym_jiminy/envs/gym_jiminy/envs/atlas.py +++ b/python/gym_jiminy/envs/gym_jiminy/envs/atlas.py @@ -1,54 +1,68 @@ import os import numpy as np +from pathlib import Path from pkg_resources import resource_filename - -from pinocchio import neutral - -from gym_jiminy.common.envs import WalkerJiminyEnv +from typing import Any + +import jiminy_py.core as jiminy +from jiminy_py.robot import load_hardware_description_file +from jiminy_py.simulator import Simulator +from gym_jiminy.common.envs import BaseJiminyEnv, WalkerJiminyEnv +from gym_jiminy.common.envs.env_locomotion import ( + F_PROFILE_WAVELENGTH, F_PROFILE_PERIOD) from gym_jiminy.common.controllers import PDController from gym_jiminy.common.pipeline import build_pipeline +from pinocchio import neutral + # Sagittal hip angle of neutral configuration (:float [rad]) DEFAULT_SAGITTAL_HIP_ANGLE = 0.2 # Default simulation duration (:float [s]) SIMULATION_DURATION = 20.0 + # Ratio between the High-level neural network PID target update and Low-level # PID torque update (:int [NA]) HLC_TO_LLC_RATIO = 1 + # Stepper update period (:float [s]) -STEP_DT = 1.0e-3 +STEP_DT = 0.04 # PID proportional gains (one per actuated joint) -PID_KP = np.array([ - # Back: [X, Y, Z] - 4000.0, 12000.0, 1000.0, - # Left arm: [ElX, ElY, ShX, ShZ, WrX, WrY, WrY2] - 200.0, 100.0, 100.0, 500.0, 100.0, 10.0, 10.0, - # Left leg: [AkX, AkY, HpZ, HpX, HpY, KnY] - 1500.0, 10000.0, 4000.0, 4000.0, 1000.0, 1000.0, +PID_REDUCED_KP = np.array([ + # Left leg: [HpX, HpZ, HpY, KnY, AkY, AkX] + 5000.0, 5000.0, 8000.0, 4000.0, 8000.0, 5000.0, + # Right leg: [HpX, HpZ, HpY, KnY, AkY, AkX] + 5000.0, 5000.0, 8000.0, 4000.0, 8000.0, 5000.0]) +PID_REDUCED_KD = np.array([ + # Left leg: [HpX, HpZ, HpY, KnY, AkY, AkX] + 0.02, 0.01, 0.015, 0.01, 0.02, 0.01, + # Right leg: [HpX, HpZ, HpY, KnY, AkY, AkX] + 0.02, 0.01, 0.015, 0.01, 0.02, 0.01]) + +PID_FULL_KP = np.array([ # Neck: [Y] 1000.0, - # Right arm: [ElX, ElY, ShX, ShZ, WrX, WrY, WrY2] - 200.0, 100.0, 100.0, 500.0, 100.0, 10.0, 10.0, - # Right leg: [AkX, AkY, HpZ, HpX, HpY, KnY] - 1500.0, 10000.0, 4000.0, 4000.0, 1000.0, 1000.0]) -# PID derivative gains (one per actuated joint) -PID_KD = np.array([ - # Back: [X, Y, Z] - 0.08, 0.02, 0.01, - # Left arm: [ElX, ElY, ShX, ShZ, WrX, WrY, WrY2] - 0.02, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, - # Left leg: [AkX, AkY, HpZ, HpX, HpY, KnY] - 0.002, 0.02, 0.002, 0.002, 0.01, 0.01, + # Back: [Z, Y, X] + 5000.0, 8000.0, 5000.0, + # Left arm: [ShZ, ShX, ElY, ElX, WrY, WrX, WrY2] + 500.0, 100.0, 200.0, 500.0, 10.0, 100.0, 10.0, + # Right arm: [ShZ, ShX, ElY, ElX, WrY, WrX, WrY2] + 500.0, 100.0, 200.0, 500.0, 10.0, 100.0, 10.0, + # Lower body motors + *PID_REDUCED_KP]) +PID_FULL_KD = np.array([ # Neck: [Y] 0.01, - # Right arm: [ElX, ElY, ShX, ShZ, WrX, WrY, WrY2] - 0.02, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, - # Right leg: [AkX, AkY, HpZ, HpX, HpY, KnY] - 0.002, 0.02, 0.002, 0.002, 0.01, 0.01]) - + # Back: [Z, Y, X] + 0.01, 0.015, 0.02, + # Left arm: [ShZ, ShX, ElY, ElX, WrY, WrX, WrY2] + 0.01, 0.01, 0.01, 0.02, 0.01, 0.02, 0.02, + # Right arm: [ShZ, ShX, ElY, ElX, WrY, WrX, WrY2] + 0.01, 0.01, 0.01, 0.02, 0.01, 0.02, 0.02, + # Lower body motors + *PID_REDUCED_KD]) # Reward weight for each individual component that can be optimized REWARD_MIXTURE = { @@ -66,13 +80,13 @@ class AtlasJiminyEnv(WalkerJiminyEnv): - def __init__(self, debug: bool = False, 
**kwargs): + def __init__(self, debug: bool = False, **kwargs: Any) -> None: # Get the urdf and mesh paths data_root_dir = resource_filename( "gym_jiminy.envs", "data/bipedal_robots/atlas") urdf_path = os.path.join(data_root_dir, "atlas_v4.urdf") - # Initialize the walker environment + # Initialize the walker environment directly super().__init__(**{**dict( urdf_path=urdf_path, mesh_path=data_root_dir, @@ -83,6 +97,11 @@ def __init__(self, debug: bool = False, **kwargs): avoid_instable_collisions=True, debug=debug), **kwargs}) + # Remove unrelevant contact points + self.robot.remove_contact_points([ + name for name in self.robot.contact_frames_names + if int(name.split("_")[-1]) in (1, 3, 5, 7)]) + def _neutral(self): def joint_position_idx(joint_name): joint_idx = self.robot.pinocchio_model.getJointId(joint_name) @@ -91,17 +110,101 @@ def joint_position_idx(joint_name): qpos = neutral(self.robot.pinocchio_model) qpos[joint_position_idx('back_bky')] = DEFAULT_SAGITTAL_HIP_ANGLE qpos[joint_position_idx('l_arm_elx')] = DEFAULT_SAGITTAL_HIP_ANGLE - qpos[joint_position_idx('l_arm_shx')] = - np.pi / 2 + 1e-6 - qpos[joint_position_idx('l_arm_shz')] = np.pi / 4 - 1e-6 - qpos[joint_position_idx('l_arm_ely')] = np.pi / 4 + np.pi / 2 + qpos[joint_position_idx('l_arm_shx')] = - np.pi / 2.0 + qpos[joint_position_idx('l_arm_shz')] = np.pi / 4.0 + qpos[joint_position_idx('l_arm_ely')] = np.pi / 4.0 + np.pi / 2.0 qpos[joint_position_idx('r_arm_elx')] = - DEFAULT_SAGITTAL_HIP_ANGLE - qpos[joint_position_idx('r_arm_shx')] = np.pi / 2 - 1e-6 - qpos[joint_position_idx('r_arm_shz')] = - np.pi / 4 + 1e-6 - qpos[joint_position_idx('r_arm_ely')] = np.pi / 4 + np.pi / 2 + qpos[joint_position_idx('r_arm_shx')] = np.pi / 2.0 + qpos[joint_position_idx('r_arm_shz')] = - np.pi / 4.0 + qpos[joint_position_idx('r_arm_ely')] = np.pi / 4.0 + np.pi / 2.0 return qpos +class AtlasReducedJiminyEnv(WalkerJiminyEnv): + def __init__(self, debug: bool = False, **kwargs: Any) -> None: + # Get the urdf and mesh paths + data_root_dir = resource_filename( + "gym_jiminy.envs", "data/bipedal_robots/atlas") + urdf_path = os.path.join(data_root_dir, "atlas_v4.urdf") + + # Load the full models + pinocchio_model, collision_model, visual_model = \ + jiminy.build_models_from_urdf(urdf_path, + has_freeflyer=True, + build_visual_model=True, + mesh_package_dirs=[data_root_dir]) + + # Generate the reference configuration + def joint_position_idx(joint_name): + joint_idx = pinocchio_model.getJointId(joint_name) + return pinocchio_model.joints[joint_idx].idx_q + + qpos = neutral(pinocchio_model) + qpos[joint_position_idx('back_bky')] = DEFAULT_SAGITTAL_HIP_ANGLE + qpos[joint_position_idx('l_arm_elx')] = DEFAULT_SAGITTAL_HIP_ANGLE + qpos[joint_position_idx('l_arm_shx')] = - np.pi / 2.0 + qpos[joint_position_idx('l_arm_shz')] = np.pi / 4.0 + qpos[joint_position_idx('l_arm_ely')] = np.pi / 4.0 + np.pi / 2.0 + qpos[joint_position_idx('r_arm_elx')] = - DEFAULT_SAGITTAL_HIP_ANGLE + qpos[joint_position_idx('r_arm_shx')] = np.pi / 2.0 + qpos[joint_position_idx('r_arm_shz')] = - np.pi / 4.0 + qpos[joint_position_idx('r_arm_ely')] = np.pi / 4.0 + np.pi / 2.0 + + # Build the reduced models + joint_locked_indices = [ + pinocchio_model.getJointId(joint_name) + for joint_name in pinocchio_model.names[2:] + if "_leg_" not in joint_name] + pinocchio_model, collision_model, visual_model = \ + jiminy.build_reduced_models(pinocchio_model, + collision_model, + visual_model, + joint_locked_indices, + qpos) + + # Build the robot and load the hardware + robot = 
jiminy.Robot() + robot.initialize(pinocchio_model, collision_model, visual_model) + hardware_path = str( + Path(urdf_path).with_suffix('')) + '_hardware.toml' + config_path = str( + Path(urdf_path).with_suffix('')) + '_options.toml' + load_hardware_description_file(robot, hardware_path, True, False) + + # Instantiate a simulator and load the options + simulator = Simulator(robot) + simulator.import_options(config_path) + + # Set base class attributes manually + self.simu_duration_max = SIMULATION_DURATION + self.reward_mixture = { + k: v for k, v in REWARD_MIXTURE.items() if v > 0.0} + self.urdf_path = urdf_path + self.mesh_path = data_root_dir + self.hardware_path = hardware_path + self.config_path = config_path + self.std_ratio = { + k: v for k, v in STD_RATIO.items() if v > 0.0} + self.avoid_instable_collisions = True + self._f_xy_profile = [ + jiminy.PeriodicGaussianProcess( + F_PROFILE_WAVELENGTH, F_PROFILE_PERIOD), + jiminy.PeriodicGaussianProcess( + F_PROFILE_PERIOD, F_PROFILE_PERIOD)] + self._power_consumption_max = 0.0 + self._height_neutral = 0.0 + + # Initialize base environment + BaseJiminyEnv.__init__(self, simulator, **{**dict( + step_dt=STEP_DT, debug=debug), **kwargs}) + + # Remove unrelevant contact points + self.robot.remove_contact_points([ + name for name in self.robot.contact_frames_names + if int(name.split("_")[-1]) in (1, 3, 5, 7)]) + + AtlasPDControlJiminyEnv = build_pipeline(**{ 'env_config': { 'env_class': AtlasJiminyEnv @@ -110,8 +213,26 @@ def joint_position_idx(joint_name): 'block_class': PDController, 'block_kwargs': { 'update_ratio': HLC_TO_LLC_RATIO, - 'pid_kp': PID_KP, - 'pid_kd': PID_KD + 'pid_kp': PID_FULL_KP, + 'pid_kd': PID_FULL_KD + }, + 'wrapper_kwargs': { + 'augment_observation': False + }} + ] +}) + + +AtlasReducedPDControlJiminyEnv = build_pipeline(**{ + 'env_config': { + 'env_class': AtlasReducedJiminyEnv + }, + 'blocks_config': [{ + 'block_class': PDController, + 'block_kwargs': { + 'update_ratio': HLC_TO_LLC_RATIO, + 'pid_kp': PID_REDUCED_KP, + 'pid_kd': PID_REDUCED_KD }, 'wrapper_kwargs': { 'augment_observation': False diff --git a/python/gym_jiminy/envs/gym_jiminy/envs/cartpole.py b/python/gym_jiminy/envs/gym_jiminy/envs/cartpole.py index 89095bcb0..2ed7a8e0c 100644 --- a/python/gym_jiminy/envs/gym_jiminy/envs/cartpole.py +++ b/python/gym_jiminy/envs/gym_jiminy/envs/cartpole.py @@ -10,7 +10,7 @@ import jiminy_py.core as jiminy from jiminy_py.simulator import Simulator -from gym_jiminy.common.utils import sample, SpaceDictNested +from gym_jiminy.common.utils import sample, DataNested from gym_jiminy.common.envs import BaseJiminyEnv @@ -199,7 +199,7 @@ def is_done(self) -> bool: return (abs(x) > X_THRESHOLD) or (abs(theta) > THETA_THRESHOLD) def compute_command(self, - measure: SpaceDictNested, + measure: DataNested, action: np.ndarray ) -> np.ndarray: """Compute the motors efforts to apply on the robot. 
diff --git a/python/gym_jiminy/envs/gym_jiminy/envs/cassie.py b/python/gym_jiminy/envs/gym_jiminy/envs/cassie.py index dbc2ddd79..cf2c937d2 100644 --- a/python/gym_jiminy/envs/gym_jiminy/envs/cassie.py +++ b/python/gym_jiminy/envs/gym_jiminy/envs/cassie.py @@ -21,18 +21,20 @@ # Default simulation duration (:float [s]) SIMULATION_DURATION = 20.0 + # Ratio between the High-level neural network PID target update and Low-level # PID torque update (:int [NA]) HLC_TO_LLC_RATIO = 1 + # Stepper update period (:float [s]) -STEP_DT = 0.01 +STEP_DT = 0.04 # PID proportional gains (one per actuated joint) -PID_KP = np.array([50.0, 50.0, 50.0, 100.0, 10.0, - 50.0, 50.0, 50.0, 100.0, 10.0]) +PID_KP = np.array([50.0, 50.0, 50.0, 80.0, 8.0, + 50.0, 50.0, 50.0, 80.0, 8.0]) # PID derivative gains (one per actuated joint) -PID_KD = np.array([0.01, 0.02, 0.02, 0.05, 0.08, - 0.01, 0.02, 0.02, 0.05, 0.08]) +PID_KD = np.array([0.01, 0.02, 0.02, 0.03, 0.02, + 0.01, 0.02, 0.02, 0.03, 0.02]) # Reward weight for each individual component that can be optimized REWARD_MIXTURE = { @@ -67,6 +69,11 @@ def __init__(self, debug: bool = False, **kwargs): avoid_instable_collisions=False, debug=debug), **kwargs}) + # Remove unrelevant contact points + self.robot.remove_contact_points([ + name for name in self.robot.contact_frames_names + if int(name.split("_")[-1]) in (0, 1, 4, 5)]) + # Add missing pushrod close kinematic chain constraint M_pushrod_tarsus_right = SE3( np.eye(3), np.array([-0.12, 0.03, -0.005])) @@ -77,7 +84,8 @@ def __init__(self, debug: bool = False, **kwargs): self.robot.add_frame( "right_pushrod_hip", "hip_flexion_right", M_pushrod_hip_right) pushrod_right = DistanceConstraint( - "right_pushrod_tarsus", "right_pushrod_hip", 0.5012) + "right_pushrod_tarsus", "right_pushrod_hip", 0.5) + pushrod_right.baumgarte_freq = 2.0 self.robot.add_constraint("pushrod_right", pushrod_right) M_pushrod_tarsus_left = SE3( np.eye(3), np.array([-0.12, 0.03, 0.005])) @@ -88,14 +96,17 @@ def __init__(self, debug: bool = False, **kwargs): self.robot.add_frame( "left_pushrod_hip", "hip_flexion_left", M_pushrod_hip_left) pushrod_left = DistanceConstraint( - "left_pushrod_tarsus", "left_pushrod_hip", 0.5012) + "left_pushrod_tarsus", "left_pushrod_hip", 0.5) + pushrod_left.baumgarte_freq = 2.0 self.robot.add_constraint("pushrod_left", pushrod_left) # Replace knee to shin spring by fixed joint constraint right_spring_knee_to_shin = JointConstraint("knee_to_shin_right") + right_spring_knee_to_shin.baumgarte_freq = 20.0 self.robot.add_constraint( "right_spring_knee_to_shin", right_spring_knee_to_shin) left_spring_knee_to_shin = JointConstraint("knee_to_shin_left") + left_spring_knee_to_shin.baumgarte_freq = 20.0 self.robot.add_constraint( "left_spring_knee_to_shin", left_spring_knee_to_shin) diff --git a/python/gym_jiminy/envs/setup.py b/python/gym_jiminy/envs/setup.py index 28de3fdce..21f7dd2bf 100644 --- a/python/gym_jiminy/envs/setup.py +++ b/python/gym_jiminy/envs/setup.py @@ -27,10 +27,9 @@ ], keywords="reinforcement-learning robotics gym jiminy", packages=find_namespace_packages(), - package_data={"gym_jiminy.envs": ["data/**/*"]}, include_package_data=True, install_requires=[ - f"gym_jiminy~={version}" + f"gym_jiminy=={version}" ], zip_safe=False ) diff --git a/python/gym_jiminy/rllib/gym_jiminy/rllib/ppo.py b/python/gym_jiminy/rllib/gym_jiminy/rllib/ppo.py index daa14dd61..f3f3c98a6 100644 --- a/python/gym_jiminy/rllib/gym_jiminy/rllib/ppo.py +++ b/python/gym_jiminy/rllib/gym_jiminy/rllib/ppo.py @@ -1,5 +1,8 @@ """ TODO: Write 
documentation. """ +import math +import operator +from functools import reduce from typing import Dict, List, Type, Union, Optional, Any, Tuple import gym @@ -22,7 +25,13 @@ DEFAULT_CONFIG = PPOTrainer.merge_trainer_configs( DEFAULT_CONFIG, { - "noise_scale": 1.0, + "enable_adversarial_noise": False, + "spatial_noise_scale": 1.0, + "sgld_beta_inv": 1.0e-8, + "sgld_n_steps": 10, + "temporal_barrier_scale": 1.0, + "temporal_barrier_threshold": math.inf, + "temporal_barrier_reg": 0.0, "symmetric_policy_reg": 0.0, "caps_temporal_reg": 0.0, "caps_spatial_reg": 0.0, @@ -32,6 +41,98 @@ _allow_unknown_configs=True) +def get_action_mean(model: ModelV2, + dist_class: Type[TorchDistributionWrapper], + action_logits: torch.Tensor) -> torch.Tensor: + """ TODO: Write documentation. + """ + if issubclass(dist_class, TorchDiagGaussian): + action_mean, _ = torch.chunk(action_logits, 2, dim=1) + else: + action_dist = dist_class(action_logits, model) + action_mean = action_dist.deterministic_sample() + return action_mean + + +def get_adversarial_observation_sgld( + model: ModelV2, + dist_class: Type[TorchDistributionWrapper], + train_batch: SampleBatch, + noise_scale: float, + beta_inv: float, + n_steps: int, + action_true_mean: Optional[torch.Tensor] = None + ) -> torch.Tensor: + """ TODO: Write documentation. + """ + # Compute mean field action for true observation if not provided + if action_true_mean is None: + with torch.no_grad(): + action_true_logits, _ = model(train_batch) + action_true_mean = get_action_mean( + model, dist_class, action_true_logits) + else: + action_true_mean = action_true_mean.detach() + + # Shallow copy the original training batch. + # Be careful accessing fields using the original batch to properly keep + # track of accessed keys, which will be used to automatically discard + # useless components of policy's view requirements. + train_batch_copy = train_batch.copy(shallow=True) + + # Extract original observation + observation_true = train_batch["obs"] + + # Define observation upper and lower bounds for clipping + obs_lb_flat = observation_true - noise_scale + obs_ub_flat = observation_true + noise_scale + + # Adjust the step size based on noise scale and number of steps + step_eps = noise_scale / n_steps + + # Use Stochastic gradient Langevin dynamics (SGLD) to compute adversary + # observation perturbation. It consists in find nearby observations that + # maximize the mean action difference. + observation_noisy = observation_true + step_eps * 2.0 * ( + torch.empty_like(observation_true).bernoulli_(p=0.5) - 0.5) + for i in range(n_steps): + # Make sure gradient computation is required + observation_noisy.requires_grad_(True) + + # Compute mean field action for noisy observation + train_batch_copy["obs"] = observation_noisy + action_noisy_logits, _ = model(train_batch_copy) + action_noisy_mean = get_action_mean( + model, dist_class, action_noisy_logits) + + # Compute action different and associated gradient + loss = torch.mean(torch.sum( + (action_noisy_mean - action_true_mean) ** 2, dim=-1)) + loss.backward() + + # compute the noisy gradient for observation update + noise_factor = math.sqrt(2.0 * step_eps * beta_inv) / (i + 2) + observation_update = observation_noisy.grad + \ + noise_factor * torch.randn_like(observation_true) + + # Need to clear gradients before the backward() for policy_loss + observation_noisy.detach_() + + # Project gradient to step boundary. 
+ # Note that `sign` is used to be agnostic to the norm of the gradient, + # which would require to tune the learning rate or use an adaptive step + # method. Alternatively, the normalized gradient could be used, but it + # takes more iterations to converge in practice. + # TODO: The update step should be `step_eps` but it was found that + # using `noise_scale` converges faster. + observation_noisy += observation_update.sign() * noise_scale + + # clip into the upper and lower bounds + observation_noisy.clamp_(obs_lb_flat, obs_ub_flat) + + return observation_noisy + + def ppo_init(policy: Policy, obs_space: gym.spaces.Space, action_space: gym.spaces.Space, @@ -53,22 +154,12 @@ def ppo_init(policy: Policy, policy.view_requirements.update(caps_view_requirements) # Initialize extra loss - policy._mean_symmetric_policy_loss = 0.0 - policy._mean_temporal_caps_loss = 0.0 - policy._mean_spatial_caps_loss = 0.0 - policy._mean_global_caps_loss = 0.0 - policy._l2_reg_loss = 0.0 - - # Check if the policy has observation filter. If so, disable element-wise - # observation sensitivity. - obs_filter = policy.config["observation_filter"] - if obs_filter == "NoFilter": - policy._is_obs_normalized = False - elif obs_filter == "MeanStdFilter": - policy._is_obs_normalized = True - else: - raise NotImplementedError( - "Only 'NoFilter' and 'MeanStdFilter' are supported.") + policy._loss_temporal_barrier_reg = 0.0 + policy._loss_symmetric_policy_reg = 0.0 + policy._loss_caps_temporal_reg = 0.0 + policy._loss_caps_spatial_reg = 0.0 + policy._loss_caps_global_reg = 0.0 + policy._loss_l2_reg = 0.0 # Extract original observation space try: @@ -77,13 +168,6 @@ def ppo_init(policy: Policy, raise NotImplementedError( "Only 'Dict' original observation space is supported.") from e - # Convert to torch.Tensor observation sensitivity data if necessary - if not policy._is_obs_normalized: - for field, scale in observation_space.sensitivity.items(): - if not isinstance(scale, torch.Tensor): - scale = torch.from_numpy(scale).to(dtype=torch.float32) - observation_space.sensitivity[field] = scale - # Transpose and convert to torch.Tensor the observation mirroring data for field, mirror_mat in observation_space.mirror_mat.items(): if not isinstance(mirror_mat, torch.Tensor): @@ -111,13 +195,11 @@ def ppo_loss(policy: Policy, device = observation_true.device # Initialize the set of training batches to forward to the model - train_batches = {"original": train_batch} + train_batches = {"true": train_batch} - if policy.config["caps_temporal_reg"] > 0.0: - # Shallow copy the original training batch. - # Be careful accessing fields using the original batch to properly - # keep track of acessed keys, which will be used to discard useless - # components of policy's view requirements. 
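Editor's note: `get_adversarial_observation_sgld` above is tied to RLlib sample batches; the core SGLD loop can be illustrated on a bare PyTorch module. This is an illustrative sketch only, with a made-up deterministic policy; it mirrors the projected noisy-gradient ascent on the action discrepancy used above.

import math

import torch


def sgld_adversarial_obs(policy: torch.nn.Module,
                         obs: torch.Tensor,
                         noise_scale: float = 0.1,
                         beta_inv: float = 1.0e-8,
                         n_steps: int = 10) -> torch.Tensor:
    """Find an observation within `noise_scale` of `obs` that maximizes the
    squared change of the (deterministic) policy output. Illustrative only."""
    with torch.no_grad():
        action_true = policy(obs)
    obs_lb, obs_ub = obs - noise_scale, obs + noise_scale
    step_eps = noise_scale / n_steps

    # Random initialization on the vertices of the step-size box
    obs_noisy = obs + step_eps * 2.0 * (torch.empty_like(obs).bernoulli_(p=0.5) - 0.5)
    for i in range(n_steps):
        obs_noisy.requires_grad_(True)
        loss = torch.mean(torch.sum((policy(obs_noisy) - action_true) ** 2, dim=-1))
        loss.backward()

        # Noisy (Langevin) gradient ascent step, with decreasing injected noise
        noise_factor = math.sqrt(2.0 * step_eps * beta_inv) / (i + 2)
        update = obs_noisy.grad + noise_factor * torch.randn_like(obs)
        obs_noisy = obs_noisy.detach() + update.sign() * noise_scale

        # Project back onto the allowed neighborhood of the true observation
        obs_noisy = torch.min(torch.max(obs_noisy, obs_lb), obs_ub)
    policy.zero_grad(set_to_none=True)  # discard gradients accumulated on the policy itself
    return obs_noisy


# Usage (made-up toy policy):
policy = torch.nn.Linear(4, 2)
obs = torch.randn(8, 4)
obs_worst = sgld_adversarial_obs(policy, obs)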
+ if policy.config["caps_temporal_reg"] > 0.0 or \ + policy.config["temporal_barrier_reg"] > 0.0: + # Shallow copy the original training batch train_batch_copy = train_batch.copy(shallow=True) # Replace current observation by the previous one @@ -127,28 +209,32 @@ def ppo_loss(policy: Policy, # Append the training batches to the set train_batches["prev"] = train_batch_copy - if policy._spatial_reg > 0.0 or policy.config["caps_global_reg"] > 0.0: + if policy.config["caps_spatial_reg"] > 0.0 and \ + policy.config["enable_adversarial_noise"]: # Shallow copy the original training batch train_batch_copy = train_batch.copy(shallow=True) - # Generate noisy observation based on specified sensivity - if policy._is_obs_normalized: - observation_noisy = torch.normal( - observation_true, policy.config["noise_scale"]) - else: - offset = 0 - observation_noisy = observation_true.clone() - batch_dim = observation_true.shape[:-1] - observation_space = policy.observation_space.original_space - for field, scale in observation_space.sensitivity.items(): - scale = scale.to(device) - observation_space.sensitivity[field] = scale - unit_noise = torch.randn( - (*batch_dim, len(scale)), device=device) - slice_idx = slice(offset, offset + len(scale)) - observation_noisy[..., slice_idx].addcmul_( - policy.config["noise_scale"] * scale, unit_noise) - offset += len(scale) + # Compute adversarial observation maximizing action difference + observation_worst = get_adversarial_observation_sgld( + model, dist_class, train_batch, + policy.config["spatial_noise_scale"], + policy.config["sgld_beta_inv"], + policy.config["sgld_n_steps"]) + + # Replace current observation by the adversarial one + train_batch_copy["obs"] = observation_worst + + # Append the training batches to the set + train_batches["worst"] = train_batch_copy + + if policy.config["caps_global_reg"] > 0.0 or \ + not policy.config["enable_adversarial_noise"]: + # Shallow copy the original training batch + train_batch_copy = train_batch.copy(shallow=True) + + # Generate noisy observation + observation_noisy = torch.normal( + observation_true, policy.config["spatial_noise_scale"]) # Replace current observation by the noisy one train_batch_copy["obs"] = observation_noisy @@ -165,13 +251,29 @@ def ppo_loss(policy: Policy, observation_mirror = torch.empty_like(observation_true) observation_space = policy.observation_space.original_space for field, mirror_mat in observation_space.mirror_mat.items(): + field_shape = observation_space[field].shape + field_size = reduce(operator.mul, field_shape) + slice_idx = slice(offset, offset + field_size) + mirror_mat = mirror_mat.to(device) observation_space.mirror_mat[field] = mirror_mat - slice_idx = slice(offset, offset + len(mirror_mat)) - torch.mm(observation_true[..., slice_idx], - mirror_mat, - out=observation_mirror[..., slice_idx]) - offset += len(mirror_mat) + + if len(field_shape) > 1: + observation_true_slice = observation_true[:, slice_idx] \ + .reshape((-1, field_shape[0], field_shape[1])) \ + .swapaxes(1, 0) \ + .reshape((field_shape[0], -1)) + observation_mirror_slice = mirror_mat @ observation_true_slice + observation_mirror[:, slice_idx] = observation_mirror_slice \ + .reshape((field_shape[0], -1, field_shape[1])) \ + .swapaxes(1, 0) \ + .reshape((-1, field_size)) + else: + torch.mm(observation_true[..., slice_idx], + mirror_mat, + out=observation_mirror[..., slice_idx]) + + offset += field_size # Replace current observation by the mirrored one train_batch_copy["obs"] = observation_mirror @@ -179,17 +281,17 @@ def 
ppo_loss(policy: Policy, # Append the training batches to the set train_batches["mirrored"] = train_batch_copy - # Compute the logits for all training batches at onces + # Compute the action_logits for all training batches at onces train_batch_all = {} - for k in ['obs']: + for k in ["obs"]: train_batch_all[k] = torch.cat([ s[k] for s in train_batches.values()], dim=0) - logits_all, _ = model(train_batch_all) + action_logits_all, _ = model(train_batch_all) values_all = model.value_function() - logits = dict(zip(train_batches.keys(), - torch.chunk(logits_all, len(train_batches), dim=0))) - values = dict(zip(train_batches.keys(), - torch.chunk(values_all, len(train_batches), dim=0))) + action_logits = dict(zip(train_batches.keys(), torch.chunk( + action_logits_all, len(train_batches), dim=0))) + values = dict(zip(train_batches.keys(), torch.chunk( + values_all, len(train_batches), dim=0))) # Compute original ppo loss. # pylint: disable=unused-argument,missing-function-docstring @@ -198,9 +300,9 @@ class FakeModel: """ def __init__(self, model: ModelV2, - logits: torch.Tensor, + action_logits: torch.Tensor, value: torch.Tensor) -> None: - self._logits = logits + self._action_logits = action_logits self._value = value self._model = model @@ -209,92 +311,108 @@ def __getattr__(self, name: str) -> Any: def __call__(self, *args: Any, **kwargs: Any ) -> Tuple[torch.Tensor, List[torch.Tensor]]: - return self._logits, [] + return self._action_logits, [] def value_function(self, *args: Any, **kwargs: Any) -> torch.Tensor: return self._value - fake_model = FakeModel(model, logits["original"], values["original"]) + fake_model = FakeModel(model, action_logits["true"], values["true"]) total_loss = ppo_surrogate_loss( policy, fake_model, dist_class, train_batch) - # Extract mean of predicted action from logits. + # Extract mean of predicted action from action_logits. # No need to compute the perform model forward pass since the original # PPO loss is already doing it, so just getting back the last ouput. 
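Editor's note: once the various action means are available, the regularizers assembled in the following hunks reduce to a few tensor expressions. Illustrative sketch with made-up tensors; the expressions match the ones used below, and the scale/threshold values stand for the `temporal_barrier_scale` / `temporal_barrier_threshold` config keys added to `DEFAULT_CONFIG` above.

import torch

# Made-up action means, as would be obtained from `get_action_mean` on each batch
action_true_mean = torch.randn(8, 2)
action_prev_mean = torch.randn(8, 2)     # same policy, previous observation
action_noisy_mean = torch.randn(8, 2)    # same policy, randomly perturbed observation
action_mirror_mean = torch.randn(8, 2)   # mirrored policy output, mapped back to the original frame

scale, threshold = 10.0, 0.1             # temporal_barrier_scale / temporal_barrier_threshold

action_delta = (action_prev_mean - action_true_mean).abs()
loss_caps_temporal = torch.mean(action_delta)
loss_temporal_barrier = torch.mean(torch.exp(
    torch.clamp(scale * (action_delta - threshold), min=0.0, max=5.0)) - 1.0)
loss_caps_spatial = torch.mean(torch.sum(
    (action_noisy_mean - action_true_mean) ** 2, dim=1))
loss_caps_global = torch.mean(action_noisy_mean ** 2)
loss_symmetric_policy = torch.mean((action_mirror_mean - action_true_mean) ** 2)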
- action_logits = logits["original"] - if issubclass(dist_class, TorchDiagGaussian): - action_mean_true, _ = torch.chunk(action_logits, 2, dim=1) - else: - action_dist = dist_class(action_logits, model) - action_mean_true = action_dist.deterministic_sample() + action_true_logits = action_logits["true"] + action_true_mean = get_action_mean(model, dist_class, action_true_logits) # Compute the mean action corresponding to the previous observation - if policy.config["caps_temporal_reg"] > 0.0: - action_logits_prev = logits["prev"] - if issubclass(dist_class, TorchDiagGaussian): - action_mean_prev, _ = torch.chunk(action_logits_prev, 2, dim=1) - else: - action_dist_prev = dist_class(action_logits_prev, model) - action_mean_prev = action_dist_prev.deterministic_sample() + if policy.config["caps_temporal_reg"] > 0.0 or \ + policy.config["temporal_barrier_reg"] > 0.0: + action_prev_logits = action_logits["prev"] + action_prev_mean = get_action_mean( + model, dist_class, action_prev_logits) # Compute the mean action corresponding to the noisy observation - if policy._spatial_reg > 0.0 or policy.config["caps_global_reg"] > 0.0: - action_logits_noisy = logits["noisy"] - if issubclass(dist_class, TorchDiagGaussian): - action_mean_noisy, _ = torch.chunk(action_logits_noisy, 2, dim=1) - else: - action_dist_noisy = dist_class(action_logits_noisy, model) - action_mean_noisy = action_dist_noisy.deterministic_sample() + if policy.config["caps_global_reg"] > 0.0 or \ + not policy.config["enable_adversarial_noise"]: + action_noisy_logits = action_logits["noisy"] + action_noisy_mean = get_action_mean( + model, dist_class, action_noisy_logits) + + # Compute the mean action corresponding to the worst observation + if policy.config["caps_spatial_reg"] > 0.0 and \ + policy.config["enable_adversarial_noise"]: + action_worst_logits = action_logits["worst"] + action_worst_mean = get_action_mean( + model, dist_class, action_worst_logits) # Compute the mirrored mean action corresponding to the mirrored action if policy.config["symmetric_policy_reg"] > 0.0: - action_logits_mirror = logits["mirrored"] - if issubclass(dist_class, TorchDiagGaussian): - action_mean_mirror, _ = torch.chunk(action_logits_mirror, 2, dim=1) - else: - action_dist_mirror = dist_class(action_logits_mirror, model) - action_mean_mirror = action_dist_mirror.deterministic_sample() + action_mirror_logits = action_logits["mirrored"] + action_mirror_mean = get_action_mean( + model, dist_class, action_mirror_logits) action_mirror_mat = policy.action_space.mirror_mat.to(device) policy.action_space.mirror_mat = action_mirror_mat - action_mean_mirror = action_mean_mirror @ action_mirror_mat + action_mirror_mean = action_mirror_mean @ action_mirror_mat + + if policy.config["caps_temporal_reg"] > 0.0 or \ + policy.config["temporal_barrier_reg"] > 0.0: + # Compute action temporal delta + action_delta = (action_prev_mean - action_true_mean).abs() - if policy.config["caps_temporal_reg"] > 0.0: # Minimize the difference between the successive action mean - policy._mean_temporal_caps_loss = torch.mean( - (action_mean_prev - action_mean_true).abs()) + if policy.config["caps_temporal_reg"] > 0.0: + policy._loss_caps_temporal_reg = torch.mean(action_delta) # Add temporal smoothness loss to total loss total_loss += policy.config["caps_temporal_reg"] * \ - policy._mean_temporal_caps_loss + policy._loss_caps_temporal_reg + + # Add temporal barrier loss to total loss: + # exp(scale * (err - thr)) - 1.0 if err > thr else 0.0 + if policy.config["temporal_barrier_reg"] > 
0.0: + scale = policy.config["temporal_barrier_scale"] + threshold = policy.config["temporal_barrier_threshold"] + policy._loss_temporal_barrier_reg = torch.mean(torch.exp( + torch.clamp( + scale * (action_delta - threshold), min=0.0, max=5.0 + )) - 1.0) - if policy._spatial_reg > 0.0: + # Add spatial smoothness loss to total loss + total_loss += policy.config["temporal_barrier_reg"] * \ + policy._loss_temporal_barrier_reg + + if policy.config["caps_spatial_reg"] > 0.0: # Minimize the difference between the original action mean and the - # one corresponding to the noisy observation. - policy._mean_spatial_caps_loss = torch.mean( - torch.sum(( - action_mean_noisy - action_mean_true) ** 2, dim=-1) / - torch.sum(( - observation_noisy - observation_true) ** 2, dim=-1)) + # perturbed one. + if policy.config["enable_adversarial_noise"]: + policy._loss_caps_spatial_reg = torch.mean( + torch.sum((action_worst_mean - action_true_mean) ** 2, dim=1)) + else: + policy._loss_caps_spatial_reg = torch.mean( + torch.sum((action_noisy_mean - action_true_mean) ** 2, dim=1)) # Add spatial smoothness loss to total loss - total_loss += policy._spatial_reg * policy._mean_spatial_caps_loss + total_loss += policy.config["caps_spatial_reg"] * \ + policy._loss_caps_spatial_reg if policy.config["caps_global_reg"] > 0.0: # Minimize the magnitude of action mean - policy._mean_global_caps_loss = torch.mean(action_mean_noisy ** 2) + policy._loss_caps_global_reg = torch.mean(action_noisy_mean ** 2) # Add global smoothness loss to total loss total_loss += policy.config["caps_global_reg"] * \ - policy._mean_global_caps_loss + policy._loss_caps_global_reg if policy.config["symmetric_policy_reg"] > 0.0: # Minimize the assymetry of policy output - policy._mean_symmetric_policy_loss = torch.mean( - (action_mean_mirror - action_mean_true) ** 2) + policy._loss_symmetric_policy_reg = torch.mean( + (action_mirror_mean - action_true_mean) ** 2) # Add policy symmetry loss to total loss total_loss += policy.config["symmetric_policy_reg"] * \ - policy._mean_symmetric_policy_loss + policy._loss_symmetric_policy_reg if policy.config["l2_reg"] > 0.0: # Add actor l2-regularization loss @@ -302,10 +420,10 @@ def value_function(self, *args: Any, **kwargs: Any) -> torch.Tensor: for name, params in model.named_parameters(): if not name.endswith("bias"): l2_reg_loss += l2_loss(params) - policy._l2_reg_loss = l2_reg_loss + policy._loss_l2_reg = l2_reg_loss # Add l2-regularization loss to total loss - total_loss += policy.config["l2_reg"] * policy._l2_reg_loss + total_loss += policy.config["l2_reg"] * policy._loss_l2_reg return total_loss @@ -319,15 +437,17 @@ def ppo_stats(policy: Policy, # Add spatial CAPS loss to the report if policy.config["symmetric_policy_reg"] > 0.0: - stats_dict["symmetry"] = policy._mean_symmetric_policy_loss + stats_dict["symmetry"] = policy._loss_symmetric_policy_reg + if policy.config["temporal_barrier_reg"] > 0.0: + stats_dict["temporal_barrier"] = policy._loss_temporal_barrier_reg if policy.config["caps_temporal_reg"] > 0.0: - stats_dict["temporal_smoothness"] = policy._mean_temporal_caps_loss - if policy._spatial_reg > 0.0: - stats_dict["spatial_smoothness"] = policy._mean_spatial_caps_loss + stats_dict["temporal_smoothness"] = policy._loss_caps_temporal_reg + if policy.config["caps_spatial_reg"] > 0.0: + stats_dict["spatial_smoothness"] = policy._loss_caps_spatial_reg if policy.config["caps_global_reg"] > 0.0: - stats_dict["global_smoothness"] = policy._mean_global_caps_loss + stats_dict["global_smoothness"] = 
policy._loss_caps_global_reg if policy.config["l2_reg"] > 0.0: - stats_dict["l2_reg"] = policy._l2_reg_loss + stats_dict["l2_reg"] = policy._loss_l2_reg return stats_dict diff --git a/python/gym_jiminy/rllib/gym_jiminy/rllib/utilities.py b/python/gym_jiminy/rllib/gym_jiminy/rllib/utilities.py index fabf8bdd2..7592cc528 100644 --- a/python/gym_jiminy/rllib/gym_jiminy/rllib/utilities.py +++ b/python/gym_jiminy/rllib/gym_jiminy/rllib/utilities.py @@ -35,7 +35,7 @@ from ray.rllib.models.preprocessors import get_preprocessor from jiminy_py.viewer import play_logs_files -from gym_jiminy.common.utils import clip, SpaceDictNested +from gym_jiminy.common.utils import clip, DataNested try: import torch @@ -277,7 +277,7 @@ def build_policy_wrapper(policy: Policy, n_frames_stack: int = 1, clip_action: bool = False, explore: bool = False) -> Callable[ - [np.ndarray, Optional[float]], SpaceDictNested]: + [np.ndarray, Optional[float]], DataNested]: """Wrap a policy into a simple callable The internal state of the policy, if any, is managed internally. @@ -324,8 +324,8 @@ def build_policy_wrapper(policy: Policy, "prev_n_rew": np.zeros([1, n_frames_stack])} # Run the simulation - def forward(obs: SpaceDictNested, - reward: Optional[float]) -> SpaceDictNested: + def forward(obs: DataNested, + reward: Optional[float]) -> DataNested: nonlocal policy, obs_flat, input_dict, explore, clip_action # Compute flat observation diff --git a/python/gym_jiminy/rllib/setup.py b/python/gym_jiminy/rllib/setup.py index 3e773ab37..9d8f21d55 100644 --- a/python/gym_jiminy/rllib/setup.py +++ b/python/gym_jiminy/rllib/setup.py @@ -28,7 +28,7 @@ keywords="reinforcement-learning robotics gym jiminy", packages=find_namespace_packages(), install_requires=[ - f"gym_jiminy~={version}", + f"gym_jiminy=={version}", "ray[default,rllib]>=1.4.0", "plotext" ], diff --git a/python/gym_jiminy/toolbox/gym_jiminy/toolbox/wrappers/meta_envs.py b/python/gym_jiminy/toolbox/gym_jiminy/toolbox/wrappers/meta_envs.py index 9fb99443e..f857e614e 100644 --- a/python/gym_jiminy/toolbox/gym_jiminy/toolbox/wrappers/meta_envs.py +++ b/python/gym_jiminy/toolbox/gym_jiminy/toolbox/wrappers/meta_envs.py @@ -9,7 +9,7 @@ import gym from gym import spaces -from gym_jiminy.common.utils.helpers import SpaceDictNested +from gym_jiminy.common.utils.helpers import DataNested DataTreeT = Dict[Any, Tuple[Any, "DataTreeT"]] # type: ignore[misc] @@ -115,7 +115,7 @@ def sample_tasks(self, n_tasks: int) -> List[Tuple[Any, ...]]: tasks.append(tuple(task_path)) return tasks - def reset(self, **kwargs: Any) -> SpaceDictNested: + def reset(self, **kwargs: Any) -> DataNested: """ TODO: Write documentation. 
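
The regularizers added to the PPO loss above all reduce to a handful of tensor operations. Below is a minimal, self-contained PyTorch sketch of the CAPS temporal term and the exponential temporal barrier documented in the hunk ("exp(scale * (err - thr)) - 1.0 if err > thr else 0.0"); the tensor shapes and names are illustrative, not the actual policy attributes:

import torch

def temporal_smoothness_losses(action_true_mean: torch.Tensor,
                               action_prev_mean: torch.Tensor,
                               scale: float,
                               threshold: float):
    """Compute the CAPS temporal loss and the temporal barrier loss from the
    mean actions associated with two successive observations."""
    # Element-wise temporal variation of the mean action
    action_delta = (action_prev_mean - action_true_mean).abs()

    # CAPS temporal regularization: average magnitude of the variation
    loss_caps_temporal = torch.mean(action_delta)

    # Temporal barrier: zero below the threshold, exponential penalty above it,
    # with the exponent clamped (here to 5.0, as in the hunk) to stay finite
    loss_temporal_barrier = torch.mean(torch.exp(
        torch.clamp(scale * (action_delta - threshold), min=0.0, max=5.0)
        ) - 1.0)

    return loss_caps_temporal, loss_temporal_barrier

# Example usage on random data (batch of 32 actions of dimension 12)
a_true, a_prev = torch.randn(32, 12), torch.randn(32, 12)
caps, barrier = temporal_smoothness_losses(a_true, a_prev, scale=10.0, threshold=0.1)

Clamping the exponent keeps the barrier gradient bounded early in training, when the temporal delta can be far above the threshold.
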
""" # Sample task diff --git a/python/gym_jiminy/toolbox/setup.py b/python/gym_jiminy/toolbox/setup.py index b6a0a94a6..0d5c12bba 100644 --- a/python/gym_jiminy/toolbox/setup.py +++ b/python/gym_jiminy/toolbox/setup.py @@ -28,7 +28,7 @@ keywords="reinforcement-learning robotics gym jiminy", packages=find_namespace_packages(), install_requires=[ - f"gym_jiminy~={version}" + f"gym_jiminy=={version}" ], zip_safe=False ) diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_0.png b/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_0.png index 710690539..fce01393e 100644 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_0.png and b/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_0.png differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_1.png b/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_1.png deleted file mode 100644 index 666a0ce35..000000000 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_meshcat_1.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_0.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_0.png index 33b26645d..acf04f8a0 100644 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_0.png and b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_0.png differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_1.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_1.png index 40b1ec97a..d562d32e3 100644 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_1.png and b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_1.png differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_2.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_2.png index e5fe66f54..0096ea280 100644 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_2.png and b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_2.png differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_3.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_3.png deleted file mode 100644 index 6e24492f1..000000000 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_3.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_4.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_4.png deleted file mode 100755 index 383616ced..000000000 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_4.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_5.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_5.png deleted file mode 100644 index 52247bb98..000000000 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_5.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_6.png b/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_6.png deleted file mode 100755 index 651cceb5a..000000000 Binary files a/python/gym_jiminy/unit_py/data/atlas_standing_panda3d_6.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_0.png b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_0.png index 2475402bc..89f7351f2 100644 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_0.png and b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_0.png differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_1.png 
b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_1.png deleted file mode 100644 index b3e90d319..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_1.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_2.png b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_2.png deleted file mode 100644 index 55969b6ae..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_2.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_3.png b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_3.png deleted file mode 100644 index 3d21a3c6e..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_3.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_4.png b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_4.png deleted file mode 100644 index eb7ed383f..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_4.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_5.png b/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_5.png deleted file mode 100644 index 5cff8b071..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_meshcat_5.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_0.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_0.png index a1c912413..d0eaf2370 100644 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_0.png and b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_0.png differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_1.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_1.png index 330a88d8c..20f54a676 100644 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_1.png and b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_1.png differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_2.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_2.png index 6a1b386b2..14eb8912a 100644 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_2.png and b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_2.png differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_3.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_3.png deleted file mode 100755 index 7678d07be..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_3.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_4.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_4.png deleted file mode 100644 index 6902b7371..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_4.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_5.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_5.png deleted file mode 100644 index daa1151bf..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_5.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_6.png b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_6.png deleted file mode 100644 index 9b0fea73d..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_6.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_7.png 
b/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_7.png deleted file mode 100644 index cfeef3d9a..000000000 Binary files a/python/gym_jiminy/unit_py/data/cassie_standing_panda3d_7.png and /dev/null differ diff --git a/python/gym_jiminy/unit_py/test_pipeline_control.py b/python/gym_jiminy/unit_py/test_pipeline_control.py index a7850016c..4811567d3 100644 --- a/python/gym_jiminy/unit_py/test_pipeline_control.py +++ b/python/gym_jiminy/unit_py/test_pipeline_control.py @@ -41,8 +41,8 @@ def _test_pid_standing(self): action_init['Q'], action_init['V'] = encoder_data[ :, self.env.controller.motor_to_encoder] - # Run the simulation during 5s - while self.env.stepper_state.t < 5.0: + # Run the simulation during 12s + while self.env.stepper_state.t < 12.0: self.env.step(action_init) # Get the final posture of the robot as an RGB array @@ -82,13 +82,15 @@ def _test_pid_standing(self): log_data['.'.join(( 'HighLevelController', self.env.controller_name, name))] for name in self.env.controller.get_fieldnames()['V']], axis=-1) - self.assertTrue(np.all(np.abs(velocity_target[time > 4.0]) < 1e-9)) + self.assertTrue(np.all( + np.abs(velocity_target[time > time[-1] - 1.0]) < 1.0e-9)) # Check that the whole-body robot velocity is close to zero at the end velocity_mes = np.stack([ log_data['.'.join(('HighLevelController', name))] for name in self.env.robot.logfile_velocity_headers], axis=-1) - self.assertTrue(np.all(np.abs(velocity_mes[time > 4.0]) < 1e-3)) + self.assertTrue(np.all( + np.abs(velocity_mes[time > time[-1] - 1.0]) < 1.0e-3)) def test_pid_standing(self): for backend in ['meshcat', 'panda3d']: diff --git a/python/jiminy_py/setup.py b/python/jiminy_py/setup.py index eb22af578..a7e73a81d 100644 --- a/python/jiminy_py/setup.py +++ b/python/jiminy_py/setup.py @@ -84,15 +84,23 @@ def finalize_options(self) -> None: # >= 8.0 is required to support Python3.9. "pillow", # Add support of TypedDict to any Python 3 version. - "typing_extensions", + # 3.10.0 adds 'ParamSpec' that is required for pylint>=2.11.1. + "typing_extensions>=3.10.0", # Display elegant and versatile process bar. "tqdm", # Standard library for matrix algebra. "numpy", + # Use to operate on nested data structure conveniently + # - 0.1.5 introduces `tree.traverse` method that it used to operate on + # `gym.spaces.Dict`. + # - 0.1.6 adds support of Python 3.9 and unifies API method naming. + "dm-tree>=0.1.6", # Used internally for interpolation and filtering. # No wheel is distributed for PyPy on pypi, and pip is only able to - # build from source after install `libatlas-base-dev` system depdency. - "scipy", + # build from source after install `libatlas-base-dev` system + # dependency. + # 1.2.0 fixes `fmin_slsqp` optimizer returning wrong `imode` ouput. + "scipy>=1.2.0", # Standard library to generate figures. f"matplotlib{matplotlib_spec}", # Used internally to read HDF5 format log files. @@ -115,8 +123,7 @@ def finalize_options(self) -> None: # tile floor. "meshcat>=0.0.19", # Standalone mesh visualizer used as Viewer's backend. - # Panda3d>1.10.9 adds support of Nvidia EGL rendering without X11 - # server. + # 1.10.9 adds support of Nvidia EGL rendering without X11 server. # Panda3d is NOT supported by PyPy and cannot be built from source. 
"panda3d_viewer", # Used internally by Viewer to record video programmatically when diff --git a/python/jiminy_py/src/jiminy_py/dynamics.py b/python/jiminy_py/src/jiminy_py/dynamics.py index 967b67dbc..b986dcff0 100644 --- a/python/jiminy_py/src/jiminy_py/dynamics.py +++ b/python/jiminy_py/src/jiminy_py/dynamics.py @@ -170,12 +170,12 @@ def update_quantities(robot: jiminy.Model, if update_com: if velocity is None: - pin.centerOfMass(pnc_model, pnc_data, position) + kinematic_level = pin.KinematicLevel.POSITION elif acceleration is None: - pin.centerOfMass(pnc_model, pnc_data, position, velocity) + kinematic_level = pin.KinematicLevel.VELOCITY else: - pin.centerOfMass( - pnc_model, pnc_data, position, velocity, acceleration) + kinematic_level = pin.KinematicLevel.ACCELERATION + pin.centerOfMass(pnc_model, pnc_data, kinematic_level, False) if update_jacobian: if update_com: @@ -364,36 +364,43 @@ def compute_transform_contact( contact_frames_pos_rel.append(transform_rel.translation) # Order the contact points by depth - contact_frames_pos_rel = [contact_frames_pos_rel[i] for i in np.argsort( - [pos[2] for pos in contact_frames_pos_rel])] + contact_frames_order = np.argsort([ + pos[2] for pos in contact_frames_pos_rel]) + contact_frames_pos_rel = [ + contact_frames_pos_rel[i] for i in contact_frames_order] # Compute the contact plane normal if len(contact_frames_pos_rel) > 2: + # Try to compute a valid normal using three deepest points contact_edge_ref = \ contact_frames_pos_rel[0] - contact_frames_pos_rel[1] contact_edge_ref /= np.linalg.norm(contact_edge_ref) - for i in range(2, len(contact_frames_pos_rel)): - contact_edge_alt = \ - contact_frames_pos_rel[0] - contact_frames_pos_rel[i] - contact_edge_alt /= np.linalg.norm(contact_edge_alt) - normal_offset = np.cross(contact_edge_ref, contact_edge_alt) - if np.linalg.norm(normal_offset) > 0.2: # At least 11 degrees - break + contact_edge_alt = \ + contact_frames_pos_rel[0] - contact_frames_pos_rel[2] + contact_edge_alt /= np.linalg.norm(contact_edge_alt) + normal_offset = np.cross(contact_edge_ref, contact_edge_alt) + + # Make sure that the normal is valid, otherwise use the default one + if np.linalg.norm(normal_offset) < 0.6: + normal_offset = np.array([0.0, 0.0, 1.0]) + + # Make sure the normal is pointing upward if normal_offset[2] < 0.0: normal_offset *= -1.0 else: - normal_offset = np.array([0.0, 0.0, 1.0]) - - # Make sure that the normal is valid, otherwise use the default one - if np.linalg.norm(normal_offset) < 0.2: + # Fallback to world aligned if no reference can be computed normal_offset = np.array([0.0, 0.0, 1.0]) # Compute the translation and rotation to apply the touch the ground rot_offset = pin.Quaternion.FromTwoVectors( normal_offset, np.array([0.0, 0.0, 1.0])).matrix() if contact_frames_pos_rel: + contact_frame_pos = contact_frames_transform[ + contact_frames_order[0]].translation + pos_shift = ( + rot_offset @ contact_frame_pos)[2] - contact_frame_pos[2] pos_offset = np.array([ - 0.0, 0.0, -(rot_offset.T @ contact_frames_pos_rel[0])[2]]) + 0.0, 0.0, - pos_shift - contact_frames_pos_rel[0][2]]) else: pos_offset = np.zeros(3) transform_offset = pin.SE3(rot_offset, pos_offset) diff --git a/python/jiminy_py/src/jiminy_py/log.py b/python/jiminy_py/src/jiminy_py/log.py index 93976cc23..efaf927be 100644 --- a/python/jiminy_py/src/jiminy_py/log.py +++ b/python/jiminy_py/src/jiminy_py/log.py @@ -11,6 +11,7 @@ from typing import Callable, Tuple, Dict, Optional, Any, Sequence, Union import h5py +import tree import numpy as np import 
matplotlib.pyplot as plt from matplotlib.lines import Line2D @@ -20,6 +21,10 @@ from .state import State +FieldNested = Union[ # type: ignore + Dict[str, 'FieldNested'], Sequence['FieldNested'], str] # type: ignore + + class TrajectoryDataType(TypedDict, total=False): # List of State objects of increasing time. evolution_robot: Sequence[State] @@ -128,7 +133,7 @@ def parse_constant(key: str, value: str) -> Any: const_dict[key] = parse_constant(key, value) # Extract time - time = f['Global.Time'][()] / f['Global.Time'].attrs["unit"] + time = f['Global.Time'][()] / f['Global.Time'].attrs['unit'] # Load variables (1D time-series) data_dict = {'Global.Time': time} @@ -138,6 +143,50 @@ def parse_constant(key: str, value: str) -> Any: return data_dict, const_dict +def extract_data_from_log(log_data: Dict[str, np.ndarray], + fieldnames: FieldNested, + namespace: Optional[str] = 'HighLevelController', + *, + as_dict: bool = False) -> Optional[Union[ + Tuple[Optional[np.ndarray], ...], + Dict[str, Optional[np.ndarray]]]]: + """Extract values associated with a set of fieldnames in a specific + namespace. + + :param log_data: Data from the log file, in a dictionnary. + :param fieldnames: Structured fieldnames. + :param namespace: Namespace of the fieldnames. None to disable. + Optional: 'HighLevelController' by default. + :param keep_structure: Whether or not to return a dictionary mapping + flattened fieldnames to values. + Optional: True by default. + + :returns: + `np.ndarray` or None for each fieldname individually depending if it is + found or not. It + """ + # Key are the concatenation of the path and value + keys = [ + ".".join(map(str, filter( + lambda key: isinstance(key, str), (*fieldname_path, fieldname)))) + for fieldname_path, fieldname in tree.flatten_with_path(fieldnames)] + + # Extract value from log if it exists + values = [ + log_data.get(".".join(filter(None, (namespace, key))), None) + for key in keys] + + # Return None if no value was found + if not values or all(elem is None for elem in values): + return None + + if as_dict: + # Return flat mapping from fieldnameswithout prefix to scalar values + values = OrderedDict(zip(keys, values)) + + return values + + def extract_trajectory_data_from_log(log_data: Dict[str, np.ndarray], robot: jiminy.Model ) -> TrajectoryDataType: @@ -211,7 +260,7 @@ def extract_trajectory_data_from_log(log_data: Dict[str, np.ndarray], return traj_data -def build_robot_from_log_constants( +def build_robot_from_log( log_constants: Dict[str, str], mesh_package_dirs: Union[str, Sequence[str]] = ()) -> jiminy.Robot: """Build robot from log constants. 
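
The new `extract_data_from_log` helper relies on dm-tree to flatten arbitrarily nested fieldname structures before looking them up in the flat telemetry dictionary. A minimal sketch of that lookup, using purely illustrative fieldnames and log keys (real keys come from the robot telemetry):

import tree  # dm-tree

# Hypothetical structured fieldnames and flat log data, for illustration only
fieldnames = {"q": ["currentPositionLeg1", "currentPositionLeg2"],
              "v": ["currentVelocityLeg1"]}
log_data = {"HighLevelController.q.currentPositionLeg1": [0.0, 0.1],
            "HighLevelController.q.currentPositionLeg2": [0.0, 0.2],
            "HighLevelController.v.currentVelocityLeg1": [0.0, 1.0]}
namespace = "HighLevelController"

# Flatten the nested fieldnames while keeping track of their path, then build
# the fully-qualified telemetry keys the same way as `extract_data_from_log`
keys = [".".join(k for k in (*path, name) if isinstance(k, str))
        for path, name in tree.flatten_with_path(fieldnames)]
values = [log_data.get(".".join(filter(None, (namespace, key))))
          for key in keys]
print(dict(zip(keys, values)))

The `as_dict=True` mode of the actual helper returns the same kind of flat mapping as the final `dict(zip(keys, values))` above, while the default mode returns the bare sequence of values.
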
diff --git a/python/jiminy_py/src/jiminy_py/plot.py b/python/jiminy_py/src/jiminy_py/plot.py index fbac743b6..3e4eacd34 100644 --- a/python/jiminy_py/src/jiminy_py/plot.py +++ b/python/jiminy_py/src/jiminy_py/plot.py @@ -153,7 +153,8 @@ def adjust_layout(self, else: num_rows, num_cols = map(int, (num_rows_2, num_cols_2)) for i, ax in enumerate(axes, 1): - ax.change_geometry(num_rows, num_cols, i) + ax.set_subplotspec(plt.GridSpec( + num_rows, num_cols, figure=self.figure)[i - 1]) # Adjust layout: namely margins between subplots right_margin = 0.03 diff --git a/python/jiminy_py/src/jiminy_py/robot.py b/python/jiminy_py/src/jiminy_py/robot.py index 4dd872a05..bfe887be5 100644 --- a/python/jiminy_py/src/jiminy_py/robot.py +++ b/python/jiminy_py/src/jiminy_py/robot.py @@ -6,8 +6,8 @@ import tempfile import numpy as np import xml.etree.ElementTree as ET -from collections import OrderedDict -from typing import Optional +from collections import OrderedDict, defaultdict +from typing import Optional, Dict, Any import trimesh @@ -18,6 +18,7 @@ ForceSensor as force, ImuSensor as imu) +import hppfcl import pinocchio as pin from pinocchio.rpy import rpyToMatrix @@ -52,23 +53,58 @@ def _gcd(a: float, return a -def _string_to_array(txt: str) -> np.ndarray: - """Convert a string array of float delimited by spaces into a numpy array. - """ - return np.array(list(map(float, txt.split()))) +def _fix_urdf_mesh_path(urdf_path: str, + mesh_path: str, + output_root_path: Optional[str] = None): + """Generate an URDF with updated mesh paths. + :param urdf_path: Full path of the URDF file. + :param mesh_path: Root path of the meshes. + :param output_root_path: Root directory of the fixed URDF file. + Optional: temporary directory by default. -def _origin_info_to_se3(origin_info: Optional[ET.Element]) -> pin.SE3: - """Convert an XML element with str attribute 'xyz' [X, Y, Z] encoding the - translation and 'rpy' encoding [Roll, Pitch, Yaw] into a Pinocchio.SE3 - object. + :returns: Full path of the fixed URDF file. """ - if origin_info is not None: - origin_xyz = _string_to_array(origin_info.attrib['xyz']) - origin_rpy = _string_to_array(origin_info.attrib['rpy']) - return pin.SE3(rpyToMatrix(origin_rpy), origin_xyz) + # Extract all the mesh path that are not package path, continue if any + with open(urdf_path, 'r') as urdf_file: + urdf_contents = urdf_file.read() + mesh_tag = " 1: + if all(path.startswith('.') for path in pathlists): + mesh_path_orig = '.' + else: + mesh_path_orig = os.path.commonpath(pathlists) else: - return pin.SE3.Identity() + mesh_path_orig = os.path.dirname(next(iter(pathlists))) + if mesh_path == mesh_path_orig: + return urdf_path + + # Create the output directory + if output_root_path is None: + output_root_path = tempfile.mkdtemp() + fixed_urdf_dir = os.path.join( + output_root_path, "fixed_urdf" + mesh_path.translate( + str.maketrans({k: '_' for k in '/:'}))) + os.makedirs(fixed_urdf_dir, exist_ok=True) + fixed_urdf_path = os.path.join( + fixed_urdf_dir, os.path.basename(urdf_path)) + + # Override the root mesh path with the desired one + urdf_contents = urdf_contents.replace( + '"'.join((mesh_tag, mesh_path_orig)), + '"'.join((mesh_tag, mesh_path))) + with open(fixed_urdf_path, 'w') as f: + f.write(urdf_contents) + + return fixed_urdf_path def generate_hardware_description_file( @@ -148,25 +184,32 @@ def generate_hardware_description_file( Sensor=OrderedDict() ) + # Extract the root link. It is the one having no parent at all. 
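
The plot.py hunk above replaces `Axes.change_geometry`, which is deprecated in recent Matplotlib releases, by an explicit `set_subplotspec` call. A minimal sketch of regridding already-created axes this way, assuming Matplotlib >= 3.4:

import matplotlib.pyplot as plt

fig, _ = plt.subplots(1, 3)  # three axes initially laid out on a single row

# Move the existing axes onto a 2x2 grid without recreating them, which is
# what `TabbedFigure.adjust_layout` does when the number of subplots changes
grid_spec = plt.GridSpec(2, 2, figure=fig)
for i, ax in enumerate(fig.axes):
    ax.set_subplotspec(grid_spec[i])
fig.tight_layout()
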
+ links = set() + for link_descr in root.findall('./link'): + links.add(link_descr.attrib["name"]) + for joint_descr in root.findall('./joint'): + links.remove(joint_descr.find('./child').get('link')) + link_root = next(iter(links)) + # Extract the list of parent and child links, excluding the one related # to fixed link not having collision geometry, because they are likely not # "real" joint. - parent_links = set() - child_links = set() + links_parent = set() + links_child = set() for joint_descr in root.findall('./joint'): parent_link = joint_descr.find('./parent').get('link') child_link = joint_descr.find('./child').get('link') if joint_descr.get('type').casefold() != 'fixed' or root.find( f"./link[@name='{child_link}']/collision") is not None: - parent_links.add(parent_link) - child_links.add(child_link) + links_parent.add(parent_link) + links_child.add(child_link) - # Compute the root link and the leaf ones - if parent_links: - root_link = next(iter(parent_links.difference(child_links))) + # Determine leaf links. If there is no parent, then use root link instead. + if links_parent: + links_leaf = sorted(list(links_child.difference(links_parent))) else: - root_link = None - leaf_links = sorted(list(child_links.difference(parent_links))) + links_leaf = [link_root] # Parse the gazebo plugins, if any. # Note that it is only useful to extract "advanced" hardware, not basic @@ -266,26 +309,26 @@ def generate_hardware_description_file( logger.warning(f"Unsupported Gazebo plugin '{plugin}'") # Add IMU sensor to the root link if no Gazebo IMU sensor has been found - if root_link and imu.type not in hardware_info['Sensor'].keys(): + if link_root and imu.type not in hardware_info['Sensor'].keys(): hardware_info['Sensor'].setdefault(imu.type, {}).update({ - root_link: OrderedDict( - body_name=root_link, + link_root: OrderedDict( + body_name=link_root, frame_pose=6*[0.0]) }) # Add force sensors and collision bodies if no Gazebo plugin is available if not gazebo_plugins_found: - for leaf_link in leaf_links: + for link_leaf in links_leaf: # Add a force sensor hardware_info['Sensor'].setdefault(force.type, {}).update({ - leaf_link: OrderedDict( - body_name=leaf_link, + link_leaf: OrderedDict( + body_name=link_leaf, frame_pose=6*[0.0]) }) # Add the related body to the collision set if possible - if root.find(f"./link[@name='{leaf_link}']/collision") is not None: - collision_bodies_names.add(leaf_link) + if root.find(f"./link[@name='{link_leaf}']/collision") is not None: + collision_bodies_names.add(link_leaf) # Specify collision bodies and ground model in global config options hardware_info['Global']['collisionBodiesNames'] = \ @@ -319,8 +362,8 @@ def generate_hardware_description_file( # URDF standard, so it should be available on any URDF file. transmission_found = root.find('transmission') is not None for transmission_descr in root.iterfind('transmission'): - motor_info = OrderedDict() - sensor_info = OrderedDict() + # Initialize motor and sensor info + motor_info, sensor_info = OrderedDict(), OrderedDict() # Check that the transmission type is supported transmission_name = transmission_descr.get('name') @@ -429,58 +472,302 @@ def generate_hardware_description_file( toml.dump(hardware_info, f) -def _fix_urdf_mesh_path(urdf_path: str, - mesh_path: str, - output_root_path: Optional[str] = None): - """Generate an URDF with updated mesh paths. 
+def load_hardware_description_file( + robot: jiminy.Robot, + hardware_path: str, + avoid_instable_collisions: bool = True, + verbose: bool = True) -> Dict[str, Any]: + """Load hardware configuration file. - :param urdf_path: Full path of the URDF file. - :param mesh_path: Root path of the meshes. - :param output_root_path: Root directory of the fixed URDF file. - Optional: temporary directory by default. + If no collision geometry is associated with the body requiring collision + handling, then the visual geometry is used instead, if any. If none is + available despite at, then a single contact point is added at body frame. - :returns: Full path of the fixed URDF file. + For now, every mesh used for collision are replaced by the vertices of the + associated minimum volume bounding box, to avoid numerical instabilities. + + :param robot: Jiminy robot. + :param hardware_path: Path of Jiminy hardware description toml file. + :param avoid_instable_collisions: Prevent numerical instabilities by + replacing collision mesh by vertices of + associated minimal volume bounding box, + and primitive box by its vertices. + :param verbose: Whether or not to print warnings. + + :returns: Unused information available in hardware configuration file. """ - # Extract all the mesh path that are not package path, continue if any - with open(urdf_path, 'r') as urdf_file: - urdf_contents = urdf_file.read() - mesh_tag = " 1: - if all(path.startswith('.') for path in pathlists): - mesh_path_orig = '.' + hardware_info = toml.load(hardware_path) + extra_info = hardware_info.pop('Global', {}) + motors_info = hardware_info.pop('Motor', {}) + sensors_info = hardware_info.pop('Sensor', {}) + + # Extract the list of bodies having visual and collision meshes or + # primitives. + geometry_types = { + geom_type: {'primitive': set(), 'mesh': set()} + for geom_type in ('collision', 'visual')} + geometry_specs = { + geom_type: { + 'primitive': defaultdict(lambda: []), + 'mesh': defaultdict(lambda: [])} + for geom_type in ('collision', 'visual')} + for geom_model, geometry_types_i, geometry_specs_i in zip( + (robot.collision_model, robot.visual_model), + geometry_types.values(), + geometry_specs.values()): + for geometry_object in geom_model.geometryObjects: + frame_idx = geometry_object.parentFrame + frame_name = robot.pinocchio_model.frames[frame_idx].name + mesh_path = geometry_object.meshPath + is_mesh = any(char in mesh_path for char in ('\\', '/', '.')) + geom_type = 'mesh' if is_mesh else 'primitive' + geometry_types_i[geom_type].add(frame_name) + geometry_specs_i[geom_type][frame_name].append(geometry_object) + + # Checking the collision bodies, to make sure they are associated with + # supported collision geometries. If not, fixing the issue after + # throwing a warning. + collision_bodies_names = extra_info.pop( + 'collisionBodiesNames', []) + contact_frames_names = extra_info.pop('contactFramesNames', []) + for body_name in collision_bodies_names.copy(): + # Filter out the different cases. + # After this filter, we know that their is no collision geometry + # associated with the body but their is a visual mesh, or there is + # only collision meshes. + if body_name in geometry_types['collision']['mesh'] and \ + body_name in geometry_types['collision']['primitive']: + if not avoid_instable_collisions: + continue + logger.warning( + "Collision body having both primitive and mesh geometries " + "is not supported. 
Enabling only primitive collision for " + "this body.") + continue + elif body_name in geometry_types['collision']['primitive']: + pass + elif body_name in geometry_types['collision']['mesh']: + if not avoid_instable_collisions: + continue + logger.warning( + "Collision body associated with mesh geometry is not " + "supported for now. Replacing it by contact points at the " + "vertices of the minimal volume bounding box.") + elif body_name not in geometry_types['visual']['mesh']: + logger.warning( + "No visual mesh nor collision geometry associated with " + "the collision body. Fallback to adding a single contact " + "point at body frame.") + contact_frames_names.append(body_name) + continue else: - mesh_path_orig = os.path.commonpath(pathlists) - else: - mesh_path_orig = os.path.dirname(next(iter(pathlists))) - if mesh_path == mesh_path_orig: - return urdf_path + logger.warning( + "No collision geometry associated with the collision " + "body. Fallback to replacing it by contact points at " + "the vertices of the minimal volume bounding box of the " + "available visual meshes.") + + # Check if collision primitive box are available + collision_boxes_size, collision_boxes_origin = [], [] + for geometry_object in \ + geometry_specs['collision']['primitive'][body_name]: + geom = geometry_object.geometry + if isinstance(geom, hppfcl.Box): + collision_boxes_size.append(2.0 * geom.halfSide) + collision_boxes_origin.append(geometry_object.placement) + + # Replace the collision boxes by contact points, if any + if collision_boxes_size: + if not avoid_instable_collisions: + continue + logger.warning( + "Collision body associated with box geometry is not " + "numerically stable for now. Replacing it by contact " + "points at the vertices.") + + for i, (box_size, box_origin) in enumerate(zip( + collision_boxes_size, collision_boxes_origin)): + vertices = [e.flatten() for e in np.meshgrid(*[ + 0.5 * v * np.array([-1.0, 1.0]) for v in box_size])] + for j, (x, y, z) in enumerate(zip(*vertices)): + frame_name = "_".join(( + body_name, "CollisionBox", str(i), str(j))) + vertex_pos_rel = pin.SE3( + np.eye(3), np.array([x, y, z])) + frame_transform = box_origin.act(vertex_pos_rel) + robot.add_frame(frame_name, body_name, frame_transform) + contact_frames_names.append(frame_name) + elif body_name in geometry_types['collision']['primitive']: + # Do nothing if the primitive is not a box. It should be fine. 
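
The collision boxes flagged above are replaced by contact points located at their eight corners. A self-contained NumPy sketch of that corner enumeration (the real code wraps each corner in a `pin.SE3` and registers it with `robot.add_frame`; the helper name below is purely illustrative):

import itertools
import numpy as np

def box_corners(box_size, rotation, translation):
    """Return the 8 corners of a box, expressed in its parent body frame,
    given the box dimensions and its placement (rotation matrix, translation).
    """
    half_size = 0.5 * np.asarray(box_size)
    corners = []
    for signs in itertools.product((-1.0, 1.0), repeat=3):
        corner_local = half_size * np.asarray(signs)
        corners.append(rotation @ corner_local + translation)
    return corners

# Axis-aligned 20x10x4 cm box centered 5 cm above the body frame
corners = box_corners([0.2, 0.1, 0.04], np.eye(3), np.array([0.0, 0.0, 0.05]))
assert len(corners) == 8
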
+ continue - # Create the output directory - if output_root_path is None: - output_root_path = tempfile.mkdtemp() - fixed_urdf_dir = os.path.join( - output_root_path, "fixed_urdf" + mesh_path.translate( - str.maketrans({k: '_' for k in '/:'}))) - os.makedirs(fixed_urdf_dir, exist_ok=True) - fixed_urdf_path = os.path.join( - fixed_urdf_dir, os.path.basename(urdf_path)) + # Remove the body from the collision detection set + collision_bodies_names.remove(body_name) - # Override the root mesh path with the desired one - urdf_contents = urdf_contents.replace( - '"'.join((mesh_tag, mesh_path_orig)), - '"'.join((mesh_tag, mesh_path))) - with open(fixed_urdf_path, 'w') as f: - f.write(urdf_contents) + # Early return if collision box primitives have been replaced + if collision_boxes_size: + continue - return fixed_urdf_path + # Replace the collision body by contact points + for geometry_object in ( + geometry_specs['collision']['mesh'][body_name] + + geometry_specs['visual']['mesh'][body_name]): + # Extract info from geometry object + mesh_path = geometry_object.meshPath + mesh_scale = geometry_object.meshScale + mesh_origin = geometry_object.placement + + # Replace relative mesh path by absolute one + if mesh_path.startswith("package://"): + mesh_path_orig = mesh_path + for root_dir in robot.mesh_package_dirs: + mesh_path = mesh_path_orig.replace( + "package:/", root_dir) + if os.path.exists(mesh_path): + break + + # Compute the minimal volume bounding box, then add new frames to + # the robot model at its vertices and register contact points at + # their location. + try: + mesh = trimesh.load(mesh_path) + except ValueError: # Mesh file is not available + continue + box = mesh.bounding_box_oriented + for i in range(8): + frame_name = "_".join((body_name, "BoundingBox", str(i))) + frame_transform_rel = pin.SE3( + np.eye(3), mesh_scale * np.asarray(box.vertices[i])) + frame_transform = mesh_origin.act(frame_transform_rel) + robot.add_frame(frame_name, body_name, frame_transform) + contact_frames_names.append(frame_name) + + # Add the collision bodies and contact points. + # Note that it must be done before adding the sensors because + # Contact sensors requires contact points to be defined. + # Mesh collisions is not numerically stable for now, so disabling it. + # Note: Be careful, the order of the contact points is important, it + # changes the computation of the external forces, which is an iterative + # algorithm for impulse model, resulting in different simulation + # results. The order of the element of the set depends of the `hash` + # method of python, whose seed is randomly generated when starting the + # interpreter for security reason. As a result, the set must be sorted + # manually to ensure consistent results. 
+ robot.add_collision_bodies( + collision_bodies_names, ignore_meshes=avoid_instable_collisions) + robot.add_contact_points(sorted(list(set(contact_frames_names)))) + + # Add the motors to the robot + for motor_type, motors_descr in motors_info.items(): + for motor_name, motor_descr in motors_descr.items(): + # Make sure the motor can be instantiated + joint_name = motor_descr.pop('joint_name') + if not robot.pinocchio_model.existJointName(joint_name): + logger.warning( + f"'{joint_name}' is not a valid joint name.") + continue + + # Create the motor and attach it + motor = getattr(jiminy, motor_type)(motor_name) + robot.attach_motor(motor) + + # Initialize the motor + motor.initialize(joint_name) + + # Set the motor options + options = motor.get_options() + option_fields = options.keys() + for name, value in motor_descr.items(): + if name not in option_fields: + logger.warning( + f"'{name}' is not a valid option for the motor " + f"{motor_name} of type {motor_type}.") + options[name] = value + options['enableArmature'] = True + motor.set_options(options) + + # Add the sensors to the robot + for sensor_type, sensors_descr in sensors_info.items(): + for sensor_name, sensor_descr in sensors_descr.items(): + # Make sure the sensor can be instantiated + if sensor_type == encoder.type: + joint_name = sensor_descr.pop('joint_name') + if not robot.pinocchio_model.existJointName(joint_name): + logger.warning( + f"'{joint_name}' is not a valid joint name.") + continue + elif sensor_type == effort.type: + motor_name = sensor_descr.pop('motor_name') + if motor_name not in robot.motors_names: + logger.warning( + f"'{motor_name}' is not a valid motor name.") + continue + + # Create the sensor and attach it + sensor = getattr(jiminy, sensor_type)(sensor_name) + robot.attach_sensor(sensor) + + # Initialize the sensor + if sensor_type == encoder.type: + sensor.initialize(joint_name) + elif sensor_type == effort.type: + sensor.initialize(motor_name) + elif sensor_type == contact.type: + frame_name = sensor_descr.pop('frame_name') + sensor.initialize(frame_name) + elif sensor_type in [force.type, imu.type]: + # Create the frame and add it to the robot model + frame_name = sensor_descr.pop('frame_name', None) + + # Create a frame if a frame name has been specified. + # In such a case, the body name must be specified. 
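
Sensor placements in the hardware description file are given as a 6-vector `frame_pose = [x, y, z, roll, pitch, yaw]` that the sensor-loading code turns into a `pin.SE3` via `rpyToMatrix`. A small NumPy sketch of that conversion, assuming the usual Rz @ Ry @ Rx roll-pitch-yaw convention (as used by `pinocchio.rpy.rpyToMatrix`); the example pose is hypothetical:

import numpy as np

def rpy_to_matrix(roll, pitch, yaw):
    """Rotation matrix from roll-pitch-yaw angles (Rz @ Ry @ Rx convention)."""
    cr, sr = np.cos(roll), np.sin(roll)
    cp, sp = np.cos(pitch), np.sin(pitch)
    cy, sy = np.cos(yaw), np.sin(yaw)
    rot_x = np.array([[1.0, 0.0, 0.0], [0.0, cr, -sr], [0.0, sr, cr]])
    rot_y = np.array([[cp, 0.0, sp], [0.0, 1.0, 0.0], [-sp, 0.0, cp]])
    rot_z = np.array([[cy, -sy, 0.0], [sy, cy, 0.0], [0.0, 0.0, 1.0]])
    return rot_z @ rot_y @ rot_x

# Hypothetical IMU frame pose: 10 cm above the body, rotated 90 deg about x
frame_pose = np.array([0.0, 0.0, 0.1, np.pi / 2, 0.0, 0.0])
frame_trans, frame_rot = frame_pose[:3], rpy_to_matrix(*frame_pose[3:])
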
+ if frame_name is None: + # Get the body name + body_name = sensor_descr.pop('body_name') + + # Generate a frame name both intelligible and available + i = 0 + frame_name = "_".join(( + sensor_name, sensor_type, "Frame")) + while robot.pinocchio_model.existFrame(frame_name): + frame_name = "_".join(( + sensor_name, sensor_type, "Frame", str(i))) + i += 1 + + # Compute SE3 object representing the frame placement + frame_pose_xyzrpy = np.array( + sensor_descr.pop('frame_pose')) + frame_trans = frame_pose_xyzrpy[:3] + frame_rot = rpyToMatrix(frame_pose_xyzrpy[3:]) + frame_placement = pin.SE3(frame_rot, frame_trans) + + # Add the frame to the robot model + robot.add_frame(frame_name, body_name, frame_placement) + + # Initialize the sensor + sensor.initialize(frame_name) + else: + raise ValueError( + f"Unsupported sensor type {sensor_type}.") + + # Set the sensor options + options = sensor.get_options() + option_fields = options.keys() + for name, value in sensor_descr.items(): + if name not in option_fields: + logger.warning( + f"'{name}' is not a valid option for the sensor " + f"{sensor_name} of type {sensor_type}.") + options[name] = value + sensor.set_options(options) + + return extra_info class BaseJiminyRobot(jiminy.Robot): @@ -494,13 +781,6 @@ class BaseJiminyRobot(jiminy.Robot): required to fully characterize the motors, sensors, contact points and collision bodies, along with some of there properties. - If no collision geometry is associated with the body requiring collision - handling, then the visual geometry is used instead, if any. If none is - available despite at, then a single contact point is added at body frame. - - For now, every mesh used for collision are replaced by the vertices of the - associated minimum volume bounding box, to avoid numerical instabilities. - .. note:: Overload this class if you need finer-grained capability. @@ -512,7 +792,7 @@ class BaseJiminyRobot(jiminy.Robot): def __init__(self) -> None: super().__init__() self.extra_info = {} - self.urdf_path_orig = None + self._urdf_path_orig = None def initialize(self, urdf_path: str, @@ -538,18 +818,12 @@ def initialize(self, :param avoid_instable_collisions: Prevent numerical instabilities by replacing collision mesh by vertices of associated minimal volume bounding - box, and replacing primitive box by - its vertices. + box, primitive box by its vertices, + and primitive sphere by its center. :param verbose: Whether or not to print warnings. 
""" - # Handle verbosity level - if verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.ERROR) - # Backup the original URDF path - self.urdf_path_orig = urdf_path + self._urdf_path_orig = urdf_path # Fix the URDF mesh paths if mesh_path is not None: @@ -574,7 +848,7 @@ def initialize(self, # Load the hardware description file if available if hardware_path is None: hardware_path = str(pathlib.Path( - self.urdf_path_orig).with_suffix('')) + '_hardware.toml' + self._urdf_path_orig).with_suffix('')) + '_hardware.toml' self.hardware_path = hardware_path if not os.path.exists(hardware_path): @@ -584,271 +858,13 @@ def initialize(self, "hardware to the robot.\n Default file can be generated " "using 'generate_hardware_description_file' method.") return - hardware_info = toml.load(hardware_path) - self.extra_info = hardware_info.pop('Global', {}) - motors_info = hardware_info.pop('Motor', {}) - sensors_info = hardware_info.pop('Sensor', {}) - - # Parse the URDF - tree = ET.parse(urdf_path) - root = tree.getroot() - - # Extract the list of bodies having visual and collision meshes or - # primitives. - geometry_info = {'visual': {'primitive': None, 'mesh': None}, - 'collision': {'primitive': None, 'mesh': None}} - for geometry_type in geometry_info.keys(): - geometry_links = set(link.get('name') for link in root.findall( - f"./link/{geometry_type}/geometry/../..")) - mesh_links = [name for name in geometry_links if 'mesh' in set( - link[0].tag for link in root.find( - f"./link[@name='{name}']").findall( - f'{geometry_type}/geometry'))] - primitive_links = [name for name in geometry_links if set( - link[0].tag for link in root.find( - f"./link[@name='{name}']").findall( - f'{geometry_type}/geometry')).difference({'mesh'})] - geometry_info[geometry_type]['mesh'] = mesh_links - geometry_info[geometry_type]['primitive'] = primitive_links - - # Checking the collision bodies, to make sure they are associated with - # supported collision geometries. If not, fixing the issue after - # throwing a warning. - collision_bodies_names = self.extra_info.pop( - 'collisionBodiesNames', []) - contact_frames_names = self.extra_info.pop('contactFramesNames', []) - for body_name in collision_bodies_names.copy(): - # Filter out the different cases. - # After this filter, we know that their is no collision geometry - # associated with the body but their is a visual mesh, or there is - # only collision meshes. - if body_name in geometry_info['collision']['mesh'] and \ - body_name in geometry_info['collision']['primitive']: - if not avoid_instable_collisions: - continue - logger.warning( - "Collision body having both primitive and mesh geometries " - "is not supported. Enabling only primitive collision for " - "this body.") - continue - elif body_name in geometry_info['collision']['primitive']: - pass - elif body_name in geometry_info['collision']['mesh']: - if not avoid_instable_collisions: - continue - logger.warning( - "Collision body associated with mesh geometry is not " - "supported for now. Replacing it by contact points at the " - "vertices of the minimal volume bounding box.") - elif body_name not in geometry_info['visual']['mesh']: - logger.warning( - "No visual mesh nor collision geometry associated with " - "the collision body. Fallback to adding a single contact " - "point at body frame.") - contact_frames_names.append(body_name) - continue - else: - logger.warning( - "No collision geometry associated with the collision " - "body. 
Fallback to replacing it by contact points at " - "the vertices of the minimal volume bounding box of the " - "available visual meshes.") - # Check if collision primitive box are available - body_link = root.find(f"./link[@name='{body_name}']") - collision_box_sizes_info, collision_box_origin_info = [], [] - for link in body_link.findall('collision'): - box_link = link.find('geometry/box') - if box_link is not None: - collision_box_sizes_info.append(box_link.get('size')) - collision_box_origin_info.append(link.find('origin')) - - # Replace the collision boxes by contact points, if any - if collision_box_sizes_info: - if not avoid_instable_collisions: - continue - logger.warning( - "Collision body associated with box geometry is not " - "numerically stable for now. Replacing it by contact " - "points at the vertices.") - - for i, (box_size_info, box_origin_info) in enumerate(zip( - collision_box_sizes_info, collision_box_origin_info)): - box_size = list(map(float, box_size_info.split())) - box_origin = _origin_info_to_se3(box_origin_info) - vertices = [e.flatten() for e in np.meshgrid(*[ - 0.5 * v * np.array([-1.0, 1.0]) for v in box_size])] - for j, (x, y, z) in enumerate(zip(*vertices)): - frame_name = "_".join(( - body_name, "CollisionBox", str(i), str(j))) - vertex_pos_rel = pin.SE3( - np.eye(3), np.array([x, y, z])) - frame_transform = box_origin.act(vertex_pos_rel) - self.add_frame(frame_name, body_name, frame_transform) - contact_frames_names.append(frame_name) - elif body_name in geometry_info['collision']['primitive']: - # Do nothing if the primitive is not a box. It should be fine. - continue - # Remove the body from the collision detection set - collision_bodies_names.remove(body_name) - - # Early return if collision box primitives have been replaced - if collision_box_sizes_info: - continue - - # Extract meshes paths and origins. - if body_name in geometry_info['collision']['mesh']: - mesh_links = body_link.findall('collision') - else: - mesh_links = body_link.findall('visual') - mesh_paths = [link.find('geometry/mesh').get('filename') - for link in mesh_links] - mesh_scales = [_string_to_array(link.find('geometry/mesh').get( - 'scale', '1.0 1.0 1.0')) - for link in mesh_links] - mesh_origins = [] - for link in mesh_links: - mesh_origin_info = link.find('origin') - mesh_origin_transform = \ - _origin_info_to_se3(mesh_origin_info) - mesh_origins.append(mesh_origin_transform) - - # Replace the collision body by contact points - for mesh_path, mesh_scale, mesh_origin in zip( - mesh_paths, mesh_scales, mesh_origins): - # Replace relative mesh path by absolute one - if mesh_path.startswith("package://"): - mesh_path_orig = mesh_path - for root_dir in mesh_root_dirs: - mesh_path = mesh_path_orig.replace( - "package:/", root_dir) - if os.path.exists(mesh_path): - break - - # Compute the minimal volume bounding box, then add new frames - # to the robot model at its vertices and register contact - # points at their location. - try: - mesh = trimesh.load(mesh_path) - except ValueError: # Mesh file is not available - continue - box = mesh.bounding_box_oriented - for i in range(8): - frame_name = "_".join((body_name, "BoundingBox", str(i))) - frame_transform_rel = pin.SE3( - np.eye(3), mesh_scale * np.asarray(box.vertices[i])) - frame_transform = mesh_origin.act(frame_transform_rel) - self.add_frame(frame_name, body_name, frame_transform) - contact_frames_names.append(frame_name) - - # Add the collision bodies and contact points. 
- # Note that it must be done before adding the sensors because - # Contact sensors requires contact points to be defined. - # Mesh collisions is not numerically stable for now, so disabling it. - # Note: Be careful, the order of the contact points is important, it - # changes the computation of the external forces, which is an iterative - # algorithm for impulse model, resulting in different simulation - # results. The order of the element of the set depends of the `hash` - # method of python, whose seed is randomly generated when starting the - # interpreter for security reason. As a result, the set must be sorted - # manually to ensure consistent results. - self.add_collision_bodies( - collision_bodies_names, ignore_meshes=avoid_instable_collisions) - self.add_contact_points(sorted(list(set(contact_frames_names)))) - - # Add the motors to the robot - for motor_type, motors_descr in motors_info.items(): - for motor_name, motor_descr in motors_descr.items(): - # Create the sensor and attach it - motor = getattr(jiminy, motor_type)(motor_name) - self.attach_motor(motor) - - # Initialize the motor - joint_name = motor_descr.pop('joint_name') - motor.initialize(joint_name) - - # Set the motor options - options = motor.get_options() - option_fields = options.keys() - for name, value in motor_descr.items(): - if name not in option_fields: - logger.warning( - f"'{name}' is not a valid option for the motor " - f"{motor_name} of type {motor_type}.") - options[name] = value - options['enableArmature'] = True - motor.set_options(options) - - # Add the sensors to the robot - for sensor_type, sensors_descr in sensors_info.items(): - for sensor_name, sensor_descr in sensors_descr.items(): - # Create the sensor and attach it - sensor = getattr(jiminy, sensor_type)(sensor_name) - self.attach_sensor(sensor) - - # Initialize the sensor - if sensor_type == encoder.type: - joint_name = sensor_descr.pop('joint_name') - sensor.initialize(joint_name) - elif sensor_type == effort.type: - motor_name = sensor_descr.pop('motor_name') - sensor.initialize(motor_name) - elif sensor_type == contact.type: - frame_name = sensor_descr.pop('frame_name') - sensor.initialize(frame_name) - elif sensor_type in [force.type, imu.type]: - # Create the frame and add it to the robot model - frame_name = sensor_descr.pop('frame_name', None) - - # Create a frame if a frame name has been specified. - # In such a case, the body name must be specified. 
- if frame_name is None: - # Get the body name - body_name = sensor_descr.pop('body_name') - - # Generate a frame name both intelligible and available - i = 0 - frame_name = "_".join(( - sensor_name, sensor_type, "Frame")) - while self.pinocchio_model.existFrame(frame_name): - frame_name = "_".join(( - sensor_name, sensor_type, "Frame", str(i))) - i += 1 - - # Compute SE3 object representing the frame placement - frame_pose_xyzrpy = np.array( - sensor_descr.pop('frame_pose')) - frame_trans = frame_pose_xyzrpy[:3] - frame_rot = rpyToMatrix(frame_pose_xyzrpy[3:]) - frame_placement = pin.SE3(frame_rot, frame_trans) - - # Add the frame to the robot model - self.add_frame(frame_name, body_name, frame_placement) - - # Initialize the sensor - sensor.initialize(frame_name) - else: - raise ValueError( - f"Unsupported sensor type {sensor_type}.") - - # Set the sensor options - options = sensor.get_options() - option_fields = options.keys() - for name, value in sensor_descr.items(): - if name not in option_fields: - logger.warning( - f"'{name}' is not a valid option for the sensor " - f"{sensor_name} of type {sensor_type}.") - options[name] = value - sensor.set_options(options) + self.extra_info = load_hardware_description_file( + self, hardware_path, avoid_instable_collisions, verbose) def __del__(self) -> None: - if self.urdf_path != self.urdf_path_orig: + if self.urdf_path != self._urdf_path_orig: try: os.remove(self.urdf_path) except (PermissionError, FileNotFoundError): pass - - @property - def name(self) -> str: - return self.pinocchio_model.name diff --git a/python/jiminy_py/src/jiminy_py/simulator.py b/python/jiminy_py/src/jiminy_py/simulator.py index 75b485b8c..e9a1a7bc6 100644 --- a/python/jiminy_py/src/jiminy_py/simulator.py +++ b/python/jiminy_py/src/jiminy_py/simulator.py @@ -20,7 +20,7 @@ from .robot import (generate_hardware_description_file, BaseJiminyRobot) from .plot import TabbedFigure -from .log import read_log, build_robot_from_log_constants +from .log import read_log, build_robot_from_log, extract_data_from_log from .viewer import (TrajectoryDataType, interactive_mode, extract_replay_data_from_log_data, @@ -324,9 +324,11 @@ def seed(self, seed: int) -> None: # Make sure no simulation is running before setting the seed self.engine.stop() - # Set the seed through the engine instead of using - # `jiminy.reset_random_generator` to keep track of the seed in options, - # and thereby to log it in the telemetry as constant. + # Force to reset the seed of the low-level engine + jiminy.reset_random_generator(seed) + + # Set the seed in engine options to keep track of the seed and log it + # automatically in the telemetry as constant. 
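
With the `build_robot_from_log_constants` to `build_robot_from_log` rename imported above, rebuilding a robot from a standalone log file boils down to the following usage sketch (the log path is illustrative; `mesh_package_dirs` can be passed as a second argument when meshes are not found automatically):

from jiminy_py.log import read_log, build_robot_from_log

# Read a recorded simulation and rebuild the robot model stored in its
# constants, as done when replaying extra log files in `Simulator.replay`
log_data, log_constants = read_log("/tmp/jiminy_double_pendulum.hdf5")
robot = build_robot_from_log(log_constants)
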
engine_options = self.engine.get_options() engine_options["stepper"]["randomSeed"] = \ np.array(seed, dtype=np.dtype('uint32')) @@ -572,7 +574,7 @@ def replay(self, logs_data = [self.log_data] for log_file in extra_logs_files: log_data, log_constants = read_log(log_file) - robot = build_robot_from_log_constants( + robot = build_robot_from_log( log_constants, self.robot.mesh_package_dirs) robots.append(robot) logs_data.append(log_data) @@ -601,6 +603,12 @@ def replay(self, 'record_video_path', None) is not None, **kwargs}) + # Enable the ground profile if possible + if self.viewer_backend.startswith('panda3d'): + engine_options = self.engine.get_options() + ground_profile = engine_options["world"]["groundProfile"] + self.viewer.update_floor(ground_profile, show_meshes=False) + # Define sequence of viewer instances viewers = [self.viewer, *[None for _ in trajectories[:-1]]] @@ -644,26 +652,6 @@ def plot(self, Optional: False by default. :param kwargs: Extra keyword arguments to forward to `TabbedFigure`. """ - # Define some internal helper functions - def extract_fields(log_data: Dict[str, np.ndarray], - namespace: Optional[str], - fieldnames: Sequence[str], - ) -> Optional[Sequence[np.ndarray]]: - """Extract value associated with a set of fieldnames in a specific - namespace. - - :param log_data: Log file, as returned by `log_data` property. - :param namespace: Namespace of the fieldnames. None to disable. - :param fieldnames: Sequence of fieldnames. - """ - field_values = [log_data.get( - '.'.join((filter(None, (namespace, field)))), None) - for field in fieldnames] - if not field_values or all(v is None for v in field_values): - return None - else: - return field_values - # Extract log data log_data, log_constants = self.log_data, self.log_constants if not log_constants: @@ -708,25 +696,23 @@ def extract_fields(log_data: Dict[str, np.ndarray], name in field for name in self.robot.flexible_joints_names), fieldnames)) - values = extract_fields( - log_data, 'HighLevelController', fieldnames) + values = extract_data_from_log( + log_data, fieldnames, as_dict=True) if values is not None: tabs_data[' '.join(("State", fields_type))] = OrderedDict( - (field[len("current"):].replace(fields_type, ""), val) - for field, val in zip(fieldnames, values)) + (field[len("current"):].replace(fields_type, ""), elem) + for field, elem in values.items()) # Get motors efforts information - motor_effort = extract_fields( - log_data, 'HighLevelController', - self.robot.logfile_motor_effort_headers) + motor_effort = extract_data_from_log( + log_data, self.robot.logfile_motor_effort_headers) if motor_effort is not None: tabs_data['MotorEffort'] = OrderedDict( zip(self.robot.motors_names, motor_effort)) # Get command information - command = extract_fields( - log_data, 'HighLevelController', - self.robot.logfile_command_headers) + command = extract_data_from_log( + log_data, self.robot.logfile_command_headers) if command is not None: tabs_data['Command'] = OrderedDict( zip(self.robot.motors_names, command)) @@ -738,9 +724,11 @@ def extract_fields(log_data: Dict[str, np.ndarray], namespace = sensors_type if sensors_class.has_prefix else None if isinstance(sensors_fields, dict): for fields_prefix, fieldnames in sensors_fields.items(): - sensors_data = [extract_fields(log_data, namespace, [ - '.'.join((name, fields_prefix + field)) - for name in sensors_names]) for field in fieldnames] + sensors_data = [ + extract_data_from_log(log_data, [ + '.'.join((name, fields_prefix + field)) + for name in sensors_names], 
namespace) + for field in fieldnames] if sensors_data[0] is not None: type_name = ' '.join((sensors_type, fields_prefix)) tabs_data[type_name] = OrderedDict( @@ -748,9 +736,10 @@ def extract_fields(log_data: Dict[str, np.ndarray], for field, values in zip(fieldnames, sensors_data)) else: for field in sensors_fields: - sensors_data = extract_fields( - log_data, namespace, - ['.'.join((name, field)) for name in sensors_names]) + sensors_data = extract_data_from_log( + log_data, + ['.'.join((name, field)) for name in sensors_names], + namespace) if sensors_data is not None: tabs_data[' '.join((sensors_type, field))] = \ OrderedDict(zip(sensors_names, sensors_data)) @@ -812,9 +801,13 @@ def export_options(self, """ if config_path is None: if isinstance(self.robot, BaseJiminyRobot): - urdf_path = self.robot.urdf_path_orig + urdf_path = self.robot._urdf_path_orig else: urdf_path = self.robot.urdf_path + if not urdf_path: + raise ValueError( + "'config_path' must be provided if the robot is not " + "associated with any URDF.") config_path = str(pathlib.Path( urdf_path).with_suffix('')) + '_options.toml' with open(config_path, 'w') as f: @@ -844,9 +837,13 @@ def deep_update(source, overrides): if config_path is None: if isinstance(self.robot, BaseJiminyRobot): - urdf_path = self.robot.urdf_path_orig + urdf_path = self.robot._urdf_path_orig else: urdf_path = self.robot.urdf_path + if not urdf_path: + raise ValueError( + "'config_path' must be provided if the robot is not " + "associated with any URDF.") config_path = str(pathlib.Path( urdf_path).with_suffix('')) + '_options.toml' if not os.path.exists(config_path): diff --git a/python/jiminy_py/src/jiminy_py/viewer/meshcat/server.py b/python/jiminy_py/src/jiminy_py/viewer/meshcat/server.py index 88209d009..835b865ba 100644 --- a/python/jiminy_py/src/jiminy_py/viewer/meshcat/server.py +++ b/python/jiminy_py/src/jiminy_py/viewer/meshcat/server.py @@ -138,11 +138,11 @@ def handle_zmq(self, frames: Sequence[bytes]) -> None: if not self.websocket_pool and not self.comm_pool: self.zmq_socket.send(b"") msg = umsgpack.packb({"type": "ready"}) + self.is_waiting_ready_msg = True for websocket in self.websocket_pool: websocket.write_message(msg, binary=True) for comm_id in self.comm_pool: self.forward_to_comm(comm_id, msg) - self.is_waiting_ready_msg = True else: super().handle_zmq(frames) diff --git a/python/jiminy_py/src/jiminy_py/viewer/meshcat/wrapper.py b/python/jiminy_py/src/jiminy_py/viewer/meshcat/wrapper.py index 1289f316c..453ea85d0 100644 --- a/python/jiminy_py/src/jiminy_py/viewer/meshcat/wrapper.py +++ b/python/jiminy_py/src/jiminy_py/viewer/meshcat/wrapper.py @@ -7,7 +7,9 @@ import pathlib import umsgpack import threading +import tornado.gen import tornado.ioloop +from pkg_resources import parse_version as version from contextlib import redirect_stdout, redirect_stderr from typing import Optional, Sequence, Dict, Any @@ -33,15 +35,18 @@ # is tricky to do it properly, so instead every message is process # without distinction. import ipykernel - from pkg_resources import parse_version as version - if version(ipykernel.__version__) >= version("5.0"): - import tornado.gen + ipykernel_version_major = version(ipykernel.__version__).major + if ipykernel_version_major == 5: from ipykernel.kernelbase import SHELL_PRIORITY + elif ipykernel_version_major > 5: + logging.warning( + "ipykernel version 6.X.Y detected. The viewer works optimally " + "with ipykernel 5.X.Y. 
Revert to old version in case of issues.") else: logging.warning( "Old ipykernel version < 5.0 detected. Please do not schedule " "other cells for execution while the viewer is busy otherwise " - "it will be not executed properly.\nUpdate to a newer version " + "it will be not executed properly. Update to a newer version " "if possible to avoid such limitation.") class CommProcessor: @@ -54,11 +59,10 @@ class CommProcessor: kernel state. This method only processes comm messages to avoid such side effects. """ - def __init__(self): from IPython import get_ipython self.__kernel = get_ipython().kernel - self.__old_api = version(ipykernel.__version__) < version("5.0") + self.__old_api = version(ipykernel.__version__).major < 5 if self.__old_api: logging.warning( "Pre/post kernel handler hooks must be disable for the " @@ -104,9 +108,9 @@ def __call__(self, unsafe: bool = False) -> None: # One must go through all the messages to keep them in order for _ in range(qsize): - priority, t, dispatch, args = \ + *priority, t, dispatch, args = \ self.__kernel.msg_queue.get_nowait() - if priority <= SHELL_PRIORITY: + if not priority or priority[0] <= SHELL_PRIORITY: # New message: reading message without deserializing its # content at this point for efficiency. _, msg = self.__kernel.session.feed_identities( @@ -136,14 +140,19 @@ def __call__(self, unsafe: bool = False) -> None: continue # The message is not related to meshcat comm, so putting it - # back in the queue after lowering its priority so that it is - # send at the "end of the queue", ie just at the right place: - # after the next unchecked messages, after the other messages - # already put back in the queue, but before the next one to go - # the same way. Note that every shell messages have - # SHELL_PRIORITY by default. - self.__kernel.msg_queue.put_nowait( - (SHELL_PRIORITY + 1, t, dispatch, args)) + # back in the queue at the "end of the queue", ie just at the + # right place: after the next unchecked messages, after the + # other messages already put back in the queue, but before the + # next one to go the same way. + # Note that its priority is also lowered, so that the next time + # it can be directly forwarded without analyzing its content, + # since every shell messages have SHELL_PRIORITY by default. + # Note that ipykernel 6 removed priority feature. 
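A minimal sketch of the two queue-entry layouts that the star-unpacking earlier in this method has to cope with, before the if priority: branch below. The tuple contents and the SHELL_PRIORITY value are placeholders inferred from the unpacking itself, not taken from the ipykernel sources.

    SHELL_PRIORITY = 10  # placeholder constant, normally imported from ipykernel.kernelbase

    def requeue(entry):
        # ipykernel 5 entries look like (priority, t, dispatch, args);
        # ipykernel 6 dropped the leading priority, hence the star-unpacking.
        *priority, t, dispatch, args = entry
        if priority:
            # ipykernel 5: put it back with a lowered priority
            return (SHELL_PRIORITY + 1, t, dispatch, args)
        # ipykernel 6: put it back unchanged
        return (t, dispatch, args)

    print(requeue((SHELL_PRIORITY, 0.0, print, ())))  # 4-tuple, ipykernel 5 style
    print(requeue((0.0, print, ())))                  # 3-tuple, ipykernel 6 style
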
+ if priority: + self.__kernel.msg_queue.put_nowait( + (SHELL_PRIORITY + 1, t, dispatch, args)) + else: + self.__kernel.msg_queue.put_nowait((t, dispatch, args)) self.qsize_old = self.__kernel.msg_queue.qsize() # Ensure the eventloop wakes up diff --git a/python/jiminy_py/src/jiminy_py/viewer/panda3d/panda3d_visualizer.py b/python/jiminy_py/src/jiminy_py/viewer/panda3d/panda3d_visualizer.py index bdcd8559b..4d59f36d1 100644 --- a/python/jiminy_py/src/jiminy_py/viewer/panda3d/panda3d_visualizer.py +++ b/python/jiminy_py/src/jiminy_py/viewer/panda3d/panda3d_visualizer.py @@ -4,7 +4,6 @@ import sys import math import array -import pickle import warnings import xml.etree.ElementTree as ET from datetime import datetime @@ -17,15 +16,15 @@ from matplotlib.patches import Patch from panda3d.core import ( - NodePath, Point3, Vec3, Mat4, Quat, LQuaternion, Geom, GeomEnums, GeomNode, - GeomVertexData, GeomTriangles, GeomVertexArrayFormat, GeomVertexFormat, - GeomVertexWriter, CullFaceAttrib, GraphicsWindow, PNMImage, InternalName, - OmniBoundingVolume, CompassEffect, BillboardEffect, Filename, TextNode, - Texture, TextureStage, PNMImageHeader, PGTop, Camera, PerspectiveLens, - TransparencyAttrib, OrthographicLens, ClockObject, GraphicsPipe, - WindowProperties, FrameBufferProperties, loadPrcFileData, AntialiasAttrib, + NodePath, Point3, Vec3, Vec4, Mat4, Quat, LQuaternion, Material, Geom, + GeomEnums, GeomNode, GeomVertexData, GeomTriangles, GeomVertexArrayFormat, + GeomVertexFormat, GeomVertexWriter, CullFaceAttrib, GraphicsWindow, + PNMImage, InternalName, OmniBoundingVolume, CompassEffect, BillboardEffect, + Filename, TextNode, Texture, TextureStage, PNMImageHeader, PGTop, Camera, + PerspectiveLens, TransparencyAttrib, OrthographicLens, ClockObject, + GraphicsPipe, WindowProperties, FrameBufferProperties, AntialiasAttrib, CollisionNode, CollisionRay, CollisionTraverser, CollisionHandlerQueue, - RenderModeAttrib) + RenderModeAttrib, loadPrcFileData) from direct.showbase.ShowBase import ShowBase from direct.gui.OnscreenImage import OnscreenImage from direct.gui.OnscreenText import OnscreenText @@ -177,7 +176,7 @@ def make_cone(num_sides: int = 16) -> Geom: # Define vertex format vformat = GeomVertexFormat.get_v3n3t2() vdata = GeomVertexData('vdata', vformat, Geom.UH_static) - vdata.uncleanSetNumRows(num_sides + 3) + vdata.unclean_set_num_rows(num_sides + 3) vertex = GeomVertexWriter(vdata, 'vertex') normal = GeomVertexWriter(vdata, 'normal') tcoord = GeomVertexWriter(vdata, 'texcoord') @@ -214,41 +213,42 @@ def make_cone(num_sides: int = 16) -> Geom: return geom -def make_height_map(height_map: Callable[ - [np.ndarray], Tuple[float, np.ndarray]], - grid_size: float, - grid_unit: float) -> Geom: +def make_heightmap(heightmap: np.ndarray) -> Geom: """Create height map. 
""" - # Compute grid size and number of vertices - grid_dim = int(np.ceil(grid_size / grid_unit)) + 1 - num_vertices = grid_dim ** 2 + # Compute the number of vertices and triangles, assuming it is square + dim = int(math.sqrt(heightmap.shape[0])) + num_vertices = int(dim * dim) + num_triangles = int(2 * (dim - 1) ** 2) - # Define vertex format - vformat = GeomVertexFormat.get_v3n3t2() + # Allocation vertex + vformat = GeomVertexFormat.get_v3n3() vdata = GeomVertexData('vdata', vformat, Geom.UH_static) - vdata.uncleanSetNumRows(num_vertices) - vertex = GeomVertexWriter(vdata, 'vertex') - normal = GeomVertexWriter(vdata, 'normal') - tcoord = GeomVertexWriter(vdata, 'texcoord') + vdata.unclean_set_num_rows(num_vertices) - # # Add grid points - for x in np.arange(grid_dim) * grid_unit - grid_size / 2.0: - for y in np.arange(grid_dim) * grid_unit - grid_size / 2.0: - height, normal_i = height_map(np.array([x, y, 0.0])) - vertex.addData3(x, y, height) - normal.addData3(*normal_i) - tcoord.addData2(x, y) + # Set vertex data + vdata_view = memoryview(vdata.modify_array(0)).cast("B") + vdata_view[:] = array.array("f", heightmap.reshape((-1,))).tobytes() # Make triangles prim = GeomTriangles(Geom.UH_static) - for j in range(grid_dim): - for i in range(grid_dim - 1): - k = j * grid_dim + i - if j < grid_dim - 1: - prim.add_vertices(k + 1, k, k + grid_dim) - if j > 0: - prim.add_vertices(k, k + 1, k + 1 - grid_dim) + prim.set_index_type(Geom.NT_uint32) + tris_array = prim.modify_vertices() + indices = np.empty((num_triangles, 3), dtype=np.uint32) + tri_idx = 0 + for i in range(dim - 1): + for j in range(1, dim - 1): + k = j * dim + i + indices[tri_idx] = k + 1, k, k + dim + indices[tri_idx + 1] = k, k + 1, k + 1 - dim + tri_idx += 2 + k = (dim - 1) * dim + i + indices[tri_idx] = i + 1, i, i + dim + indices[tri_idx + 1] = k, k + 1, k + 1 - dim + tri_idx += 2 + tris_array.unclean_set_num_rows(indices.size) + memview = memoryview(tris_array) + memview[:] = array.array("I", indices.reshape((-1,))) # Create geometry object geom = Geom(vdata) @@ -734,13 +734,12 @@ def _make_axes(self) -> NodePath: return node def _make_floor(self, - height_map: Optional[Callable[ - [np.ndarray], Tuple[float, np.ndarray]]] = None, - grid_unit: float = 0.2) -> NodePath: + heightmap: Optional[np.ndarray] = None, + show_meshes: bool = False) -> NodePath: model = GeomNode('floor') node = self.render.attach_new_node(model) - if height_map is None: + if heightmap is None: for xi in range(-10, 11): for yi in range(-10, 11): tile = GeomNode(f"tile-{xi}.{yi}") @@ -748,32 +747,47 @@ def _make_floor(self, tile_path = node.attach_new_node(tile) tile_path.set_pos((xi, yi, 0.0)) if (xi + yi) % 2: - tile_path.set_color((0.95, 0.95, 1.0, 1)) + tile_path.set_color((0.95, 0.95, 1.0, 1.0)) else: - tile_path.set_color((0.13, 0.13, 0.2, 1)) + tile_path.set_color((0.13, 0.13, 0.2, 1.0)) else: - model.add_geom(make_height_map(height_map, 20.0, grid_unit)) - render_attrib = node.get_state().get_attrib_def( - RenderModeAttrib.get_class_slot()) - node.set_attrib(RenderModeAttrib.make( - RenderModeAttrib.M_filled_wireframe, - 0.5, # thickness - render_attrib.perspective, - (0.7, 0.7, 0.7, 1.0) # wireframe_color - )) + model.add_geom(make_heightmap(heightmap)) + material = Material() + material.set_ambient(Vec4(0.95, 0.95, 1.0, 1.0)) + material.set_diffuse(Vec4(0.95, 0.95, 1.0, 1.0)) + material.set_specular(Vec3(1.0, 1.0, 1.0)) + material.set_roughness(0.4) + node.set_material(material, 1) + # node.set_color((0.95, 0.95, 1.0, 1.0)) + if show_meshes: + 
render_attrib = node.get_state().get_attrib_def( + RenderModeAttrib.get_class_slot()) + node.set_attrib(RenderModeAttrib.make( + RenderModeAttrib.M_filled_wireframe, + 0.5, # thickness + render_attrib.perspective, + (0.7, 0.7, 0.7, 1.0) # wireframe_color + )) node.set_two_sided(True) return node def update_floor(self, - height_map: Optional[Callable[ - [np.ndarray], Tuple[float, np.ndarray]]] = None, - grid_unit: float = 0.2) -> NodePath: - if height_map is not None and not callable(height_map): - height_map = pickle.loads(height_map) + heightmap: Optional[np.ndarray] = None, + show_meshes: bool = False) -> NodePath: + """Update the floor. + + :param heightmap: Height map of the ground, as a 2D nd.array of shape + [N_X * N_Y, 6], where N_X, N_Y are the number of + vertices on x and y axes respectively, while the + last dimension corresponds to the position (x, y, z) + and normal (n_x, n_y, nz) of the vertex in space. It + renders a flat tile ground if not specified. + Optional: None by default. + """ self._floor.remove_node() - self._floor = self._make_floor(height_map, grid_unit) + self._floor = self._make_floor(heightmap, show_meshes) def append_group(self, root_path: str, @@ -1433,7 +1447,7 @@ def loadViewerGeometryObject(self, # Panda3d model caching procedure. is_success = True mesh_path = geometry_object.meshPath - if '\\' in mesh_path or '/' in mesh_path: + if any(char in mesh_path for char in ('\\', '/', '.')): # Assuming it is an actual path if it has a least on slash. It is # way faster than actually checking if the path actually exists. mesh_path = _sanitize_path(geometry_object.meshPath) @@ -1471,7 +1485,7 @@ def loadViewerGeometryObject(self, # Create primitive triangle geometry vformat = GeomVertexFormat.get_v3() vdata = GeomVertexData('vdata', vformat, Geom.UHStatic) - vdata.uncleanSetNumRows(geom.num_points) + vdata.unclean_set_num_rows(geom.num_points) vwriter = GeomVertexWriter(vdata, 'vertex') for vertex in vertices: vwriter.addData3(*vertex) diff --git a/python/jiminy_py/src/jiminy_py/viewer/replay.py b/python/jiminy_py/src/jiminy_py/viewer/replay.py index be08a37a3..d66170553 100644 --- a/python/jiminy_py/src/jiminy_py/viewer/replay.py +++ b/python/jiminy_py/src/jiminy_py/viewer/replay.py @@ -18,7 +18,7 @@ from .. import core as jiminy from ..log import (TrajectoryDataType, read_log, - build_robot_from_log_constants, + build_robot_from_log, extract_trajectory_data_from_log, emulate_sensors_data_from_log) from .viewer import (COLORS, @@ -650,7 +650,7 @@ def play_logs_files(logs_files: Union[str, Sequence[str]], robots, logs_data = [], [] for log_file in logs_files: log_data, log_constants = read_log(log_file) - robot = build_robot_from_log_constants( + robot = build_robot_from_log( log_constants, mesh_package_dirs) logs_data.append(log_data) robots.append(robot) diff --git a/python/jiminy_py/src/jiminy_py/viewer/viewer.py b/python/jiminy_py/src/jiminy_py/viewer/viewer.py index 5fae1bd07..317cd6e20 100644 --- a/python/jiminy_py/src/jiminy_py/viewer/viewer.py +++ b/python/jiminy_py/src/jiminy_py/viewer/viewer.py @@ -38,7 +38,8 @@ from pinocchio.visualize import GepettoVisualizer from .. 
import core as jiminy -from ..core import ContactSensor as contact +from ..core import (ContactSensor as contact, + discretize_heightmap) from ..state import State from ..dynamics import XYZQuatToXYZRPY from .meshcat.utilities import interactive_mode @@ -309,7 +310,8 @@ def __init__(self, scene_name: str = 'world', display_com: bool = False, display_dcm: bool = False, - display_contacts: bool = False, + display_contact_frames: bool = False, + display_contact_forces: bool = False, display_f_external: Optional[ Union[Sequence[bool], bool]] = None, **kwargs: Any): @@ -356,11 +358,14 @@ def __init__(self, Optional: Disabled by default. :param display_dcm: Whether or not to display the capture point / DCM. Optional: Disabled by default. - :param display_contacts: Whether or not to display the contact forces. - Note that the user is responsible for updating - sensors data since `Viewer.display` is only - computing kinematic quantities. - Optional: Disabled by default. + :param display_contact_frames: + Whether or not to display the contact frames. + Optional: Disabled by default. + :param display_contact_forces: + Whether or not to display the contact forces. Note that the user is + responsible for updating sensors data since `Viewer.display` is + only computing kinematic quantities. + Optional: Disabled by default. :param display_f_external: Whether or not to display the external external forces applied at the joints on the robot. If a boolean is provided, the same @@ -379,11 +384,15 @@ def __init__(self, # Make sure user arguments are valid if not Viewer.backend.startswith('panda3d'): - if display_com or display_dcm or display_contacts: + if display_com or display_dcm or display_contact_frames or \ + display_contact_forces: logger.warning( "Panda3d backend is required to display markers, e.g. " "CoM, DCM or Contact.") - display_com, display_dcm, display_contacts = False, False, False + display_com = False + display_dcm = False + display_contact_frames = False + display_contact_forces = False # Backup some user arguments self.robot_color = get_color_code(robot_color) @@ -394,7 +403,8 @@ def __init__(self, self._lock = lock or Viewer._lock self._display_com = display_com self._display_dcm = display_dcm - self._display_contacts = display_contacts + self._display_contact_frames = display_contact_frames + self._display_contact_forces = display_contact_forces self._display_f_external = display_f_external # Initialize marker register @@ -616,16 +626,9 @@ def _setup(self, self.f_external.extend([ pin.Force.Zero() for _ in range(pinocchio_model.njoints - 1)]) - # Create robot visual model. - # Note that it does not actually loads the meshes if possible, since - # the rendering backend will reload them anyway. 
- visual_model = jiminy.build_geom_from_urdf( - pinocchio_model, self.urdf_path, pin.GeometryType.VISUAL, - robot.mesh_package_dirs, load_meshes=False) - # Create backend wrapper to get (almost) backend-independent API self._client = backends_available[Viewer.backend]( - pinocchio_model, robot.collision_model, visual_model) + pinocchio_model, robot.collision_model, robot.visual_model) self._client.data = pinocchio_data self._client.collision_data = robot.collision_data @@ -715,6 +718,20 @@ def get_dcm_scale() -> Tuple3FType: self.display_capture_point(self._display_dcm) + # Add contact frame markers + for frame_name, frame_idx in zip( + robot.contact_frames_names, robot.contact_frames_idx): + frame_pose = robot.pinocchio_data.oMf[frame_idx] + self.add_marker(name='_'.join(("ContactFrame", frame_name)), + shape="sphere", + color="yellow", + pose=[frame_pose.translation, None], + remove_if_exists=True, + auto_refresh=False, + radius=0.02) + + self.display_contact_frames(self._display_contact_frames) + # Add contact sensor markers def get_contact_pose( sensor: contact) -> Tuple[Tuple3FType, Tuple4FType]: @@ -740,7 +757,7 @@ def get_contact_scale(sensor: contact) -> Tuple3FType: length=0.5, anchor_bottom=True) - self.display_contact_forces(self._display_contacts) + self.display_contact_forces(self._display_contact_forces) # Add external forces def get_force_pose( @@ -962,7 +979,11 @@ def close(self=None) -> None: if Viewer.backend == 'meshcat': recorder_proc = Viewer._backend_obj.recorder.proc _ProcessWrapper(recorder_proc).kill() + if Viewer.backend == 'panda3d': + Viewer._backend_obj._app.stop() + Viewer._backend_proc.wait(timeout=0.1) Viewer._backend_proc.kill() + atexit.unregister(Viewer.close) Viewer._backend_obj = None Viewer._backend_proc = None Viewer._has_gui = False @@ -1030,6 +1051,12 @@ def _get_colorized_urdf(robot: jiminy.Model, urdf_path = robot.urdf_path mesh_package_dirs = robot.mesh_package_dirs + # Make sure the robot is associated with an existing URDF + if not urdf_path: + raise RuntimeError( + "Impossible to call this method if the robot is not " + "associated with any URDF.") + # Define color tag and string representation color_tag = " ".join(map(str, list(rgb) + [1.0])) color_str = "_".join(map(str, list(rgb) + [1.0])) @@ -1264,6 +1291,10 @@ def _gepetto_client_connect(get_proc_info=False): Viewer._backend_obj = client Viewer._backend_proc = proc + # Make sure to close cleanly the viewer at exit + if close_at_exit: + atexit.register(Viewer.close) + # Make sure the backend process is alive assert Viewer.is_alive(), ( "Something went wrong. Impossible to instantiate viewer backend.") @@ -1698,6 +1729,49 @@ def set_color(self, else: logger.warning("This method is only supported by Panda3d.") + @__must_be_open + @__with_lock + def update_floor(self, + ground_profile: Optional[jiminy.HeightmapFunctor] = None, + grid_size: float = 20.0, + grid_unit: float = 0.04, + show_meshes: bool = False) -> None: + """Display a custom ground profile as a height map or the original tile + ground floor. + + .. note:: + This method is only supported by Panda3d for now. + + :param ground_profile: `jiminy_py.core.HeightmapFunctor` associated + with the ground profile. It renders a flat tile + ground if not specified. + Optional: None by default. + :param grid_size: X and Y dimension of the ground profile to render. + Optional: 20m by default. + :param grid_unit: X and Y discretization step of the ground profile. + Optional: 4cm by default. 
+ :param show_meshes: Whether or not to highlight the meshes. + Optional: disabled by default. + """ + if Viewer.backend.startswith('panda3d'): + # Restore tile ground if heightmap is not specified + if ground_profile is None: + self._gui.update_floor() + return + + # Discretize heightmap + grid = discretize_heightmap(ground_profile, grid_size, grid_unit) + + # Make sure it is not flat ground + if np.unique(grid[:, 2:], axis=0).shape[0] == 1 and \ + np.allclose(grid[0, 2:], [0.0, 0.0, 0.0, 1.0], atol=1e-3): + self._gui.update_floor() + return + + self._gui.update_floor(grid, show_meshes) + else: + logger.warning("This method is only supported by Panda3d.") + @__must_be_open @__with_lock def capture_frame(self, @@ -1944,7 +2018,7 @@ def display_center_of_mass(self, visibility: bool) -> None: @__with_lock def display_capture_point(self, visibility: bool) -> None: """Display the position of the capture point,also called divergent - component of motion (DCM), as a sphere. + component of motion (DCM) as a sphere. .. note:: Calling `Viewer.display` will update it automatically, while @@ -1969,11 +2043,43 @@ def display_capture_point(self, visibility: bool) -> None: if visibility: self.refresh() + @__must_be_open + @__with_lock + def display_contact_frames(self, visibility: bool) -> None: + """Display the contact frames of the robot as spheres. + + .. note:: + The frames to display are specified by the attribute + `contact_frames_names` of the provided `robot`. Calling + `Viewer.display` will update it automatically, while + `Viewer.refresh` will not. + + .. warning:: + This method is only supported by Panda3d. + + :param visibility: Whether or not to display the contact frames. + """ + # Make sure the current backend is supported by this method + if not Viewer.backend.startswith('panda3d'): + raise NotImplementedError( + "This method is only supported by Panda3d.") + + # Update visibility + for name in self.markers: + if name.startswith("ContactFrame"): + self._gui.show_node(self._markers_group, name, visibility) + self._markers_visibility[name] = visibility + self._display_contact_frames = visibility + + # Must refresh the scene + if visibility: + self.refresh() + @__must_be_open @__with_lock def display_contact_forces(self, visibility: bool) -> None: """Display forces associated with the contact sensors attached to the - robot, as a capsule of variable length depending of Fz. + robot, as cylinders of variable length depending of Fz. .. note:: Fz can be signed. It will affect the orientation of the capsule. @@ -1998,7 +2104,7 @@ def display_contact_forces(self, visibility: bool) -> None: if name.startswith(contact.type): self._gui.show_node(self._markers_group, name, visibility) self._markers_visibility[name] = visibility - self._display_contacts = visibility + self._display_contact_forces = visibility # Must refresh the scene if visibility: @@ -2009,8 +2115,8 @@ def display_contact_forces(self, visibility: bool) -> None: def display_external_forces(self, visibility: Union[Sequence[bool], bool] ) -> None: - """Display external forces applied on the joints the robot, as an - arrow of variable length depending of magnitude of the force. + """Display external forces applied on the joints the robot, as arrows + of variable length depending of magnitude of the force. .. warning:: It only display the linear component of the force, while ignoring @@ -2260,9 +2366,9 @@ def replay(self, :param wait: Whether or not to wait for rendering to finish. 
""" # Disable display of sensor data if no update hook is provided - disable_display_contacts = False - if update_hook is None and self._display_contacts: - disable_display_contacts = True + disable_display_contact_forces = False + if update_hook is None and self._display_contact_forces: + disable_display_contact_forces = True self.display_contact_forces(False) # Disable display of DCM if no velocity data provided @@ -2341,7 +2447,7 @@ def replay(self, Viewer.set_clock() # Restore display if necessary - if disable_display_contacts: + if disable_display_contact_forces: self.display_contact_forces(True) if disable_display_dcm: self.display_capture_point(True) diff --git a/python/jiminy_pywrap/CMakeLists.txt b/python/jiminy_pywrap/CMakeLists.txt index 217ded3b8..c8972634b 100755 --- a/python/jiminy_pywrap/CMakeLists.txt +++ b/python/jiminy_pywrap/CMakeLists.txt @@ -13,8 +13,9 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${WARN_FULL}") # Add cpp sources (in binary dir, since the sources are pre-processed to substitute docstring) set(SRC "${CMAKE_CURRENT_BINARY_DIR}/src/Compatibility.cc" - "${CMAKE_CURRENT_BINARY_DIR}/src/Functors.cc" "${CMAKE_CURRENT_BINARY_DIR}/src/Helpers.cc" + "${CMAKE_CURRENT_BINARY_DIR}/src/Functors.cc" + "${CMAKE_CURRENT_BINARY_DIR}/src/Generators.cc" "${CMAKE_CURRENT_BINARY_DIR}/src/Robot.cc" "${CMAKE_CURRENT_BINARY_DIR}/src/Sensors.cc" "${CMAKE_CURRENT_BINARY_DIR}/src/Motors.cc" diff --git a/python/jiminy_pywrap/include/jiminy/python/Functors.h b/python/jiminy_pywrap/include/jiminy/python/Functors.h index 91a637a31..65a96d824 100644 --- a/python/jiminy_pywrap/include/jiminy/python/Functors.h +++ b/python/jiminy_pywrap/include/jiminy/python/Functors.h @@ -199,41 +199,42 @@ namespace python sensorsDataMap_t const & /* sensorsData */, vectorN_t & /* command */)>; - // ************************** HeatMapFunctorPyWrapper ****************************** + // ************************** HeightmapFunctorPyWrapper ****************************** - enum class heatMapType_t : uint8_t + enum class heightmapType_t : uint8_t { CONSTANT = 0x01, STAIRS = 0x02, GENERIC = 0x03, }; - struct HeatMapFunctorPyWrapper { + struct HeightmapFunctorPyWrapper + { public: // Disable the copy of the class - HeatMapFunctorPyWrapper & operator = (HeatMapFunctorPyWrapper const & other) = delete; + HeightmapFunctorPyWrapper & operator = (HeightmapFunctorPyWrapper const & other) = delete; public: - HeatMapFunctorPyWrapper(bp::object const & objPy, - heatMapType_t const & objType) : - heatMapType_(objType), + HeightmapFunctorPyWrapper(bp::object const & objPy, + heightmapType_t const & objType) : + heightmapType_(objType), handlePyPtr_(objPy), out1Ptr_(new float64_t), out2Ptr_(new vector3_t), out1PyPtr_(), out2PyPtr_() { - if (heatMapType_ == heatMapType_t::CONSTANT) + if (heightmapType_ == heightmapType_t::CONSTANT) { *out1Ptr_ = bp::extract(handlePyPtr_); - *out2Ptr_ = (vector3_t() << 0.0, 0.0, 1.0).finished(); + *out2Ptr_ = vector3_t::UnitZ(); } - else if (heatMapType_ == heatMapType_t::STAIRS) + else if (heightmapType_ == heightmapType_t::STAIRS) { out1PyPtr_ = getNumpyReference(*out1Ptr_); - *out2Ptr_ = (vector3_t() << 0.0, 0.0, 1.0).finished(); + *out2Ptr_ = vector3_t::UnitZ(); } - else if (heatMapType_ == heatMapType_t::GENERIC) + else if (heightmapType_ == heightmapType_t::GENERIC) { out1PyPtr_ = getNumpyReference(*out1Ptr_); out2PyPtr_ = getNumpyReference(*out2Ptr_); @@ -241,8 +242,8 @@ namespace python } // Copy constructor, same as the normal constructor - 
HeatMapFunctorPyWrapper(HeatMapFunctorPyWrapper const & other) : - heatMapType_(other.heatMapType_), + HeightmapFunctorPyWrapper(HeightmapFunctorPyWrapper const & other) : + heightmapType_(other.heightmapType_), handlePyPtr_(other.handlePyPtr_), out1Ptr_(new float64_t), out2Ptr_(new vector3_t), @@ -256,8 +257,8 @@ namespace python } // Move constructor, takes a rvalue reference && - HeatMapFunctorPyWrapper(HeatMapFunctorPyWrapper && other) : - heatMapType_(other.heatMapType_), + HeightmapFunctorPyWrapper(HeightmapFunctorPyWrapper && other) : + heightmapType_(other.heightmapType_), handlePyPtr_(other.handlePyPtr_), out1Ptr_(nullptr), out2Ptr_(nullptr), @@ -279,7 +280,7 @@ namespace python } // Destructor - ~HeatMapFunctorPyWrapper() + ~HeightmapFunctorPyWrapper() { Py_XDECREF(out1PyPtr_); Py_XDECREF(out2PyPtr_); @@ -288,11 +289,11 @@ namespace python } // Move assignment, takes a rvalue reference && - HeatMapFunctorPyWrapper& operator = (HeatMapFunctorPyWrapper&& other) + HeightmapFunctorPyWrapper& operator = (HeightmapFunctorPyWrapper&& other) { /* "other" is soon going to be destroyed, so we let it destroy our current resource instead and we take "other"'s current resource via swapping */ - std::swap(heatMapType_, other.heatMapType_); + std::swap(heightmapType_, other.heightmapType_); std::swap(handlePyPtr_, other.handlePyPtr_); std::swap(out1Ptr_, other.out1Ptr_); std::swap(out2Ptr_, other.out2Ptr_); @@ -303,12 +304,12 @@ namespace python std::pair operator() (vector3_t const & posFrame) { - if (heatMapType_ == heatMapType_t::STAIRS) + if (heightmapType_ == heightmapType_t::STAIRS) { bp::handle<> out1Py(bp::borrowed(out1PyPtr_)); handlePyPtr_(posFrame[0], posFrame[1], out1Py); } - else if (heatMapType_ == heatMapType_t::GENERIC) + else if (heightmapType_ == heightmapType_t::GENERIC) { bp::handle<> out1Py(bp::borrowed(out1PyPtr_)); bp::handle<> out2Py(bp::borrowed(out2PyPtr_)); @@ -318,18 +319,20 @@ namespace python return {*out1Ptr_, *out2Ptr_}; } - private: - heatMapType_t heatMapType_; + public: + heightmapType_t heightmapType_; bp::object handlePyPtr_; + + private: float64_t * out1Ptr_; vector3_t * out2Ptr_; PyObject * out1PyPtr_; PyObject * out2PyPtr_; }; - // **************************** HeatMapFunctorVisitor ***************************** + // **************************** HeightmapFunctorVisitor ***************************** - void exposeHeatMapFunctor(void); + void exposeHeightmapFunctor(void); } // End of namespace python. } // End of namespace jiminy. diff --git a/python/jiminy_pywrap/include/jiminy/python/Generators.h b/python/jiminy_pywrap/include/jiminy/python/Generators.h new file mode 100644 index 000000000..b2c04f2b3 --- /dev/null +++ b/python/jiminy_pywrap/include/jiminy/python/Generators.h @@ -0,0 +1,13 @@ +#ifndef GENERATORS_PYTHON_H +#define GENERATORS_PYTHON_H + + +namespace jiminy +{ +namespace python +{ + void exposeGenerators(void); +} // End of namespace python. +} // End of namespace jiminy. + +#endif // GENERATORS_PYTHON_H diff --git a/python/jiminy_pywrap/include/jiminy/python/Utilities.h b/python/jiminy_pywrap/include/jiminy/python/Utilities.h index 2c56bec09..b964a64f9 100644 --- a/python/jiminy_pywrap/include/jiminy/python/Utilities.h +++ b/python/jiminy_pywrap/include/jiminy/python/Utilities.h @@ -62,7 +62,7 @@ namespace python consists in adding the expected tags as function doc. It works for now but it is not really reliable and may break in the future too. 
*/ bp::converter::registration const * r = bp::converter::registry::query(typeid(WrappedClassT)); - assert(r && ("Class " + typeid(WrappedClassT).name() + " not registered to Boost Python.")); + assert((std::string("Class ") + typeid(WrappedClassT).name() + " not registered to Boost Python.", r != nullptr)); PyTypeObject * nsPtr = r->get_class_object(); bp::object nsName(bp::handle<>(PyObject_GetAttrString(reinterpret_cast(nsPtr), "__name__"))); bp::objects::function * funcPtr = bp::downcast(func.ptr()); @@ -167,8 +167,8 @@ namespace python return array; } - template - PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Matrix & value) + template + PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Matrix & value) { npy_intp dims[2] = {npy_intp(value.cols()), npy_intp(value.rows())}; PyObject * array = PyArray_SimpleNewFromData(2, dims, getPyType(*value.data()), const_cast(value.data())); @@ -177,8 +177,8 @@ namespace python return arrayT; } - template - PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Ref > & value) + template + PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Ref > & value) { npy_intp dims[2] = {npy_intp(value.cols()), npy_intp(value.rows())}; PyObject * array = PyArray_SimpleNewFromData(2, dims, getPyType(*value.data()), value.data()); @@ -187,11 +187,11 @@ namespace python return arrayT; } - template - PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Matrix const & value) + template + PyObject * getNumpyReferenceFromEigenMatrix(Eigen::Matrix const & value) { PyObject * array = getNumpyReferenceFromEigenMatrix( - const_cast &>(value)); + const_cast &>(value)); PyArray_CLEARFLAGS(reinterpret_cast(array), NPY_ARRAY_WRITEABLE); return array; } diff --git a/python/jiminy_pywrap/src/Constraints.cc b/python/jiminy_pywrap/src/Constraints.cc index e4cd19da9..775d8fe14 100644 --- a/python/jiminy_pywrap/src/Constraints.cc +++ b/python/jiminy_pywrap/src/Constraints.cc @@ -81,8 +81,7 @@ namespace python } static std::shared_ptr fixedFrameConstraintFactory(std::string const & frameName, - bp::object const & maskFixedPy, - pinocchio::ReferenceFrame const & frameRef) + bp::object const & maskFixedPy) { Eigen::Matrix maskFixed; if (maskFixedPy.is_none()) @@ -102,7 +101,7 @@ namespace python maskFixed[i] = maskFixedListPyExtract(); } } - return std::make_shared(frameName, maskFixed, frameRef); + return std::make_shared(frameName, maskFixed); } static void setIsEnable(AbstractConstraintBase & self, @@ -118,11 +117,6 @@ namespace python } } - static void setReferenceConfiguration(JointConstraint & self, vectorN_t const & value) - { - self.setReferenceConfiguration(value); - } - public: /////////////////////////////////////////////////////////////////////////////// /// \brief Expose. 
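Since the factory above drops the reference_frame argument, a FixedFrameConstraint is now built on the Python side from the frame name and an optional fixed-DoF mask only. A minimal usage sketch under that assumption; the frame name is a placeholder, the mask is passed as a plain boolean sequence, and attaching the constraint to a robot is left out.

    import jiminy_py.core as jiminy

    # Fix all 6 DoFs of the frame (mask_fixed defaults to None, i.e. everything fixed)
    constraint_all = jiminy.FixedFrameConstraint("MassBody")  # placeholder frame name

    # Fix only the translation DoFs, leaving the rotation free
    constraint_trans = jiminy.FixedFrameConstraint(
        "MassBody", mask_fixed=[True, True, True, False, False, False])
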
@@ -156,15 +150,14 @@ namespace python bp::return_value_policy())) .add_property("reference_configuration", bp::make_function(&JointConstraint::getReferenceConfiguration, bp::return_value_policy >()), - &PyConstraintVisitor::setReferenceConfiguration); + &JointConstraint::setReferenceConfiguration); bp::class_, std::shared_ptr, boost::noncopyable>("FixedFrameConstraint", bp::no_init) .def("__init__", bp::make_constructor(&PyConstraintVisitor::fixedFrameConstraintFactory, bp::default_call_policies(), (bp::arg("frame_name"), - bp::arg("mask_fixed")=bp::object(), - bp::arg("reference_frame")=pinocchio::LOCAL_WORLD_ALIGNED))) + bp::arg("mask_fixed")=bp::object()))) .def_readonly("type", &FixedFrameConstraint::type_) .add_property("frame_name", bp::make_function(&FixedFrameConstraint::getFrameName, bp::return_value_policy())) @@ -172,11 +165,12 @@ namespace python bp::return_value_policy())) .add_property("dofs_fixed", bp::make_function(&FixedFrameConstraint::getDofsFixed, bp::return_value_policy())) - .add_property("reference_frame", bp::make_function(&FixedFrameConstraint::getReferenceFrame, - bp::return_value_policy())) .add_property("reference_transform", bp::make_function(&FixedFrameConstraint::getReferenceTransform, bp::return_internal_reference<>()), - &FixedFrameConstraint::setReferenceTransform); + &FixedFrameConstraint::setReferenceTransform) + .add_property("local_rotation", bp::make_function(&FixedFrameConstraint::getLocalFrame, + bp::return_value_policy >()), + &FixedFrameConstraint::setLocalFrame); bp::class_, std::shared_ptr, diff --git a/python/jiminy_pywrap/src/Engine.cc b/python/jiminy_pywrap/src/Engine.cc index 59047ec20..0d9e24784 100644 --- a/python/jiminy_pywrap/src/Engine.cc +++ b/python/jiminy_pywrap/src/Engine.cc @@ -981,7 +981,7 @@ namespace python vectorN_t const & qInit, vectorN_t const & vInit, bp::object const & aInitPy, - bool const & isStateTheoretical) + bool_t const & isStateTheoretical) { boost::optional aInit = boost::none; if (!aInitPy.is_none()) @@ -996,7 +996,7 @@ namespace python vectorN_t const & qInit, vectorN_t const & vInit, bp::object const & aInitPy, - bool const & isStateTheoretical) + bool_t const & isStateTheoretical) { boost::optional aInit = boost::none; if (!aInitPy.is_none()) diff --git a/python/jiminy_pywrap/src/Functors.cc b/python/jiminy_pywrap/src/Functors.cc index c7ad16e9f..ba21c1de1 100644 --- a/python/jiminy_pywrap/src/Functors.cc +++ b/python/jiminy_pywrap/src/Functors.cc @@ -26,10 +26,10 @@ namespace python return (new pinocchio::Force(vector6_t::Zero())); } - // **************************** PyHeatMapFunctorVisitor ***************************** + // **************************** PyHeightmapFunctorVisitor ***************************** - struct PyHeatMapFunctorVisitor - : public bp::def_visitor + struct PyHeightmapFunctorVisitor + : public bp::def_visitor { public: /////////////////////////////////////////////////////////////////////////////// @@ -39,25 +39,38 @@ namespace python void visit(PyClass & cl) const { cl - .def("__init__", bp::make_constructor(&PyHeatMapFunctorVisitor::factory, + .def("__init__", bp::make_constructor(&PyHeightmapFunctorVisitor::factory, bp::default_call_policies(), - (bp::args("heatmap_function", "heatmap_type")))) - .def("__call__", &PyHeatMapFunctorVisitor::eval, - (bp::arg("self"), bp::arg("position"))) + (bp::arg("heightmap_function"), + bp::arg("heightmap_type")=heightmapType_t::GENERIC))) + .def("__call__", &PyHeightmapFunctorVisitor::eval, + (bp::args("self", "position"))) + 
.add_property("py_function", bp::make_function(&PyHeightmapFunctorVisitor::getPyFun, + bp::return_value_policy())); ; } - static bp::tuple eval(heatMapFunctor_t & self, - vector3_t const & posFrame) + static bp::tuple eval(heightmapFunctor_t & self, + vector3_t const & posFrame) { - std::pair ground = self(posFrame); - return bp::make_tuple(std::move(std::get<0>(ground)), std::move(std::get<1>(ground))); + std::pair const ground = self(posFrame); + return bp::make_tuple(std::get(ground), std::get(ground)); } - static std::shared_ptr factory(bp::object & objPy, - heatMapType_t const & objType) + static bp::object getPyFun(heightmapFunctor_t & self) { - return std::make_shared(HeatMapFunctorPyWrapper(std::move(objPy), objType)); + HeightmapFunctorPyWrapper * pyWrapper(self.target()); + if (!pyWrapper || pyWrapper->heightmapType_ != heightmapType_t::GENERIC) + { + return {}; + } + return pyWrapper->handlePyPtr_; + } + + static std::shared_ptr factory(bp::object & objPy, + heightmapType_t const & objType) + { + return std::make_shared(HeightmapFunctorPyWrapper(objPy, objType)); } /////////////////////////////////////////////////////////////////////////////// @@ -65,12 +78,12 @@ namespace python /////////////////////////////////////////////////////////////////////////////// static void expose() { - bp::class_ >("HeatMapFunctor", bp::no_init) - .def(PyHeatMapFunctorVisitor()); + bp::class_ >("HeightmapFunctor", bp::no_init) + .def(PyHeightmapFunctorVisitor()); } }; - BOOST_PYTHON_VISITOR_EXPOSE(HeatMapFunctor) + BOOST_PYTHON_VISITOR_EXPOSE(HeightmapFunctor) } // End of namespace python. } // End of namespace jiminy. diff --git a/python/jiminy_pywrap/src/Generators.cc b/python/jiminy_pywrap/src/Generators.cc new file mode 100644 index 000000000..6762f8c98 --- /dev/null +++ b/python/jiminy_pywrap/src/Generators.cc @@ -0,0 +1,111 @@ +#include "jiminy/core/utilities/Random.h" + +#include "jiminy/python/Utilities.h" +#include "jiminy/python/Generators.h" + + +namespace jiminy +{ +namespace python +{ + namespace bp = boost::python; + + heightmapFunctor_t sumHeightmap(bp::list const & heightmapsPy) + { + auto heightmaps = convertFromPython >(heightmapsPy); + return ::jiminy::sumHeightmap(heightmaps); + } + + heightmapFunctor_t mergeHeightmap(bp::list const & heightmapsPy) + { + auto heightmaps = convertFromPython >(heightmapsPy); + return ::jiminy::mergeHeightmap(heightmaps); + } + + void resetRandomGenerators(bp::object const & seedPy) + { + boost::optional seed = boost::none; + if (!seedPy.is_none()) + { + seed = bp::extract(seedPy); + } + ::jiminy::resetRandomGenerators(seed); + } + + void exposeGenerators(void) + { + bp::def("reset_random_generator", &resetRandomGenerators, (bp::arg("seed") = bp::object())); + + bp::class_, + boost::noncopyable>("RandomPerlinProcess", + bp::init( + (bp::arg("self"), "wavelength", bp::arg("num_octaves") = 6U))) + .def("__call__", &RandomPerlinProcess::operator(), + (bp::arg("self"), bp::arg("time"))) + .def("reset", &RandomPerlinProcess::reset) + .add_property("wavelength", bp::make_function(&RandomPerlinProcess::getWavelength, + bp::return_value_policy())) + .add_property("num_octaves", bp::make_function(&RandomPerlinProcess::getNumOctaves, + bp::return_value_policy())); + + bp::class_, + boost::noncopyable>("PeriodicPerlinProcess", + bp::init( + (bp::arg("self"), "wavelength", "period", bp::arg("num_octaves") = 6U))) + .def("__call__", &PeriodicPerlinProcess::operator(), + (bp::arg("self"), bp::arg("time"))) + .def("reset", &PeriodicPerlinProcess::reset) + 
.add_property("wavelength", bp::make_function(&PeriodicPerlinProcess::getWavelength, + bp::return_value_policy())) + .add_property("period", bp::make_function(&PeriodicPerlinProcess::getPeriod, + bp::return_value_policy())) + .add_property("num_octaves", bp::make_function(&PeriodicPerlinProcess::getNumOctaves, + bp::return_value_policy())); + + bp::class_, + boost::noncopyable>("PeriodicGaussianProcess", + bp::init( + bp::args("self", "wavelength", "period"))) + .def("__call__", &PeriodicGaussianProcess::operator(), + (bp::arg("self"), bp::arg("time"))) + .def("reset", &PeriodicGaussianProcess::reset) + .add_property("wavelength", bp::make_function(&PeriodicGaussianProcess::getWavelength, + bp::return_value_policy())) + .add_property("period", bp::make_function(&PeriodicGaussianProcess::getPeriod, + bp::return_value_policy())) + .add_property("dt", bp::make_function(&PeriodicGaussianProcess::getDt, + bp::return_value_policy())); + + bp::class_, + boost::noncopyable>("PeriodicFourierProcess", + bp::init( + bp::args("self", "wavelength", "period"))) + .def("__call__", &PeriodicFourierProcess::operator(), + (bp::arg("self"), bp::arg("time"))) + .def("reset", &PeriodicFourierProcess::reset) + .add_property("wavelength", bp::make_function(&PeriodicFourierProcess::getWavelength, + bp::return_value_policy())) + .add_property("period", bp::make_function(&PeriodicFourierProcess::getPeriod, + bp::return_value_policy())) + .add_property("dt", bp::make_function(&PeriodicFourierProcess::getDt, + bp::return_value_policy())) + .add_property("num_harmonics", bp::make_function(&PeriodicFourierProcess::getNumHarmonics, + bp::return_value_policy())); + + bp::def("random_tile_ground", &randomTileGround, + bp::args("size", "height_max", "interp_delta", "sparsity", "orientation", "seed")); + bp::def("sum_heightmap", &sumHeightmap, bp::args("heightmaps")); + bp::def("merge_heightmap", &mergeHeightmap, bp::args("heightmaps")); + + bp::def("discretize_heightmap", &discretizeHeightmap, bp::args("heightmap", "grid_size", "grid_unit")); + } + +} // End of namespace python. +} // End of namespace jiminy. + + + diff --git a/python/jiminy_pywrap/src/Helpers.cc b/python/jiminy_pywrap/src/Helpers.cc index bcab848e0..9074ae283 100644 --- a/python/jiminy_pywrap/src/Helpers.cc +++ b/python/jiminy_pywrap/src/Helpers.cc @@ -8,6 +8,8 @@ #include "jiminy/core/utilities/Json.h" #include "jiminy/core/utilities/Random.h" +#include "pinocchio/algorithm/model.hpp" + #include /* Note that it is necessary to import eigenpy to get access to the converters. 
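The new Generators unit above groups the random-process and heightmap bindings that the next hunks remove from Helpers.cc. A short usage sketch of the exposed Python API, based only on the argument names visible in the bindings; the constant height value and the grid parameters are illustrative.

    import jiminy_py.core as jiminy

    # Seed every jiminy random generator at once
    jiminy.reset_random_generator(0)

    # Scalar random processes: callable on a time value, resettable
    perlin = jiminy.RandomPerlinProcess(wavelength=1.0, num_octaves=6)
    fourier = jiminy.PeriodicFourierProcess(wavelength=1.0, period=10.0)
    samples = [perlin(0.1 * i) for i in range(11)]
    perlin.reset()

    # Constant-height ground functor, then discretization into the
    # (N_X * N_Y, 6) grid of vertex positions and normals consumed by
    # Viewer.update_floor in the viewer changes above
    ground = jiminy.HeightmapFunctor(0.1, jiminy.heightmapType_t.CONSTANT)
    grid = jiminy.discretize_heightmap(ground, grid_size=20.0, grid_unit=0.04)
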
@@ -55,16 +57,6 @@ namespace python return positionOut; } - void resetRandomGenerators(bp::object const & seedPy) - { - boost::optional seed = boost::none; - if (!seedPy.is_none()) - { - seed = bp::extract(seedPy); - } - ::jiminy::resetRandomGenerators(seed); - } - pinocchio::GeometryModel buildGeomFromUrdf(pinocchio::Model const & model, std::string const & filename, bp::object const & typePy, @@ -118,6 +110,25 @@ namespace python return bp::make_tuple(model, collisionModel); } + bp::tuple buildReducedModels(pinocchio::Model const & model, + pinocchio::GeometryModel const & collisionModel, + pinocchio::GeometryModel const & visualModel, + bp::list const & jointsToLockPy, + vectorN_t const & referenceConfiguration) + { + auto jointsToLock = convertFromPython >(jointsToLockPy); + pinocchio::Model reducedModel; + pinocchio::GeometryModel reducedCollisionModel, reducedVisualModel; + pinocchio::buildReducedModel(model, collisionModel, jointsToLock, + referenceConfiguration, reducedModel, + reducedCollisionModel); + reducedModel = pinocchio::Model(); + pinocchio::buildReducedModel(model, visualModel, jointsToLock, + referenceConfiguration, reducedModel, + reducedVisualModel); + return bp::make_tuple(reducedModel, reducedCollisionModel, reducedVisualModel); + } + configHolder_t loadConfigJsonString(std::string const & jsonString) { std::vector jsonStringVec(jsonString.begin(), jsonString.end()); @@ -150,8 +161,6 @@ namespace python void exposeHelpers(void) { - bp::def("reset_random_generator", &resetRandomGenerators, (bp::arg("seed") = bp::object())); - bp::def("build_geom_from_urdf", &buildGeomFromUrdf, (bp::arg("pinocchio_model"), "urdf_filename", "geom_type", bp::arg("mesh_package_dirs") = bp::list(), @@ -164,6 +173,12 @@ namespace python bp::arg("build_visual_model") = false, bp::arg("load_visual_meshes") = false)); + bp::def("build_reduced_models", &buildReducedModels, + (bp::arg("pinocchio_model"), "collision_model", + "visual_model", + "joint_locked_indices", + "configuration_ref")); + bp::def("load_config_json_string", &loadConfigJsonString, (bp::arg("json_string"))); bp::def("get_joint_type", &getJointTypeFromIdx, @@ -212,66 +227,6 @@ namespace python bp::return_value_policy >()); bp::def("solveJMinvJtv", &solveJMinvJtv, (bp::arg("pinocchio_data"), "v", bp::arg("update_decomposition") = true)); - - bp::class_, - boost::noncopyable>("RandomPerlinProcess", - bp::init( - (bp::arg("self"), "wavelength", bp::arg("num_octaves") = 6U))) - .def("__call__", &RandomPerlinProcess::operator(), - (bp::arg("self"), bp::arg("time"))) - .def("reset", &RandomPerlinProcess::reset) - .add_property("wavelength", bp::make_function(&RandomPerlinProcess::getWavelength, - bp::return_value_policy())) - .add_property("num_octaves", bp::make_function(&RandomPerlinProcess::getNumOctaves, - bp::return_value_policy())); - - bp::class_, - boost::noncopyable>("PeriodicPerlinProcess", - bp::init( - (bp::arg("self"), "wavelength", "period", bp::arg("num_octaves") = 6U))) - .def("__call__", &PeriodicPerlinProcess::operator(), - (bp::arg("self"), bp::arg("time"))) - .def("reset", &PeriodicPerlinProcess::reset) - .add_property("wavelength", bp::make_function(&PeriodicPerlinProcess::getWavelength, - bp::return_value_policy())) - .add_property("period", bp::make_function(&PeriodicPerlinProcess::getPeriod, - bp::return_value_policy())) - .add_property("num_octaves", bp::make_function(&PeriodicPerlinProcess::getNumOctaves, - bp::return_value_policy())); - - bp::class_, - boost::noncopyable>("PeriodicGaussianProcess", - 
bp::init( - bp::args("self", "wavelength", "period"))) - .def("__call__", &PeriodicGaussianProcess::operator(), - (bp::arg("self"), bp::arg("time"))) - .def("reset", &PeriodicGaussianProcess::reset) - .add_property("wavelength", bp::make_function(&PeriodicGaussianProcess::getWavelength, - bp::return_value_policy())) - .add_property("period", bp::make_function(&PeriodicGaussianProcess::getPeriod, - bp::return_value_policy())) - .add_property("dt", bp::make_function(&PeriodicGaussianProcess::getDt, - bp::return_value_policy())); - - bp::class_, - boost::noncopyable>("PeriodicFourierProcess", - bp::init( - bp::args("self", "wavelength", "period"))) - .def("__call__", &PeriodicFourierProcess::operator(), - (bp::arg("self"), bp::arg("time"))) - .def("reset", &PeriodicFourierProcess::reset) - .add_property("wavelength", bp::make_function(&PeriodicFourierProcess::getWavelength, - bp::return_value_policy())) - .add_property("period", bp::make_function(&PeriodicFourierProcess::getPeriod, - bp::return_value_policy())) - .add_property("dt", bp::make_function(&PeriodicFourierProcess::getDt, - bp::return_value_policy())) - .add_property("num_harmonics", bp::make_function(&PeriodicFourierProcess::getNumHarmonics, - bp::return_value_policy())); } } // End of namespace python. diff --git a/python/jiminy_pywrap/src/Module.cc b/python/jiminy_pywrap/src/Module.cc index 0d75b162d..cf143c516 100755 --- a/python/jiminy_pywrap/src/Module.cc +++ b/python/jiminy_pywrap/src/Module.cc @@ -23,6 +23,7 @@ #include "jiminy/python/Utilities.h" #include "jiminy/python/Helpers.h" #include "jiminy/python/Functors.h" +#include "jiminy/python/Generators.h" #include "jiminy/python/Engine.h" #include "jiminy/python/Constraints.h" #include "jiminy/python/Controllers.h" @@ -91,11 +92,11 @@ namespace python .value("SPHERICAL", joint_t::SPHERICAL) .value("FREE", joint_t::FREE); - // Interfaces for heatMapType_t enum - bp::enum_("heatMapType_t") - .value("CONSTANT", heatMapType_t::CONSTANT) - .value("STAIRS", heatMapType_t::STAIRS) - .value("GENERIC", heatMapType_t::GENERIC); + // Interfaces for heightmapType_t enum + bp::enum_("heightmapType_t") + .value("CONSTANT", heightmapType_t::CONSTANT) + .value("STAIRS", heightmapType_t::STAIRS) + .value("GENERIC", heightmapType_t::GENERIC); // Disable CPP docstring bp::docstring_options doc_options; @@ -113,15 +114,16 @@ namespace python // Expose functors TIME_STATE_FCT_EXPOSE(bool_t, Bool) TIME_STATE_FCT_EXPOSE(pinocchio::Force, PinocchioForce) - exposeHeatMapFunctor(); + exposeHeightmapFunctor(); /* Expose compatibility layer, to support both new and old C++ ABI, and to restore automatic converters of numpy scalars without altering python docstring signature. 
*/ exposeCompatibility(); - // Expose helpers + // Expose helpers and generators exposeHelpers(); + exposeGenerators(); // Expose structs and classes exposeSensorsDataMap(); diff --git a/python/jiminy_pywrap/src/Robot.cc b/python/jiminy_pywrap/src/Robot.cc index f5c517579..1edaf7e60 100644 --- a/python/jiminy_pywrap/src/Robot.cc +++ b/python/jiminy_pywrap/src/Robot.cc @@ -78,23 +78,27 @@ namespace python .def("get_rigid_velocity_from_flexible", &PyModelVisitor::getRigidVelocityFromFlexible, (bp::arg("self"), "flexible_velocity")) - .add_property("pinocchio_model", bp::make_getter(&Model::pncModel_, - bp::return_internal_reference<>())) - .add_property("pinocchio_data", bp::make_getter(&Model::pncData_, - bp::return_internal_reference<>())) .add_property("pinocchio_model_th", bp::make_getter(&Model::pncModelRigidOrig_, bp::return_internal_reference<>())) - .add_property("pinocchio_data_th", bp::make_getter(&Model::pncDataRigidOrig_, - bp::return_internal_reference<>())) + .add_property("pinocchio_model", bp::make_getter(&Model::pncModel_, + bp::return_internal_reference<>())) .add_property("collision_model", bp::make_getter(&Model::collisionModel_, bp::return_internal_reference<>())) - .add_property("collision_data", bp::make_function(&PyModelVisitor::getGeometryData, + .add_property("visual_model", bp::make_getter(&Model::visualModel_, + bp::return_internal_reference<>())) + .add_property("pinocchio_data_th", bp::make_getter(&Model::pncDataRigidOrig_, + bp::return_internal_reference<>())) + .add_property("pinocchio_data", bp::make_getter(&Model::pncData_, + bp::return_internal_reference<>())) + .add_property("collision_data", bp::make_function(&PyModelVisitor::getCollisionData, bp::return_internal_reference<>())) .add_property("is_initialized", bp::make_function(&Model::getIsInitialized, bp::return_value_policy())) .add_property("mesh_package_dirs", bp::make_function(&Model::getMeshPackageDirs, bp::return_value_policy >())) + .add_property("name", bp::make_function(&Model::getName, + bp::return_value_policy())) .add_property("urdf_path", bp::make_function(&Model::getUrdfPath, bp::return_value_policy())) .add_property("has_freeflyer", bp::make_function(&Model::getHasFreeflyer, @@ -148,9 +152,9 @@ namespace python ; } - static pinocchio::GeometryData & getGeometryData(Model & self) + static pinocchio::GeometryData & getCollisionData(Model & self) { - return *(self.pncGeometryData_); + return *(self.pncCollisionData_); } static hresult_t addCollisionBodies(Model & self, @@ -281,6 +285,11 @@ namespace python (bp::arg("self"), "urdf_path", bp::arg("has_freeflyer") = false, bp::arg("mesh_package_dirs") = bp::list())) + .def("initialize", + static_cast< + hresult_t (Robot::*)(pinocchio::Model const &, pinocchio::GeometryModel const &, pinocchio::GeometryModel const &) + >(&Robot::initialize), + (bp::arg("self"), "pinocchio_model", "collision_model", "visual_model")) .add_property("is_locked", bp::make_function(&Robot::getIsLocked, bp::return_value_policy())) diff --git a/soup/gtest/CMakeLists.txt b/soup/gtest/CMakeLists.txt index ac77f1b79..5a2f53e77 100644 --- a/soup/gtest/CMakeLists.txt +++ b/soup/gtest/CMakeLists.txt @@ -67,21 +67,25 @@ string(REPLACE "" "${BINARY_DIR}" gmock_main_PATH_Debug "${gmock_mai # Import the generated libraries as targets add_library(gtest::gtest STATIC IMPORTED GLOBAL) set_target_properties(gtest::gtest PROPERTIES + IMPORTED_CONFIGURATIONS "Debug;Release" IMPORTED_LOCATION ${gtest_PATH_Release} IMPORTED_LOCATION_DEBUG ${gtest_PATH_Debug} ) 
add_library(gtest::gtest_main STATIC IMPORTED GLOBAL) set_target_properties(gtest::gtest_main PROPERTIES + IMPORTED_CONFIGURATIONS "Debug;Release" IMPORTED_LOCATION ${gtest_main_PATH_Release} IMPORTED_LOCATION_DEBUG ${gtest_main_PATH_Debug} ) add_library(gtest::gmock STATIC IMPORTED GLOBAL) set_target_properties(gtest::gmock PROPERTIES + IMPORTED_CONFIGURATIONS "Debug;Release" IMPORTED_LOCATION ${gmock_PATH_Release} IMPORTED_LOCATION_DEBUG ${gmock_PATH_Debug} ) add_library(gtest::gmock_main STATIC IMPORTED GLOBAL) set_target_properties(gtest::gmock_main PROPERTIES + IMPORTED_CONFIGURATIONS "Debug;Release" IMPORTED_LOCATION ${gmock_main_PATH_Release} IMPORTED_LOCATION_DEBUG ${gmock_main_PATH_Debug} ) diff --git a/soup/hdf5/CMakeLists.txt b/soup/hdf5/CMakeLists.txt index ebb45a38b..62250aba4 100644 --- a/soup/hdf5/CMakeLists.txt +++ b/soup/hdf5/CMakeLists.txt @@ -8,7 +8,7 @@ project(hdf5_external) if(NOT WIN32) set(zlib_BUILD_LIB_PATH "/libz.a") set(zlib_PATH "${CMAKE_INSTALL_PREFIX}/lib/libz.a") - set(zlib_NINJA "${zlib_PATH}") + set(zlib_NINJA BUILD_BYPRODUCTS "${zlib_PATH}") else() set(zlib_BUILD_LIB_PATH "/Release/zlibstatic.lib") set(zlib_PATH "${CMAKE_INSTALL_PREFIX}/lib/zlibstatic.lib") @@ -50,13 +50,17 @@ ExternalProject_Add_Step( # Define paths of the generated hdf5 libraries if(NOT WIN32) - set(hdf5_PATH "${CMAKE_INSTALL_PREFIX}/lib/libhdf5.a") - set(hdf5_NINJA "${hdf5_PATH}") - set(hdf5_cpp_PATH "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp.a") - set(hdf5_cpp_NINJA "${hdf5_cpp_PATH}") + set(libhdf5_PATH_Release "${CMAKE_INSTALL_PREFIX}/lib/libhdf5.a") + set(libhdf5_PATH_Debug "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_debug.a") + set(libhdf5_NINJA BUILD_BYPRODUCTS "${libhdf5_PATH_${CMAKE_BUILD_TYPE}}") + set(libhdf5_cpp_PATH_Release "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp.a") + set(libhdf5_cpp_PATH_Debug "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp_debug.a") + set(libhdf5_cpp_NINJA BUILD_BYPRODUCTS "${libhdf5_cpp_PATH_${CMAKE_BUILD_TYPE}}") else() - set(hdf5_PATH "${CMAKE_INSTALL_PREFIX}/lib/libhdf5.lib") - set(hdf5_cpp_PATH "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp.lib") + set(libhdf5_PATH_Release "${CMAKE_INSTALL_PREFIX}/lib/libhdf5.lib") + set(libhdf5_PATH_Debug "${CMAKE_INSTALL_PREFIX}/lib/libhdf5.lib") + set(libhdf5_cpp_PATH_Release "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp.lib") + set(libhdf5_cpp_PATH_Debug "${CMAKE_INSTALL_PREFIX}/lib/libhdf5_cpp.lib") endif() # Download, build and install hdf5. 
@@ -127,9 +131,13 @@ set_target_properties(hdf5::zlib PROPERTIES ) add_library(hdf5::hdf5 STATIC IMPORTED GLOBAL) set_target_properties(hdf5::hdf5 PROPERTIES - IMPORTED_LOCATION ${hdf5_PATH} + IMPORTED_CONFIGURATIONS "Debug;Release" + IMPORTED_LOCATION ${libhdf5_PATH_Release} + IMPORTED_LOCATION_DEBUG ${libhdf5_PATH_Debug} ) add_library(hdf5::hdf5_cpp STATIC IMPORTED GLOBAL) set_target_properties(hdf5::hdf5_cpp PROPERTIES - IMPORTED_LOCATION ${hdf5_cpp_PATH} + IMPORTED_CONFIGURATIONS "Debug;Release" + IMPORTED_LOCATION ${libhdf5_cpp_PATH_Release} + IMPORTED_LOCATION_DEBUG ${libhdf5_cpp_PATH_Debug} ) diff --git a/unit_py/data/box_collision_mesh.urdf b/unit_py/data/box_collision_mesh.urdf index 28468307e..8c64f0268 100644 --- a/unit_py/data/box_collision_mesh.urdf +++ b/unit_py/data/box_collision_mesh.urdf @@ -15,6 +15,9 @@ The force sensor is mounted with an offset rotation to verify frame computations + + + diff --git a/unit_py/data/sphere_primitive.urdf b/unit_py/data/sphere_primitive.urdf index 7496d66da..4569f5ef1 100644 --- a/unit_py/data/sphere_primitive.urdf +++ b/unit_py/data/sphere_primitive.urdf @@ -15,6 +15,9 @@ The force sensor is mounted with an offset rotation to verify frame computations + + +
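
To close, a sketch tying together the renamed log helpers that the Python changes above now rely on. The log file path, mesh package directories, and fieldname are placeholders, and extract_data_from_log is assumed to live in jiminy_py.log next to read_log and build_robot_from_log.

    from jiminy_py.log import (
        read_log, build_robot_from_log, extract_data_from_log)
    from jiminy_py.viewer.replay import play_logs_files

    log_file = "/tmp/jiminy_simulation.hdf5"  # placeholder path
    log_data, log_constants = read_log(log_file)

    # Rebuild the robot from the log constants (no extra mesh package dirs here)
    robot = build_robot_from_log(log_constants, [])

    # Extract selected fields, either as a dict or as a list of arrays
    fields = extract_data_from_log(
        log_data, ["Global.Time"], as_dict=True)  # placeholder fieldname

    # Replay one or several log files directly
    play_logs_files(log_file)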