Merge branch 'dev'
ToKiNoBug committed Feb 13, 2022
2 parents 9168142 + 40d1077 commit 8d12b65
Showing 11 changed files with 284 additions and 154 deletions.
7 changes: 5 additions & 2 deletions Genetic/GABase.hpp
@@ -19,7 +19,6 @@ This file is part of HeuristicFlow.
 
 #ifndef Heu_GABASE_H
 #define Heu_GABASE_H
-#include <iostream>
 #include "./GAOption.hpp"
 #include <tuple>
 #include <vector>
@@ -67,19 +66,23 @@ class GABase : public GAAbstract<Var_t,Fitness_t,Args_t>
 ///Gene type for Var
 class Gene {
 public:
+    using fastFitness_t = typename
+        std::conditional<(sizeof(Fitness_t)>sizeof(double)),const Fitness_t &,Fitness_t>::type;
     Var_t self;
     bool isCalculated() const {
        return _isCalculated;
     }
     void setUncalculated() {
        _isCalculated=false;
     }
-    Fitness_t fitness() const {
+    fastFitness_t fitness() const {
        return _Fitness;
     }
 
     bool _isCalculated;
     Fitness_t _Fitness;
 
 };
 ///list iterator to Gene
 using GeneIt_t = typename std::list<Gene>::iterator;
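
The fastFitness_t alias added above is a small metaprogramming idiom: fitness values no larger than a double are returned by value, anything bigger by const reference, so multi-objective fitness vectors are never copied on read. A minimal standalone sketch of the same idiom (fast_return_t and GeneSketch are illustrative names, not HeuristicFlow API):

#include <array>
#include <type_traits>

// Return T by value if it is no larger than a double,
// otherwise by const reference to avoid copying.
template <typename T>
using fast_return_t = typename std::conditional<
    (sizeof(T) > sizeof(double)), const T &, T>::type;

template <typename Fitness>
class GeneSketch {
public:
    fast_return_t<Fitness> fitness() const { return _fitness; }
private:
    Fitness _fitness{};
};

// GeneSketch<double>::fitness() returns double by value;
// GeneSketch<std::array<double, 8>>::fitness() returns a const reference.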
30 changes: 25 additions & 5 deletions Genetic/MOGAAbstract.hpp
@@ -45,20 +45,21 @@ enum PFOption : unsigned char {
  */
 template<typename Var_t,
          size_t ObjNum,
-         typename Fitness_t,
+         DoubleVectorOption DVO,
          FitnessOption fOpt,
          RecordOption rOpt,
          PFOption pfOpt,
          class Args_t>
 class MOGAAbstract
-    : public GABase<Var_t,Fitness_t,rOpt,Args_t>
+    : public GABase<Var_t,FitnessVec_t<DVO,ObjNum>,rOpt,Args_t>
 {
 public:
     MOGAAbstract() {};
     virtual ~MOGAAbstract() {};
 
-    using Base_t = GABase<Var_t,Fitness_t,rOpt,Args_t>;
+    using Base_t = GABase<Var_t,FitnessVec_t<DVO,ObjNum>,rOpt,Args_t>;
     Heu_MAKE_GABASE_TYPES
+    using Fitness_t = FitnessVec_t<DVO,ObjNum>;
 
     ///get pareto front in vec
     inline void paretoFront(std::vector<Fitness_t> & front) const {
@@ -96,6 +97,7 @@ class MOGAAbstract
 ///whether A strong domainates B
 static bool isStrongDomain(const Fitness_t * A,const Fitness_t * B) {
     if(A==B) return false;
+    if constexpr (DVO!=Eigen) {
     uint32_t notWorseNum=0,betterNum=0;
     for(size_t objIdx=0;objIdx<A->size();objIdx++) {
         if constexpr (fOpt==FITNESS_GREATER_BETTER) {
@@ -110,6 +112,19 @@ if(notWorseNum<A->size())
     if(notWorseNum<A->size())
         return false;
     return betterNum>0;
+    }
+    else {
+        bool isNotWorse,isBetter;
+        if constexpr (fOpt==FITNESS_GREATER_BETTER) {
+            isNotWorse=((*A)>=(*B)).all();
+            isBetter=((*A)>(*B)).any();
+        }
+        else {
+            isNotWorse=((*A)<=(*B)).all();
+            isBetter=((*A)<(*B)).any();
+        }
+        return isNotWorse&&isBetter;
+    }
 } //isStrongDomain
 
 virtual size_t makePFCheckSum() const {
@@ -147,10 +162,15 @@ class MOGAAbstract
 }
 
 private:
 
 #ifndef Heu_NO_STATICASSERT
     static_assert(std::integral_constant<bool,(ObjNum!=1)>::value,
-        "HeuristicFlow : You used less than 1 objective in NSGA2");
+        "HeuristicFlow : You assigned single objective in MOGA");
 
+#ifndef EIGEN_CORE_H
+    static_assert(DVO!=DoubleVectorOption::Eigen,
+        "Include Eigen before using Eigen arrays as Fitness types");
+#endif // EIGEN_CORE_H
 
 #endif
 
 }; // MOGAAbstract
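
The Eigen branch added to isStrongDomain replaces the per-objective loop with array reductions: A strongly dominates B when it is not worse in every objective (.all()) and strictly better in at least one (.any()). A sketch of the same test for the minimization case, written against plain Eigen::ArrayXd rather than the library's Fitness_t (the function name is illustrative):

#include <Eigen/Core>

// Strong Pareto dominance under minimization: no objective of A is
// worse than B's, and at least one is strictly better.
bool stronglyDominates(const Eigen::ArrayXd &A, const Eigen::ArrayXd &B) {
    const bool isNotWorse = (A <= B).all();  // element-wise comparisons
    const bool isBetter   = (A <  B).any();
    return isNotWorse && isBetter;
}

// Example: (1, 2) dominates (1, 3); (1, 2) and (2, 1) do not dominate
// each other because neither is at least as good in both objectives.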
14 changes: 7 additions & 7 deletions Genetic/MOGABase.hpp
@@ -45,16 +45,16 @@ namespace Heu {
  */
 template<typename Var_t,
          size_t ObjNum,
-         typename Fitness_t,
+         DoubleVectorOption DVO,
          FitnessOption fOpt,
          RecordOption rOpt,
          PFOption pfOpt,
          class Args_t>
 class MOGABase
-    : public MOGAAbstract<Var_t,ObjNum,Fitness_t,fOpt,rOpt,pfOpt,Args_t>
+    : public MOGAAbstract<Var_t,ObjNum,DVO,fOpt,rOpt,pfOpt,Args_t>
 {
 public:
-    using Base_t = MOGAAbstract<Var_t,ObjNum,Fitness_t,fOpt,rOpt,pfOpt,Args_t>;
+    using Base_t = MOGAAbstract<Var_t,ObjNum,DVO,fOpt,rOpt,pfOpt,Args_t>;
     Heu_MAKE_GABASE_TYPES
 
     MOGABase() {};
@@ -77,20 +77,20 @@ class MOGABase
  * @tparam Args
  */
 template<typename Var_t,
-         typename Fitness_t,
+         DoubleVectorOption DVO,
          FitnessOption fOpt,
          RecordOption rOpt,
          PFOption pfOpt,
          class Args_t>
-class MOGABase<Var_t,Dynamic,Fitness_t,fOpt,rOpt,pfOpt,Args_t>
-    : public MOGAAbstract<Var_t,Dynamic,Fitness_t,fOpt,rOpt,pfOpt,Args_t>
+class MOGABase<Var_t,Dynamic,DVO,fOpt,rOpt,pfOpt,Args_t>
+    : public MOGAAbstract<Var_t,Dynamic,DVO,fOpt,rOpt,pfOpt,Args_t>
 {
 public:
 
     MOGABase() {};
     virtual ~MOGABase() {};
 
-    using Base_t = MOGAAbstract<Var_t,Dynamic,Fitness_t,fOpt,rOpt,pfOpt,Args_t>;
+    using Base_t = MOGAAbstract<Var_t,Dynamic,DVO,fOpt,rOpt,pfOpt,Args_t>;
     Heu_MAKE_GABASE_TYPES
 
     inline size_t objectiveNum() const {
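
Throughout this commit the typename Fitness_t template parameter becomes a DoubleVectorOption DVO enum, and the concrete fitness vector is derived from it as FitnessVec_t<DVO,ObjNum>. A minimal sketch of such enum-to-type dispatch, assuming FitnessVec_t is built on std::conditional roughly like this (the real alias lives elsewhere in HeuristicFlow and also handles the Dynamic size):

#include <array>
#include <type_traits>
#include <Eigen/Core>

enum class VecOption { Std, Eigen };

// Map an enum template parameter to a concrete vector type at compile time.
template <VecOption Opt, int N>
using FitnessVecSketch_t = typename std::conditional<
    Opt == VecOption::Eigen,
    Eigen::Array<double, N, 1>,
    std::array<double, N>>::type;

static_assert(std::is_same<FitnessVecSketch_t<VecOption::Std, 3>,
                           std::array<double, 3>>::value,
              "the Std option maps to std::array");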
61 changes: 4 additions & 57 deletions Genetic/NSGA2.hpp
@@ -25,8 +25,6 @@ This file is part of HeuristicFlow.
 namespace Heu
 {
 
-
-
 /**
  * @brief NSGA2 MOGA solver. Suitable for not too many objectives.
  *
@@ -53,7 +51,7 @@ template<typename Var_t,
 class NSGA2
     : public NSGA2Base<Var_t,
                        ObjNum,
-                       stdVecD_t<ObjNum>,
+                       DVO,
                        isGreaterBetter,
                        Record,
                        ProtectPF,
@@ -62,12 +60,11 @@ class NSGA2
 public:
     using Base_t = NSGA2Base<Var_t,
                              ObjNum,
-                             stdVecD_t<ObjNum>,
+                             DVO,
                              isGreaterBetter,
                              Record,
                              ProtectPF,
                              Args_t>;
-    using Fitness_t = stdVecD_t<ObjNum>;
     Heu_MAKE_NSGABASE_TYPES
 
     NSGA2() {
@@ -107,7 +104,7 @@ class NSGA2<Var_t,
         Args_t>
     : public NSGA2Base<Var_t,
                        ObjNum,
-                       EigenVecD_t<ObjNum>,
+                       DoubleVectorOption::Eigen,
                        isGreaterBetter,
                        Record,
                        ProtectPF,
@@ -116,12 +113,11 @@ class NSGA2<Var_t,
 public:
     using Base_t = NSGA2Base<Var_t,
                              ObjNum,
-                             EigenVecD_t<ObjNum>,
+                             DoubleVectorOption::Eigen,
                              isGreaterBetter,
                              Record,
                              ProtectPF,
                              Args_t>;
-    using Fitness_t = EigenVecD_t<ObjNum>;
     Heu_MAKE_NSGABASE_TYPES
 
     using congestComposeFun = typename Base_t::congestComposeFun;
@@ -175,55 +171,6 @@ class NSGA2<Var_t,

 protected:
 
-///whether A strong domainates B
-static bool Eig_isStrongDomain(const Fitness_t * A,const Fitness_t * B) {
-    bool isNotWorse,isBetter;
-    if constexpr (isGreaterBetter==FITNESS_GREATER_BETTER) {
-        isNotWorse=((*A)>=(*B)).all();
-        isBetter=((*A)>(*B)).any();
-    }
-    else {
-        isNotWorse=((*A)<=(*B)).all();
-        isBetter=((*A)<(*B)).any();
-    }
-    return isNotWorse&&isBetter;
-} //isStrongDomain
-
-//calculate domainedByNum
-virtual void calculateDominatedNum(infoUnitBase_t ** pop,
-                                   const size_t popSizeBefore) const {
-#ifdef Heu_NSGA2_DO_PARALLELIZE
-    static const size_t thN=OtGlobal::threadNum();
-#pragma omp parallel for
-    for(size_t begIdx=0;begIdx<thN;begIdx++) {
-
-        for(size_t ed=begIdx;ed<popSizeBefore;ed+=thN) {
-            pop[ed]->domainedByNum=0;
-            for(size_t er=0;er<popSizeBefore;er++) {
-                if(er==ed)
-                    continue;
-                pop[ed]->domainedByNum+=
-                    Eig_isStrongDomain(&(pop[er]->iterator->_Fitness),
-                                       &(pop[ed]->iterator->_Fitness));
-            }
-        }
-    }
-
-#else
-    for(size_t ed=0;ed<popSizeBefore;ed++) {
-        pop[ed]->domainedByNum=0;
-        for(size_t er=0;er<popSizeBefore;er++) {
-            if(er==ed)
-                continue;
-            pop[ed]->domainedByNum+=
-                Eig_isStrongDomain(&(pop[er]->iterator->_Fitness),
-                                   &(pop[ed]->iterator->_Fitness));
-        }
-    }
-#endif
-
-}
-
 private:
 
 };
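
The block deleted above duplicated the Eigen-specific dominance test and the O(n^2) dominated-by count that NSGA-II runs each generation; after this commit both live in the shared base classes. For reference, a condensed serial sketch of that counting pass (the deleted parallel version splits the outer loop across OpenMP threads; names here are illustrative):

#include <cstddef>
#include <vector>
#include <Eigen/Core>

// Strong dominance under minimization, as sketched earlier.
static bool stronglyDominates(const Eigen::ArrayXd &A, const Eigen::ArrayXd &B) {
    return (A <= B).all() && (A < B).any();
}

// Count, for every individual, how many others strongly dominate it.
// NSGA-II uses this count to peel the population into pareto layers.
std::vector<int> dominatedByCount(const std::vector<Eigen::ArrayXd> &fitness) {
    const std::size_t n = fitness.size();
    std::vector<int> count(n, 0);
    for (std::size_t ed = 0; ed < n; ++ed)
        for (std::size_t er = 0; er < n; ++er)
            if (er != ed && stronglyDominates(fitness[er], fitness[ed]))
                ++count[ed];
    return count;
}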
46 changes: 29 additions & 17 deletions Genetic/NSGA2Base.hpp
@@ -35,21 +35,21 @@ enum CompareOption : int64_t {
  */
 template<typename Var_t,
          size_t ObjNum,
-         typename Fitness_t,
+         DoubleVectorOption DVO,
          FitnessOption fOpt=FITNESS_LESS_BETTER,
          RecordOption rOpt=DONT_RECORD_FITNESS,
          PFOption pfOpt=PARETO_FRONT_CAN_MUTATE,
          class Args_t=void>
 class NSGA2Base
-    :public NSGABase<Var_t,ObjNum,Fitness_t,fOpt,rOpt,pfOpt,Args_t>
+    :public NSGABase<Var_t,ObjNum,DVO,fOpt,rOpt,pfOpt,Args_t>
 {
 public:
     NSGA2Base() {
         _ccFun=default_ccFun_liner;
     };
     virtual ~NSGA2Base() {};
 
-    using Base_t = NSGABase<Var_t,ObjNum,Fitness_t,fOpt,rOpt,pfOpt,Args_t>;
+    using Base_t = NSGABase<Var_t,ObjNum,DVO,fOpt,rOpt,pfOpt,Args_t>;
     Heu_MAKE_NSGABASE_TYPES
 
     using congestComposeFun = double(*)(const Fitness_t *);
@@ -133,7 +133,6 @@
 public:
     /** @brief whether this gene is selected
      */
-    bool isSelected;
     Fitness_t congestion;
 };

@@ -176,8 +175,15 @@
 
 for(auto it=this->_population.begin();it!=this->_population.end();++it) {
     pop.emplace_back();
-    pop.back().isSelected=false;
     pop.back().iterator=it;
+    if constexpr (ObjNum==Dynamic) {
+        if constexpr (DVO==DoubleVectorOption::Eigen) {
+            pop.back().congestion.resize(this->objectiveNum(),1);
+        }
+        else {
+            pop.back().congestion.resize(this->objectiveNum());
+        }
+    }
 }
 
 std::vector<infoUnit*> sortSpace(popSizeBefore);
@@ -208,12 +214,14 @@
             unLayeredNum--;
         }
     }
-
-    this->updatePF((const infoUnitBase_t **)paretoLayers.front().data(),
+    const size_t PFSize=paretoLayers.front().size();
+    if(PFSize<=this->_option.populationSize)
+        this->updatePF((const infoUnitBase_t **)paretoLayers.front().data(),
                        paretoLayers.front().size());
 
 
-    std::queue<infoUnit *> selected;
+    std::unordered_set<infoUnit *> selected;
+    selected.reserve(this->_option.populationSize);
     bool needCongestion=true;
     while(true) {
         //don't need to calculate congestion
@@ -236,8 +244,7 @@
     //calculate congestion
     if(needCongestion) {
         for(size_t objIdx=0;objIdx<this->objectiveNum();objIdx++) {
-            std::vector<infoUnit*>
-                & cursortSpace=sortSpace;
+            std::vector<infoUnit*> & cursortSpace=sortSpace;
 
             std::sort(cursortSpace.begin(),cursortSpace.end(),fitnessCmpFuns[objIdx]);
 
@@ -269,17 +276,22 @@

 } // end applying congestion
 
-//mark selected genes
-while(!selected.empty()) {
-    selected.front()->isSelected=true;
-    selected.pop();
-}
-
 //erase unselected
-for(infoUnit & i : pop) {
-    if(!i.isSelected) {
+for(auto & i : pop) {
+    if(selected.find(&i)==selected.end()) {
         this->_population.erase(i.iterator);
     }
 }
 
+if(PFSize>this->_option.populationSize) {
+    std::vector<const infoUnitBase_t*> PF;
+    PF.reserve(selected.size());
+    for(auto i : selected) {
+        PF.emplace_back(i);
+    }
+    this->updatePF(PF.data(),PF.size());
+}
 }

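
The selection rewrite above drops the per-gene isSelected flag and the std::queue in favor of a std::unordered_set, so deciding which genes survive becomes a pointer membership test. A minimal sketch of that erase pattern with illustrative types (the library itself erases through list iterators stored in each infoUnit):

#include <list>
#include <unordered_set>

struct Item { int payload; };

// Erase every population member whose address is not in the selected set.
void eraseUnselected(std::list<Item> &population,
                     const std::unordered_set<const Item *> &selected) {
    for (auto it = population.begin(); it != population.end();) {
        if (selected.find(&*it) == selected.end())
            it = population.erase(it);  // erase returns the next iterator
        else
            ++it;
    }
}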