diff --git a/MLModelRunner/ONNXModelRunner/CMakeLists.txt b/MLModelRunner/ONNXModelRunner/CMakeLists.txt index de1d59d..9a14833 100755 --- a/MLModelRunner/ONNXModelRunner/CMakeLists.txt +++ b/MLModelRunner/ONNXModelRunner/CMakeLists.txt @@ -52,3 +52,4 @@ endif(LLVM_MLBRIDGE) target_link_libraries(ONNXModelRunnerLib PUBLIC onnxruntime) target_include_directories(ONNXModelRunnerLib PUBLIC "${ONNXRUNTIME_ROOTDIR}/include" "${ONNXRUNTIME_ROOTDIR}/include/onnxruntime/core/session" ${TENSORFLOW_AOT_PATH}/include) +target_link_directories(ONNXModelRunnerLib PUBLIC ${ONNXRUNTIME_ROOTDIR}/lib) \ No newline at end of file diff --git a/MLModelRunner/ONNXModelRunner/ONNXModelRunner.cpp b/MLModelRunner/ONNXModelRunner/ONNXModelRunner.cpp index 3326b0d..1676278 100755 --- a/MLModelRunner/ONNXModelRunner/ONNXModelRunner.cpp +++ b/MLModelRunner/ONNXModelRunner/ONNXModelRunner.cpp @@ -9,6 +9,7 @@ #include "MLModelRunner/ONNXModelRunner/ONNXModelRunner.h" #include "SerDes/baseSerDes.h" +using namespace llvm; namespace MLBridge { ONNXModelRunner::ONNXModelRunner(Environment *env, diff --git a/MLModelRunner/PipeModelRunner.cpp b/MLModelRunner/PipeModelRunner.cpp index acb5c42..06e0088 100755 --- a/MLModelRunner/PipeModelRunner.cpp +++ b/MLModelRunner/PipeModelRunner.cpp @@ -23,6 +23,8 @@ #define DEBUG_TYPE "pipe-model-runner" +using namespace llvm; + namespace MLBridge { PipeModelRunner::PipeModelRunner(StringRef OutboundName, StringRef InboundName, BaseSerDes::Kind SerDesType, LLVMContext *Ctx) diff --git a/SerDes/bitstreamSerDes.cpp b/SerDes/bitstreamSerDes.cpp index 2e8763f..a3a7f11 100644 --- a/SerDes/bitstreamSerDes.cpp +++ b/SerDes/bitstreamSerDes.cpp @@ -18,8 +18,6 @@ #define DEBUG_TYPE "bitstream-serdes" -using namespace llvm; -using namespace std; namespace MLBridge { void BitstreamSerDes::setFeature(const std::string &name, const int &value) { @@ -113,9 +111,9 @@ void BitstreamSerDes::setFeature(const std::string &name, } void *BitstreamSerDes::getSerializedData() { - 
std::unique_ptr<raw_string_ostream> OS = - std::make_unique<raw_string_ostream>(Buffer); - json::OStream J(*OS); + std::unique_ptr<llvm::raw_string_ostream> OS = + std::make_unique<llvm::raw_string_ostream>(Buffer); + llvm::json::OStream J(*OS); J.object([&]() { J.attributeArray("features", [&]() { for (size_t I = 0; I < tensorSpecs.size(); ++I) { @@ -125,7 +123,7 @@ void *BitstreamSerDes::getSerializedData() { }); J.flush(); OS->write("\n", 1); - LLVM_DEBUG(errs() << "rawData.size(): " << rawData.size() << "\n"); + LLVM_DEBUG(llvm::errs() << "rawData.size(): " << rawData.size() << "\n"); for (size_t I = 0; I < rawData.size(); ++I) { OS->write(reinterpret_cast<const char *>(rawData[I]), tensorSpecs[I].getTotalTensorBufferSize()); diff --git a/SerDes/jsonSerDes.cpp b/SerDes/jsonSerDes.cpp index aa3a87d..e5abf8a 100755 --- a/SerDes/jsonSerDes.cpp +++ b/SerDes/jsonSerDes.cpp @@ -14,6 +14,8 @@ #define DEBUG_TYPE "json-serdes" +using namespace llvm; + namespace MLBridge { void *JsonSerDes::getSerializedData() { auto tempJO = J; diff --git a/SerDes/tensorflowSerDes.cpp b/SerDes/tensorflowSerDes.cpp index be63e5a..ffde191 100644 --- a/SerDes/tensorflowSerDes.cpp +++ b/SerDes/tensorflowSerDes.cpp @@ -21,7 +21,7 @@ SUPPORTED_TYPES(SET_FEATURE) #undef SET_FEATURE void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), @@ -29,7 +29,7 @@ void TensorflowSerDes::setFeature(const std::string &Name, } void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), @@ -37,7 +37,7 @@ void TensorflowSerDes::setFeature(const std::string &Name, } void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = 
CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), @@ -45,7 +45,7 @@ void TensorflowSerDes::setFeature(const std::string &Name, } void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), @@ -53,7 +53,7 @@ void TensorflowSerDes::setFeature(const std::string &Name, } void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), @@ -61,7 +61,7 @@ void TensorflowSerDes::setFeature(const std::string &Name, } void TensorflowSerDes::setFeature(const std::string &Name, - const vector &Value) { + const std::vector &Value) { std::string prefix = "feed_"; const int Index = CompiledModel->LookupArgIndex(prefix + Name); std::copy(Value.begin(), Value.end(), diff --git a/include/MLModelRunner/MLModelRunner.h b/include/MLModelRunner/MLModelRunner.h index 7d531bc..b369844 100644 --- a/include/MLModelRunner/MLModelRunner.h +++ b/include/MLModelRunner/MLModelRunner.h @@ -82,19 +82,19 @@ class MLModelRunner { protected: MLModelRunner(Kind Type, BaseSerDes::Kind SerDesType, - LLVMContext *Ctx = nullptr) + llvm::LLVMContext *Ctx = nullptr) : Ctx(Ctx), Type(Type), SerDesType(SerDesType) { assert(Type != Kind::Unknown); initSerDes(); } - MLModelRunner(Kind Type, LLVMContext *Ctx = nullptr) + MLModelRunner(Kind Type, llvm::LLVMContext *Ctx = nullptr) : Ctx(Ctx), Type(Type), SerDesType(BaseSerDes::Kind::Unknown) { SerDes = nullptr; }; virtual void *evaluateUntyped() = 0; - LLVMContext *Ctx; + llvm::LLVMContext *Ctx; const Kind Type; const BaseSerDes::Kind SerDesType; diff --git a/include/MLModelRunner/ONNXModelRunner/ONNXModelRunner.h b/include/MLModelRunner/ONNXModelRunner/ONNXModelRunner.h 
index 94e7d17..9699208 100755 --- a/include/MLModelRunner/ONNXModelRunner/ONNXModelRunner.h +++ b/include/MLModelRunner/ONNXModelRunner/ONNXModelRunner.h @@ -22,7 +22,7 @@ class ONNXModelRunner : public MLModelRunner { public: ONNXModelRunner(MLBridge::Environment *env, std::map<std::string, Agent *> agents, - LLVMContext *Ctx = nullptr); + llvm::LLVMContext *Ctx = nullptr); void setEnvironment(MLBridge::Environment *_env) { env = _env; } MLBridge::Environment *getEnvironment() { return env; } void addAgent(Agent *agent, std::string name); diff --git a/include/MLModelRunner/PipeModelRunner.h b/include/MLModelRunner/PipeModelRunner.h index 2b84e40..bd0772a 100755 --- a/include/MLModelRunner/PipeModelRunner.h +++ b/include/MLModelRunner/PipeModelRunner.h @@ -43,8 +43,8 @@ namespace MLBridge { /// (which will hang until there's a writer on the other end). class PipeModelRunner : public MLModelRunner { public: - PipeModelRunner(StringRef OutboundName, StringRef InboundName, - BaseSerDes::Kind Kind, LLVMContext *Ctx = nullptr); + PipeModelRunner(llvm::StringRef OutboundName, llvm::StringRef InboundName, + BaseSerDes::Kind Kind, llvm::LLVMContext *Ctx = nullptr); static bool classof(const MLModelRunner *R) { return R->getKind() == MLModelRunner::Kind::Pipe; @@ -64,7 +64,7 @@ class PipeModelRunner : public MLModelRunner { int Inbound = -1; std::error_code OutEC; std::error_code InEC; - std::unique_ptr<raw_fd_ostream> OutStream; + std::unique_ptr<llvm::raw_fd_ostream> OutStream; }; } // namespace MLBridge #endif // PipeModelRunner_H diff --git a/include/MLModelRunner/TFModelRunner.h b/include/MLModelRunner/TFModelRunner.h index b77d83b..1e301fd 100644 --- a/include/MLModelRunner/TFModelRunner.h +++ b/include/MLModelRunner/TFModelRunner.h @@ -29,9 +29,9 @@ template <class TGen> class TFModelRunner final : public MLModelRunner { public: /// FeatureNames' type should be an indexed collection of std::string, like /// std::array or std::vector, that has a size() method. 
- TFModelRunner(StringRef DecisionName, LLVMContext &Ctx, - StringRef FeedPrefix = "feed_", - StringRef FetchPrefix = "fetch_") + TFModelRunner(llvm::StringRef DecisionName, llvm::LLVMContext &Ctx, + llvm::StringRef FeedPrefix = "feed_", + llvm::StringRef FetchPrefix = "fetch_") : MLModelRunner(MLModelRunner::Kind::TFAOT, BaseSerDes::Kind::Tensorflow, &Ctx), CompiledModel(std::make_unique<TGen>()) { @@ -44,8 +44,8 @@ template <class TGen> class TFModelRunner final : public MLModelRunner { DecisionName.str()); assert(ResultIndex >= 0 && "Cannot find DecisionName in inlining model"); } - TFModelRunner(StringRef DecisionName, StringRef FeedPrefix = "feed_", - StringRef FetchPrefix = "fetch_") + TFModelRunner(llvm::StringRef DecisionName, llvm::StringRef FeedPrefix = "feed_", + llvm::StringRef FetchPrefix = "fetch_") : MLModelRunner(MLModelRunner::Kind::TFAOT, BaseSerDes::Kind::Tensorflow), CompiledModel(std::make_unique<TGen>()) { diff --git a/include/MLModelRunner/gRPCModelRunner.h b/include/MLModelRunner/gRPCModelRunner.h index 9dd8275..33d4e4b 100644 --- a/include/MLModelRunner/gRPCModelRunner.h +++ b/include/MLModelRunner/gRPCModelRunner.h @@ -24,7 +24,7 @@ template class gRPCModelRunner : public MLModelRunner { public: gRPCModelRunner(std::string server_address, grpc::Service *s, - LLVMContext *Ctx = nullptr) // For server mode + llvm::LLVMContext *Ctx = nullptr) // For server mode : MLModelRunner(MLModelRunner::Kind::gRPC, BaseSerDes::Kind::Protobuf, Ctx), server_address(server_address), request(nullptr), response(nullptr), @@ -34,7 +34,7 @@ gRPCModelRunner(std::string server_address, Request *request, Response *response, - LLVMContext *Ctx = nullptr) // For client mode + llvm::LLVMContext *Ctx = nullptr) // For client mode : MLModelRunner(MLModelRunner::Kind::gRPC, BaseSerDes::Kind::Protobuf, Ctx), server_address(server_address), request(request), response(response), @@ -44,7 +44,7 @@ // void 
*getStub() { return stub_; } void requestExit() override { - errs() << "Exit from grpc\n"; + llvm::errs() << "Exit from grpc\n"; exit_requested->set_value(); } @@ -102,7 +102,7 @@ class gRPCModelRunner : public MLModelRunner { Response *getResponse() { return (Response *)SerDes->getResponse(); } void printMessage(const google::protobuf::Message *message) { - errs() << "In gRPCModelRunner printMessage...\n"; + llvm::errs() << "In gRPCModelRunner printMessage...\n"; std::string s; if (google::protobuf::TextFormat::PrintToString(*message, &s)) { std::cout << "Your message: " << s << std::endl; diff --git a/include/SerDes/TensorSpec.h b/include/SerDes/TensorSpec.h index 40edecc..a306493 100644 --- a/include/SerDes/TensorSpec.h +++ b/include/SerDes/TensorSpec.h @@ -18,7 +18,6 @@ #include #include -using namespace llvm; namespace MLBridge { /// TensorSpec encapsulates the specification of a tensor: its dimensions, or /// "shape" (row-major), its type (see TensorSpec::getDataType specializations @@ -91,7 +90,7 @@ class TensorSpec final { : TensorSpec(NewName, Other.Port, Other.Type, Other.ElementSize, Other.Shape) {} - void toJSON(json::OStream &OS) const; + void toJSON(llvm::json::OStream &OS) const; private: TensorSpec(const std::string &Name, int Port, TensorType Type, @@ -117,8 +116,8 @@ std::string tensorValueToString(const char *Buffer, const TensorSpec &Spec); /// "shape": } /// For the "type" field, see the C++ primitive types used in /// TFUTILS_SUPPORTED_TYPES. 
-std::optional<TensorSpec> getTensorSpecFromJSON(LLVMContext &Ctx, - const json::Value &Value); +std::optional<TensorSpec> getTensorSpecFromJSON(llvm::LLVMContext &Ctx, + const llvm::json::Value &Value); #define TFUTILS_GETDATATYPE_DEF(T, Name) \ template <> TensorType TensorSpec::getDataType<T>(); diff --git a/include/SerDes/bitstreamSerDes.h b/include/SerDes/bitstreamSerDes.h index c73068d..de1a0f7 100644 --- a/include/SerDes/bitstreamSerDes.h +++ b/include/SerDes/bitstreamSerDes.h @@ -16,15 +16,14 @@ #include #include -using namespace std; namespace MLBridge { class BitstreamSerDes : public BaseSerDes { public: BitstreamSerDes() : BaseSerDes(Kind::Bitstream) { Buffer = ""; - tensorSpecs = vector<TensorSpec>(); - rawData = vector<void *>(); + tensorSpecs = std::vector<TensorSpec>(); + rawData = std::vector<void *>(); #define TEMPORARY_STORAGE_INIT(TYPE) \ features##TYPE = {}; \ @@ -42,8 +41,8 @@ class BitstreamSerDes : public BaseSerDes { void cleanDataStructures() override { Buffer = ""; - tensorSpecs = vector<TensorSpec>(); - rawData = vector<void *>(); + tensorSpecs = std::vector<TensorSpec>(); + rawData = std::vector<void *>(); #define TEMPORARY_STORAGE_CLEAN(TYPE) \ for (auto &it : features##TYPE) { \ @@ -62,9 +61,9 @@ class BitstreamSerDes : public BaseSerDes { private: void *deserializeUntyped(void *) override; - vector<TensorSpec> tensorSpecs; - vector<void *> rawData; - string Buffer; + std::vector<TensorSpec> tensorSpecs; + std::vector<void *> rawData; + std::string Buffer; #define TEMPORARY_STORAGE_DEF(TYPE) \ std::map features##TYPE; \ diff --git a/include/SerDes/jsonSerDes.h b/include/SerDes/jsonSerDes.h index 9f3f966..b1354e9 100644 --- a/include/SerDes/jsonSerDes.h +++ b/include/SerDes/jsonSerDes.h @@ -13,8 +13,6 @@ #include #include -using namespace std; -using namespace llvm; namespace MLBridge { class JsonSerDes : public BaseSerDes { public: @@ -30,21 +28,21 @@ class JsonSerDes : public BaseSerDes { } \ void setFeature(const std::string &name, const std::vector<TYPE> &value) \ override { \ - J[name] = llvm::json::Array(value); \ + J[name] = llvm::json::Array(value); \ } SUPPORTED_TYPES(SET_FEATURE) 
#undef SET_FEATURE void *getSerializedData() override; - void cleanDataStructures() override { J = json::Object(); } + void cleanDataStructures() override { J = llvm::json::Object(); } private: void *deserializeUntyped(void *data) override; - void *desJson(json::Value *V); + void *desJson(llvm::json::Value *V); private: - json::Object J; + llvm::json::Object J; }; } // namespace MLBridge diff --git a/test/inference/HelloMLBridge_Env.h b/test/inference/HelloMLBridge_Env.h index fb0cdd1..8ea88ad 100644 --- a/test/inference/HelloMLBridge_Env.h +++ b/test/inference/HelloMLBridge_Env.h @@ -3,7 +3,6 @@ #include "llvm/IR/Module.h" #include "llvm/Support/raw_ostream.h" -using namespace llvm; using namespace MLBridge; class HelloMLBridgeEnv : public Environment { Observation CurrObs; @@ -22,7 +21,7 @@ Observation& HelloMLBridgeEnv::step(Action Action) { CurrObs.clear(); std::copy(FeatureVector.begin(), FeatureVector.end(), std::back_inserter(CurrObs)); - outs() << "Action: " << Action << "\n"; + llvm::outs() << "Action: " << Action << "\n"; setDone(); return CurrObs; }