removed using namespace llvm from header files
pranav-159 committed Jan 14, 2024
1 parent 3d59126 commit 19b8509
Showing 15 changed files with 47 additions and 48 deletions.
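Why this matters, shown as a minimal illustrative sketch (the header and function below are hypothetical, not files from this repository): a using namespace llvm; directive in a header is injected into every translation unit that includes it, so unqualified names such as StringRef or errs() can collide with symbols from other libraries; qualifying the names keeps each header self-contained, while the .cpp files remain free to keep the directive locally, which is the pattern this commit applies.

// Hypothetical header "Foo.h" before the change:
//   #include "llvm/ADT/StringRef.h"
//   using namespace llvm;            // leaks into every file that includes Foo.h
//   void logName(StringRef Name);    // works only because of the leaked namespace
//
// After the change the header qualifies the name and drops the directive:
#include "llvm/ADT/StringRef.h"
void logName(llvm::StringRef Name);   // self-contained declaration

// The corresponding Foo.cpp may still use the directive, since it only
// affects that translation unit:
//   #include "Foo.h"
//   using namespace llvm;
//   void logName(StringRef Name) { errs() << Name << "\n"; }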
1 change: 1 addition & 0 deletions MLModelRunner/ONNXModelRunner/CMakeLists.txt
@@ -52,3 +52,4 @@ endif(LLVM_MLBRIDGE)
 
 target_link_libraries(ONNXModelRunnerLib PUBLIC onnxruntime)
 target_include_directories(ONNXModelRunnerLib PUBLIC "${ONNXRUNTIME_ROOTDIR}/include" "${ONNXRUNTIME_ROOTDIR}/include/onnxruntime/core/session" ${TENSORFLOW_AOT_PATH}/include)
+target_link_directories(ONNXModelRunnerLib PUBLIC ${ONNXRUNTIME_ROOTDIR}/lib)
1 change: 1 addition & 0 deletions MLModelRunner/ONNXModelRunner/ONNXModelRunner.cpp
@@ -9,6 +9,7 @@
 #include "MLModelRunner/ONNXModelRunner/ONNXModelRunner.h"
 #include "SerDes/baseSerDes.h"
 
+using namespace llvm;
 namespace MLBridge {
 
 ONNXModelRunner::ONNXModelRunner(Environment *env,
2 changes: 2 additions & 0 deletions MLModelRunner/PipeModelRunner.cpp
@@ -23,6 +23,8 @@
 
 #define DEBUG_TYPE "pipe-model-runner"
 
+using namespace llvm;
+
 namespace MLBridge {
 PipeModelRunner::PipeModelRunner(StringRef OutboundName, StringRef InboundName,
                                  BaseSerDes::Kind SerDesType, LLVMContext *Ctx)
10 changes: 4 additions & 6 deletions SerDes/bitstreamSerDes.cpp
@@ -18,8 +18,6 @@
 
 #define DEBUG_TYPE "bitstream-serdes"
 
-using namespace llvm;
-using namespace std;
 
 namespace MLBridge {
 void BitstreamSerDes::setFeature(const std::string &name, const int &value) {
@@ -113,9 +111,9 @@ void BitstreamSerDes::setFeature(const std::string &name,
 }
 
 void *BitstreamSerDes::getSerializedData() {
-  std::unique_ptr<raw_ostream> OS =
-      std::make_unique<raw_string_ostream>(Buffer);
-  json::OStream J(*OS);
+  std::unique_ptr<llvm::raw_ostream> OS =
+      std::make_unique<llvm::raw_string_ostream>(Buffer);
+  llvm::json::OStream J(*OS);
   J.object([&]() {
     J.attributeArray("features", [&]() {
       for (size_t I = 0; I < tensorSpecs.size(); ++I) {
@@ -125,7 +123,7 @@ void *BitstreamSerDes::getSerializedData() {
   });
   J.flush();
   OS->write("\n", 1);
-  LLVM_DEBUG(errs() << "rawData.size(): " << rawData.size() << "\n");
+  LLVM_DEBUG(llvm::errs() << "rawData.size(): " << rawData.size() << "\n");
   for (size_t I = 0; I < rawData.size(); ++I) {
     OS->write(reinterpret_cast<const char *>(rawData[I]),
               tensorSpecs[I].getTotalTensorBufferSize());
2 changes: 2 additions & 0 deletions SerDes/jsonSerDes.cpp
@@ -14,6 +14,8 @@
 
 #define DEBUG_TYPE "json-serdes"
 
+using namespace llvm;
+
 namespace MLBridge {
 void *JsonSerDes::getSerializedData() {
   auto tempJO = J;
12 changes: 6 additions & 6 deletions SerDes/tensorflowSerDes.cpp
@@ -21,47 +21,47 @@ SUPPORTED_TYPES(SET_FEATURE)
 #undef SET_FEATURE
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<int64_t> &Value) {
+                                  const std::vector<int64_t> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
             static_cast<int64_t *>(CompiledModel->arg_data(Index)));
 }
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<float> &Value) {
+                                  const std::vector<float> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
             static_cast<float *>(CompiledModel->arg_data(Index)));
 }
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<double> &Value) {
+                                  const std::vector<double> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
             static_cast<double *>(CompiledModel->arg_data(Index)));
 }
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<std::string> &Value) {
+                                  const std::vector<std::string> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
             static_cast<std::string *>(CompiledModel->arg_data(Index)));
 }
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<bool> &Value) {
+                                  const std::vector<bool> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
             static_cast<bool *>(CompiledModel->arg_data(Index)));
 }
 
 void TensorflowSerDes::setFeature(const std::string &Name,
-                                  const vector<int> &Value) {
+                                  const std::vector<int> &Value) {
   std::string prefix = "feed_";
   const int Index = CompiledModel->LookupArgIndex(prefix + Name);
   std::copy(Value.begin(), Value.end(),
6 changes: 3 additions & 3 deletions include/MLModelRunner/MLModelRunner.h
@@ -82,19 +82,19 @@ class MLModelRunner {
 
 protected:
   MLModelRunner(Kind Type, BaseSerDes::Kind SerDesType,
-                LLVMContext *Ctx = nullptr)
+                llvm::LLVMContext *Ctx = nullptr)
       : Ctx(Ctx), Type(Type), SerDesType(SerDesType) {
     assert(Type != Kind::Unknown);
     initSerDes();
   }
-  MLModelRunner(Kind Type, LLVMContext *Ctx = nullptr)
+  MLModelRunner(Kind Type, llvm::LLVMContext *Ctx = nullptr)
      : Ctx(Ctx), Type(Type), SerDesType(BaseSerDes::Kind::Unknown) {
     SerDes = nullptr;
   };
 
   virtual void *evaluateUntyped() = 0;
 
-  LLVMContext *Ctx;
+  llvm::LLVMContext *Ctx;
   const Kind Type;
   const BaseSerDes::Kind SerDesType;
 
2 changes: 1 addition & 1 deletion include/MLModelRunner/ONNXModelRunner/ONNXModelRunner.h
@@ -22,7 +22,7 @@ class ONNXModelRunner : public MLModelRunner {
 public:
   ONNXModelRunner(MLBridge::Environment *env,
                   std::map<std::string, Agent *> agents,
-                  LLVMContext *Ctx = nullptr);
+                  llvm::LLVMContext *Ctx = nullptr);
   void setEnvironment(MLBridge::Environment *_env) { env = _env; }
   MLBridge::Environment *getEnvironment() { return env; }
   void addAgent(Agent *agent, std::string name);
6 changes: 3 additions & 3 deletions include/MLModelRunner/PipeModelRunner.h
@@ -43,8 +43,8 @@ namespace MLBridge {
 /// (which will hang until there's a writer on the other end).
 class PipeModelRunner : public MLModelRunner {
 public:
-  PipeModelRunner(StringRef OutboundName, StringRef InboundName,
-                  BaseSerDes::Kind Kind, LLVMContext *Ctx = nullptr);
+  PipeModelRunner(llvm::StringRef OutboundName, llvm::StringRef InboundName,
+                  BaseSerDes::Kind Kind, llvm::LLVMContext *Ctx = nullptr);
 
   static bool classof(const MLModelRunner *R) {
     return R->getKind() == MLModelRunner::Kind::Pipe;
@@ -64,7 +64,7 @@ class PipeModelRunner : public MLModelRunner {
   int Inbound = -1;
   std::error_code OutEC;
   std::error_code InEC;
-  std::unique_ptr<raw_fd_ostream> OutStream;
+  std::unique_ptr<llvm::raw_fd_ostream> OutStream;
 };
 } // namespace MLBridge
 #endif // PipeModelRunner_H
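For context, a minimal usage sketch of the constructor this hunk qualifies (the pipe paths, the chosen SerDes kind, and the include paths are illustrative assumptions, not part of this commit):

#include "MLModelRunner/PipeModelRunner.h"
#include "llvm/IR/LLVMContext.h"

// Sketch: exchange features with an external model process over pre-created
// named pipes, serializing them with the bitstream SerDes. Per the class
// comment above, opening the inbound pipe hangs until a writer appears.
void makePipeRunner() {
  llvm::LLVMContext Ctx;
  MLBridge::PipeModelRunner Runner("/tmp/mlbridge-out.pipe",
                                   "/tmp/mlbridge-in.pipe",
                                   MLBridge::BaseSerDes::Kind::Bitstream, &Ctx);
}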
10 changes: 5 additions & 5 deletions include/MLModelRunner/TFModelRunner.h
@@ -29,9 +29,9 @@ template <class TGen> class TFModelRunner final : public MLModelRunner {
 public:
   /// FeatureNames' type should be an indexed collection of std::string, like
   /// std::array or std::vector, that has a size() method.
-  TFModelRunner(StringRef DecisionName, LLVMContext &Ctx,
-                StringRef FeedPrefix = "feed_",
-                StringRef FetchPrefix = "fetch_")
+  TFModelRunner(llvm::StringRef DecisionName, llvm::LLVMContext &Ctx,
+                llvm::StringRef FeedPrefix = "feed_",
+                llvm::StringRef FetchPrefix = "fetch_")
       : MLModelRunner(MLModelRunner::Kind::TFAOT, BaseSerDes::Kind::Tensorflow,
                       &Ctx),
         CompiledModel(std::make_unique<TGen>()) {
@@ -44,8 +44,8 @@ template <class TGen> class TFModelRunner final : public MLModelRunner {
                                              DecisionName.str());
     assert(ResultIndex >= 0 && "Cannot find DecisionName in inlining model");
   }
-  TFModelRunner(StringRef DecisionName, StringRef FeedPrefix = "feed_",
-                StringRef FetchPrefix = "fetch_")
+  TFModelRunner(llvm::StringRef DecisionName, llvm::StringRef FeedPrefix = "feed_",
+                llvm::StringRef FetchPrefix = "fetch_")
       : MLModelRunner(MLModelRunner::Kind::TFAOT, BaseSerDes::Kind::Tensorflow),
         CompiledModel(std::make_unique<TGen>()) {
 
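The doc comment in this header describes the template argument TGen as an AOT-compiled model class; a minimal construction sketch follows, assuming TFModelRunner sits in the MLBridge namespace like the other runners in this commit and leaving the concrete model class to the caller:

#include "MLModelRunner/TFModelRunner.h"
#include "llvm/IR/LLVMContext.h"

// TGen stands in for a TensorFlow AOT-generated model class (not part of this
// repository); the caller instantiates the template with the real class.
template <class TGen>
void makeTFRunner(llvm::LLVMContext &Ctx) {
  MLBridge::TFModelRunner<TGen> Runner("decision", Ctx);
  // With the default prefixes, inputs resolve to "feed_" + <name> and the
  // result tensor to "fetch_" + DecisionName, as in the constructor above.
}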
8 changes: 4 additions & 4 deletions include/MLModelRunner/gRPCModelRunner.h
@@ -24,7 +24,7 @@ template <class Client, class Stub, class Request, class Response>
 class gRPCModelRunner : public MLModelRunner {
 public:
   gRPCModelRunner(std::string server_address, grpc::Service *s,
-                  LLVMContext *Ctx = nullptr) // For server mode
+                  llvm::LLVMContext *Ctx = nullptr) // For server mode
       : MLModelRunner(MLModelRunner::Kind::gRPC, BaseSerDes::Kind::Protobuf,
                       Ctx),
         server_address(server_address), request(nullptr), response(nullptr),
@@ -34,7 +34,7 @@ class gRPCModelRunner : public MLModelRunner {
 
   gRPCModelRunner(std::string server_address, Request *request,
                   Response *response,
-                  LLVMContext *Ctx = nullptr) // For client mode
+                  llvm::LLVMContext *Ctx = nullptr) // For client mode
       : MLModelRunner(MLModelRunner::Kind::gRPC, BaseSerDes::Kind::Protobuf,
                       Ctx),
         server_address(server_address), request(request), response(response),
@@ -44,7 +44,7 @@ class gRPCModelRunner : public MLModelRunner {
 
   // void *getStub() { return stub_; }
   void requestExit() override {
-    errs() << "Exit from grpc\n";
+    llvm::errs() << "Exit from grpc\n";
     exit_requested->set_value();
   }
 
@@ -102,7 +102,7 @@
   Response *getResponse() { return (Response *)SerDes->getResponse(); }
 
   void printMessage(const google::protobuf::Message *message) {
-    errs() << "In gRPCModelRunner printMessage...\n";
+    llvm::errs() << "In gRPCModelRunner printMessage...\n";
     std::string s;
     if (google::protobuf::TextFormat::PrintToString(*message, &s)) {
       std::cout << "Your message: " << s << std::endl;
7 changes: 3 additions & 4 deletions include/SerDes/TensorSpec.h
@@ -18,7 +18,6 @@
 #include <optional>
 #include <vector>
 
-using namespace llvm;
 namespace MLBridge {
 /// TensorSpec encapsulates the specification of a tensor: its dimensions, or
 /// "shape" (row-major), its type (see TensorSpec::getDataType specializations
@@ -91,7 +90,7 @@ class TensorSpec final {
       : TensorSpec(NewName, Other.Port, Other.Type, Other.ElementSize,
                    Other.Shape) {}
 
-  void toJSON(json::OStream &OS) const;
+  void toJSON(llvm::json::OStream &OS) const;
 
 private:
   TensorSpec(const std::string &Name, int Port, TensorType Type,
@@ -117,8 +116,8 @@ std::string tensorValueToString(const char *Buffer, const TensorSpec &Spec);
 /// "shape": <array of ints> }
 /// For the "type" field, see the C++ primitive types used in
 /// TFUTILS_SUPPORTED_TYPES.
-std::optional<TensorSpec> getTensorSpecFromJSON(LLVMContext &Ctx,
-                                                const json::Value &Value);
+std::optional<TensorSpec> getTensorSpecFromJSON(llvm::LLVMContext &Ctx,
+                                                const llvm::json::Value &Value);
 
 #define TFUTILS_GETDATATYPE_DEF(T, Name) \
   template <> TensorType TensorSpec::getDataType<T>();
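The doc comment above this declaration gives the JSON schema of a tensor spec; a small parsing sketch follows, assuming the field set name/port/type/shape from that comment and the include paths used elsewhere in this commit:

#include "SerDes/TensorSpec.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/JSON.h"
#include "llvm/Support/raw_ostream.h"

// Parse a spec for a 2x3 float tensor named "x"; on malformed input the
// returned optional stays empty.
void parseExampleSpec() {
  llvm::LLVMContext Ctx;
  auto Parsed = llvm::json::parse(
      R"({"name": "x", "port": 0, "type": "float", "shape": [2, 3]})");
  if (!Parsed) {
    llvm::consumeError(Parsed.takeError());
    return;
  }
  std::optional<MLBridge::TensorSpec> Spec =
      MLBridge::getTensorSpecFromJSON(Ctx, *Parsed);
  if (Spec)
    llvm::errs() << "parsed a valid tensor spec\n";
}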
15 changes: 7 additions & 8 deletions include/SerDes/bitstreamSerDes.h
@@ -16,15 +16,14 @@
 #include <utility>
 #include <vector>
 
-using namespace std;
 
 namespace MLBridge {
 class BitstreamSerDes : public BaseSerDes {
 public:
   BitstreamSerDes() : BaseSerDes(Kind::Bitstream) {
     Buffer = "";
-    tensorSpecs = vector<TensorSpec>();
-    rawData = vector<const void *>();
+    tensorSpecs = std::vector<TensorSpec>();
+    rawData = std::vector<const void *>();
 
 #define TEMPORARY_STORAGE_INIT(TYPE) \
   features##TYPE = {}; \
@@ -42,8 +41,8 @@ class BitstreamSerDes : public BaseSerDes {
 
   void cleanDataStructures() override {
     Buffer = "";
-    tensorSpecs = vector<TensorSpec>();
-    rawData = vector<const void *>();
+    tensorSpecs = std::vector<TensorSpec>();
+    rawData = std::vector<const void *>();
 
 #define TEMPORARY_STORAGE_CLEAN(TYPE) \
   for (auto &it : features##TYPE) { \
@@ -62,9 +61,9 @@
 
 private:
   void *deserializeUntyped(void *) override;
-  vector<TensorSpec> tensorSpecs;
-  vector<const void *> rawData;
-  string Buffer;
+  std::vector<TensorSpec> tensorSpecs;
+  std::vector<const void *> rawData;
+  std::string Buffer;
 
 #define TEMPORARY_STORAGE_DEF(TYPE) \
   std::map<std::string, TYPE *> features##TYPE; \
10 changes: 4 additions & 6 deletions include/SerDes/jsonSerDes.h
@@ -13,8 +13,6 @@
 #include <string>
 #include <utility>
 
-using namespace std;
-using namespace llvm;
 namespace MLBridge {
 class JsonSerDes : public BaseSerDes {
 public:
@@ -30,21 +28,21 @@ class JsonSerDes : public BaseSerDes {
   } \
   void setFeature(const std::string &name, const std::vector<TYPE> &value) \
       override { \
-    J[name] = json::Array(value); \
+    J[name] = llvm::json::Array(value); \
   }
   SUPPORTED_TYPES(SET_FEATURE)
 #undef SET_FEATURE
 
   void *getSerializedData() override;
 
-  void cleanDataStructures() override { J = json::Object(); }
+  void cleanDataStructures() override { J = llvm::json::Object(); }
 
 private:
   void *deserializeUntyped(void *data) override;
-  void *desJson(json::Value *V);
+  void *desJson(llvm::json::Value *V);
 
 private:
-  json::Object J;
+  llvm::json::Object J;
 };
 } // namespace MLBridge
 
3 changes: 1 addition & 2 deletions test/inference/HelloMLBridge_Env.h
@@ -3,7 +3,6 @@
 #include "llvm/IR/Module.h"
 #include "llvm/Support/raw_ostream.h"
 
-using namespace llvm;
 using namespace MLBridge;
 class HelloMLBridgeEnv : public Environment {
   Observation CurrObs;
@@ -22,7 +21,7 @@ Observation& HelloMLBridgeEnv::step(Action Action) {
   CurrObs.clear();
   std::copy(FeatureVector.begin(), FeatureVector.end(),
             std::back_inserter(CurrObs));
-  outs() << "Action: " << Action << "\n";
+  llvm::outs() << "Action: " << Action << "\n";
   setDone();
   return CurrObs;
 }
