
Commit

Fix CodeQL warnings
Karim Vadsariya committed Jan 24, 2025
1 parent 1a4f582 commit 9857e34
Showing 2 changed files with 0 additions and 6 deletions.
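
The warnings being fixed come from preprocessor guards that had been left behind as comments around declarations that are in fact unconditional; CodeQL (presumably its commented-out-code check) flags such stray comment lines. A minimal sketch of the before/after pattern follows; the ProviderHostBefore/ProviderHostAfter types and the includes are illustrative stand-ins, not the actual onnxruntime headers (the real declarations live in ProviderHost and ProviderHostImpl, shown in the diffs below).

#include <cstdint>
#include <memory>

struct IAllocator;  // stand-in forward declaration for the real interface

// Before this commit: the CUDA guard survives only as comments around an
// otherwise unconditional declaration, which is what CodeQL objects to.
struct ProviderHostBefore {
  virtual ~ProviderHostBefore() = default;
  // #ifdef USE_CUDA
  virtual std::unique_ptr<IAllocator> CreateCUDAAllocator(int16_t device_id, const char* name) = 0;
  // #endif
};

// After this commit: the declaration is untouched; only the dead
// "// #ifdef" / "// #endif" comment lines are deleted.
struct ProviderHostAfter {
  virtual ~ProviderHostAfter() = default;
  virtual std::unique_ptr<IAllocator> CreateCUDAAllocator(int16_t device_id, const char* name) = 0;
};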
@@ -178,7 +178,6 @@ struct ProviderHost {
 virtual std::string demangle(const char* name) = 0;
 virtual std::string demangle(const std::string& name) = 0;

-// #ifdef USE_CUDA
 virtual std::unique_ptr<IAllocator> CreateCUDAAllocator(int16_t device_id, const char* name) = 0;
 virtual std::unique_ptr<IAllocator> CreateCUDAPinnedAllocator(const char* name) = 0;
 virtual std::unique_ptr<IDataTransfer> CreateGPUDataTransfer() = 0;
@@ -190,7 +189,6 @@ struct ProviderHost {

 virtual Status CudaCall_false(int retCode, const char* exprString, const char* libName, int successCode, const char* msg, const char* file, const int line) = 0;
 virtual void CudaCall_true(int retCode, const char* exprString, const char* libName, int successCode, const char* msg, const char* file, const int line) = 0;
-// #endif

 #ifdef USE_MIGRAPHX
 virtual std::unique_ptr<IAllocator> CreateMIGraphXAllocator(int16_t device_id, const char* name) = 0;
@@ -1255,9 +1253,7 @@ struct ProviderHost {
 virtual training::DistributedRunContext& GetDistributedRunContextInstance() = 0;
 #endif

-// #if defined(USE_CUDA) || defined(USE_ROCM)
 virtual PhiloxGenerator& PhiloxGenerator__Default() = 0;
-// #endif

 #ifdef ENABLE_TRAINING_TORCH_INTEROP
 virtual void contrib__PythonOpBase__Init(contrib::PythonOpBase* p, const OpKernelInfo& info) = 0;
2 changes: 0 additions & 2 deletions onnxruntime/core/session/provider_bridge_ort.cc
@@ -1559,9 +1559,7 @@ struct ProviderHostImpl : ProviderHost {
 training::DistributedRunContext& GetDistributedRunContextInstance() override { return training::DistributedRunContext::GetInstance(); }
 #endif

-// #if defined(USE_CUDA) || defined(USE_ROCM)
 PhiloxGenerator& PhiloxGenerator__Default() override { return PhiloxGenerator::Default(); }
-// #endif

 #ifdef ENABLE_TRAINING_TORCH_INTEROP
 void contrib__PythonOpBase__Init(contrib::PythonOpBase* p, const OpKernelInfo& info) override { p->PythonOpBase::Init(info); }
