Remove unused torch_mlu interface
ClowDragon authored and fuwenguang committed May 31, 2024
1 parent 6e77806 commit fcb6674
Showing 2 changed files with 0 additions and 52 deletions.
46 changes: 0 additions & 46 deletions mmcv/ops/csrc/common/pytorch_mlu_helper.hpp
@@ -14,56 +14,10 @@

#ifdef MMCV_WITH_MLU
#include "utils/cnlog.h"
#include "utils/assert_tensor.h"
#include "framework/core/device.h"
#include "framework/core/queue.h"
#include "framework/core/notifier.h"
#include "aten/utils/cnnl_util.h"
#include "aten/utils/types.h"
#include "utils/cndumper.h"
#include "c10/core/ScalarTypeToTypeMeta.h"

#define NFU_ALIGN_SIZE 128

#define PAD_UP(x, y) (((x) / (y) + (int)((x) % (y) > 0)) * (y))

#define PAD_DOWN(x, y) (((x) / (y)) * (y))

#define CEIL_DIV(x, y) (((x) + (y)-1) / (y))

#define CEIL_ALIGN(x, y) (((x) + (y)-1) / (y) * (y))

inline int32_t getJobLimitCapability() {
  CNcontext drv_ctx;
  TORCH_CHECK(CN_SUCCESS == cnCtxGetCurrent(&drv_ctx), "cnCtxGetCurrent fails");
  CNctxConfigParam ctx_conf_param;
  TORCH_CHECK(
      CN_SUCCESS == cnGetCtxConfigParam(drv_ctx, CN_CTX_CONFIG_UNION_LIMIT,
                                        &ctx_conf_param),
      "cnGetCtxConfigParam fails.");
  return (int32_t)ctx_conf_param.unionLimit;
}

inline int32_t getCoreNumOfJobLimitCapability() {
  switch (getJobLimitCapability()) {
    default:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster) *
             getJobLimitCapability();
    case CN_KERNEL_CLASS_BLOCK:
      return 1;
    case CN_KERNEL_CLASS_UNION:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster);
    case CN_KERNEL_CLASS_UNION2:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster) * 2;
    case CN_KERNEL_CLASS_UNION4:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster) * 4;
    case CN_KERNEL_CLASS_UNION8:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster) * 8;
    case CN_KERNEL_CLASS_UNION16:
      return torch_mlu::getDeviceAttr(cnrtAttrMcorePerCluster) * 16;
  }
}

#ifdef MMCV_WITH_MLU_KPRIVATE
#define REGISTER_MLU_IMPL(key, value) REGISTER_DEVICE_IMPL(key, PrivateUse1, value)
#else
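For reference, the alignment macros in the hunk above implement standard round-up/round-down integer arithmetic against a unit such as the 128-byte NFU_ALIGN_SIZE, and getCoreNumOfJobLimitCapability scales the per-cluster core count by the union-task class reported by the driver. A minimal standalone sketch of the macro arithmetic (example values chosen purely for illustration, not part of this commit):

// Illustration only: what the alignment macros evaluate to for
// x = 130 against a 128-unit boundary.
#include <cstdio>

#define PAD_UP(x, y) (((x) / (y) + (int)((x) % (y) > 0)) * (y))
#define PAD_DOWN(x, y) (((x) / (y)) * (y))
#define CEIL_DIV(x, y) (((x) + (y)-1) / (y))
#define CEIL_ALIGN(x, y) (((x) + (y)-1) / (y) * (y))

int main() {
  printf("PAD_UP(130, 128)     = %d\n", PAD_UP(130, 128));      // 256
  printf("PAD_DOWN(130, 128)   = %d\n", PAD_DOWN(130, 128));    // 128
  printf("CEIL_DIV(130, 128)   = %d\n", CEIL_DIV(130, 128));    // 2
  printf("CEIL_ALIGN(130, 128) = %d\n", CEIL_ALIGN(130, 128));  // 256
  return 0;
}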
6 changes: 0 additions & 6 deletions mmcv/ops/csrc/pytorch/mlu/mlu_common_helper.h
@@ -12,15 +12,9 @@
#pragma once
#include <ATen/ATen.h>
#include <c10/core/ScalarType.h>
#include "utils/cnlog.h"
#include "utils/assert_tensor.h"
#include "framework/core/device.h"
#include "framework/core/queue.h"
#include "framework/core/notifier.h"
#include "aten/utils/cnnl_util.h"
#include "aten/operators/cnnl/cnnl_kernel.h"
#include "aten/operators/cnnl/internal/cnnl_internal.h"
#include "utils/cndumper.h"
namespace torch_mlu::cnnl::ops {
using torch_mlu::cnnl_contiguous;
using torch_mlu::get_channels_last_memory_format;
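The using-declarations above pull torch_mlu's layout helpers into the mmcv namespace. A hypothetical usage sketch, assuming the cnnl_contiguous and get_channels_last_memory_format signatures from the torch_mlu headers; prepare_input is an invented name for illustration:

// Hypothetical sketch, not from this commit: normalizing a tensor's memory
// format before handing it to a CNNL kernel.
#include <ATen/ATen.h>

at::Tensor prepare_input(const at::Tensor& input) {
  // Pick a channels-last format appropriate to the tensor's rank
  // (assumed: get_channels_last_memory_format takes the dimension count).
  auto memory_format = torch_mlu::get_channels_last_memory_format(input.dim());
  // Return a tensor contiguous in that memory format on the MLU side.
  return torch_mlu::cnnl_contiguous(input, memory_format);
}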
