fix daily errors
Fix the daily regression failures listed below:
```
onnx_test_bm1686 FAILED
['Gemm', 'MatMul', 'TorchLogSoftmax', 'TorchActivation', 'PoolAfterRelu']
mobilebert_tf_bm1684x FAILED
sd_decoder_pt_bm1684x FAILED
arcface_res50_bm1684x FAILED
feature_extract_cf_bm1684x FAILED
mnist-12_bm1686 FAILED
bert-tiny_from_pt_bm1686 FAILED
mobilebert_tf_bm1686 FAILED
sd_decoder_pt_bm1686 FAILED
feature_extract_cf_bm1686 FAILED
arcface_res50_bm1686 FAILED
mnist-12_cv186x FAILED
bert-tiny_from_pt_cv186x FAILED
mobilebert_tf_cv186x FAILED
sd_decoder_pt_cv186x FAILED
arcface_res50_cv186x FAILED
feature_extract_cf_cv186x FAILED
```

Change-Id: Ie4bd84aa300bb690f14d7eef324206276a65b2ce
HarmonyHu committed May 28, 2023
1 parent 5749653 commit 27f2dd0
Showing 7 changed files with 14 additions and 39 deletions.
1 change: 0 additions & 1 deletion include/tpu_mlir/Dialect/Top/IR/TopOps.td
```diff
@@ -729,7 +729,6 @@ def Top_AttentionOp: Top_Op<"Attention"> {
   );
 
   let results = (outs AnyTensor:$output);
-  let hasCanonicalizer = 1;
 }
 
 def Top_PadOp:Top_Op<"Pad"> {
```
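Dropping `let hasCanonicalizer = 1;` pairs with deleting `lib/Dialect/Top/Canonicalize/Attention.cpp` below: in MLIR ODS this flag makes TableGen declare a static `getCanonicalizationPatterns` hook on the op, and some translation unit must define it or the build fails at link time. A minimal sketch of the hook's shape, with a hypothetical placeholder pattern standing in for the deleted one (names and includes are assumptions, not the removed code):

```cpp
// Sketch only: the definition that `let hasCanonicalizer = 1;` obliges
// some .cpp to provide. NoopAttentionPattern is a hypothetical
// placeholder, not the deleted pattern.
#include "mlir/IR/PatternMatch.h"
#include "tpu_mlir/Dialect/Top/IR/TopOps.h"

using namespace mlir;
using namespace tpu_mlir::top;

namespace {
struct NoopAttentionPattern : public OpRewritePattern<AttentionOp> {
  using OpRewritePattern::OpRewritePattern;
  LogicalResult matchAndRewrite(AttentionOp op,
                                PatternRewriter &rewriter) const override {
    return failure(); // never matches; a real pattern would rewrite op here
  }
};
} // namespace

// Declared by TableGen because of `hasCanonicalizer`; defined by hand.
void AttentionOp::getCanonicalizationPatterns(RewritePatternSet &results,
                                              MLIRContext *context) {
  results.insert<NoopAttentionPattern>(context);
}
```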
32 changes: 0 additions & 32 deletions lib/Dialect/Top/Canonicalize/Attention.cpp

This file was deleted.

3 changes: 2 additions & 1 deletion lib/Dialect/Top/Transforms/ChipOptimize/OptimizeBM1684X.cpp
```diff
@@ -102,8 +102,9 @@ class ConvertMatMul2Attention : public OpRewritePattern<top::MatMulOp> {
   using OpRewritePattern::OpRewritePattern;
   LogicalResult matchAndRewrite(top::MatMulOp op,
                                 PatternRewriter &rewriter) const override {
+    // sd_decoder_pt error in bm1684x/bm1686
+    return failure();
     auto filter = op.getRight();
-    // return failure();
     if (module::isWeight(filter) == false) {
       return failure();
     }
```
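The unconditional `return failure();` at the top of `matchAndRewrite` keeps `ConvertMatMul2Attention` registered but makes it match nothing, so MatMul ops are no longer fused into `top::Attention` on BM1684X/BM1686 (the fusion broke sd_decoder_pt, per the added comment). A sketch of why that works, assuming the pattern is driven by MLIR's greedy rewriter as chip-optimize patterns usually are (this is not the project's actual pass plumbing):

```cpp
// Sketch, assuming ConvertMatMul2Attention from the diff above is visible:
// a pattern whose matchAndRewrite() immediately returns failure() stays in
// the set, but the driver never applies it, leaving the IR unchanged.
#include "mlir/IR/BuiltinOps.h"
#include "mlir/Transforms/GreedyPatternRewriteDriver.h"

void runChipOptimize(mlir::ModuleOp module) {
  mlir::RewritePatternSet patterns(module.getContext());
  patterns.add<ConvertMatMul2Attention>(module.getContext());
  // The driver succeeds whether or not any pattern fires; a dead pattern
  // just costs one failed match attempt per MatMul.
  (void)mlir::applyPatternsAndFoldGreedily(module, std::move(patterns));
}
```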
12 changes: 8 additions & 4 deletions lib/Dialect/Tpu/Interfaces/BM1684X/Load.cpp
```diff
@@ -73,8 +73,8 @@ void tpu::LoadOp::codegen_local_bm1684x(int64_t n_step, int64_t c_step,
   gdma_format = BM168x::getGdmaFormat(data_type);
   auto fmt_bytes = BM168x::getFmtBytes(data_type);
   auto g_addr = module::getAddress(getInput());
-  int64_t dhw = D * H * W;
-  int64_t eu_num = BM168x::eu_num(fmt_bytes);
+  // int64_t dhw = D * H * W;
+  // int64_t eu_num = BM168x::eu_num(fmt_bytes);
   int64_t use_3ic = getUse_3icOptimize();
   if (use_3ic < 4 && use_3ic > 0) {
     auto g_stride = BM168x::getGlobalStride(N, C, H, W);
@@ -103,7 +103,9 @@ void tpu::LoadOp::codegen_local_bm1684x(int64_t n_step, int64_t c_step,
           s_stride.N, s_stride.H, gdma_format, true, GDMA_VALUE_DIR_S2L,
           pid_node);
     }
-  } else if (dhw <= eu_num && (C & 0xff) == 0 && data_type == DTYPE_INT8 &&
+  }
+#if 0
+  else if (dhw <= eu_num && (C & 0xff) == 0 && data_type == DTYPE_INT8 &&
              real_dslice == D && real_hslice == H && real_wslice == W &&
              real_cslice == C && N == 1) {
     // optimize coeff load shape
@@ -125,7 +127,9 @@ void tpu::LoadOp::codegen_local_bm1684x(int64_t n_step, int64_t c_step,
                          N, C, H, W, nstride, cstride, hstride, wstride,
                          dst_nstride, dst_cstride, dst_hstride, dst_wstride,
                          gdma_format, GDMA_VALUE_DIR_S2L, 0, pid_node);
-  } else {
+  }
+#endif
+  else {
     int64_t c_num_local = ceiling_func(real_cslice, Arch::NPU_NUM);
     int64_t c_stride = gi.eu_align ? align_up(real_hslice * real_wslice,
                                               Arch::eu_num(fmt_bytes))
```
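Rather than deleting the INT8 coeff-load fast path, the commit parks it behind `#if 0 ... #endif` and comments out the now-unused `dhw`/`eu_num` locals to avoid unused-variable warnings. Preprocessor guards are the usual way to disable a multi-line block: unlike `/* ... */`, they tolerate comments inside the disabled region and can be re-enabled by changing a single character. A trivial standalone illustration:

```cpp
#include <cstdio>

int main() {
  std::printf("generic load path\n"); // always compiled
#if 0
  /* The guarded block may itself contain comments and nested #if
     directives; the preprocessor drops it before compilation proper. */
  std::printf("coeff-load fast path\n");
#endif
  return 0;
}
```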
2 changes: 1 addition & 1 deletion python/test/test_torch.py
```diff
@@ -47,7 +47,7 @@ def __init__(self,
             "Addmm": (self.test_Addmm, Y, Y, Y),
             "Arange": (self.test_Arange, Y, Y, Y),
             "Attention": (self.test_Attention, Y, Y, Y),
-            "AttentionNew": (self.test_AttentionNew, Y, N, N),
+            "AttentionNew": (self.test_AttentionNew, N, N, N),
             "AvgPool1d": (self.test_AvgPool1d, Y, Y, Y),
             "AvgPool2d": (self.test_AvgPool2d, Y, Y, Y),
             "AvgPool3d": (self.test_AvgPool3d, Y, Y, Y),
```
3 changes: 3 additions & 0 deletions python/transform/TFLiteConverter.py
```diff
@@ -240,6 +240,8 @@ def __init__(self,
         else:
             self.output_names = output_names
         self.input_shapes = [x.shape for x in self.graph.inputs]
+        for x in self.graph.inputs:
+            self.addShape(x.name, x.shape)
         self.output_shapes = []
         self.outputs = []
         for op in self.graph.operators:
@@ -248,6 +250,7 @@ def __init__(self,
                 self.outputs.append(out)
                 self.__nhwc2nchw(out)
                 self.output_shapes.append(out.shape)
+                self.addShape(out.name, out.shape)
 
         self.mlir = MLIRImporter(
             self.input_shapes,
```
Binary file modified third_party/nntoolchain/lib/libbackend_1686.so
