Skip to content

Commit 6e42f95

Browse files
committed
Patch int8PrepareBias to work with both calls from PrepareBiasForBNodeOp and PrepareFakeBiasForBNodeOp
This is effectively also a patch for a null-pointer dereference that, worryingly, did *not* cause a crash in the wasm interpreter.
1 parent 53c4f7e commit 6e42f95

1 file changed

Lines changed: 13 additions & 5 deletions

File tree

src/tensors/cpu/wasm_intgemm_fallback.cpp

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -64,11 +64,19 @@ extern "C" void int8PrepareBiasFallback(const int8_t* input_B_prepared,
6464
const float* input_bias,
6565
float* output) {
6666
float unquant_factor = (-1) * ((127.0f / scale_A) * (127.0f / scale_B)) / (127.0f);
67-
intgemm::Int8Shift::PrepareBias(
68-
input_B_prepared,
69-
width,
70-
cols_B,
71-
intgemm::callbacks::UnquantizeAndAddBiasAndWrite(unquant_factor, input_bias, output));
67+
if (input_bias == nullptr) {
68+
intgemm::Int8Shift::PrepareBias(
69+
input_B_prepared,
70+
width,
71+
cols_B,
72+
intgemm::callbacks::UnquantizeAndWrite(unquant_factor, output));
73+
} else {
74+
intgemm::Int8Shift::PrepareBias(
75+
input_B_prepared,
76+
width,
77+
cols_B,
78+
intgemm::callbacks::UnquantizeAndAddBiasAndWrite(unquant_factor, input_bias, output));
79+
}
7280
}
7381

7482
extern "C" void int8MultiplyAndAddBiasFallback(const int8_t* input_A_prepared,

0 commit comments

Comments
 (0)