Skip to content

Commit 59ec23c

Browse files
unamedkr authored and claude committed
cleanup: remove stale debug prints (q2 presence, L-spot XBN dump)
Removed two debug prints that always fired when TQ_DEBUG_PREFILL was set — they were useful during the drift investigation but are now redundant (the remaining dumps behind TQ_DEBUG_PREFILL gate cover the same ground more precisely). No behavior change. 11/11 STRICT tests pass. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent f4934e9 commit 59ec23c

1 file changed

Lines changed: 0 additions & 17 deletions

File tree

src/engine/tq_transformer.c

Lines changed: 0 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -2492,11 +2492,6 @@ float* tq_forward(tq_model_t* model, tq_state_t* s, int token, int pos) {
24922492

24932493
/* Pre-attention/DeltaNet RMSNorm */
24942494
tq_rmsnorm(s->xb, s->x, layer->attn_norm, dim, c->rms_norm_eps);
2495-
if ((l == 0 || l == 1 || l == 4 || l == 8 || l == 15) && pos <= 1 && getenv("TQ_DEBUG_PREFILL")) {
2496-
fprintf(stderr, "[fwd] L%d pos=%d xb [0:8] = ", l, pos);
2497-
for (int i = 0; i < 8; i++) fprintf(stderr, "%.4f ", s->xb[i]);
2498-
fprintf(stderr, "\n");
2499-
}
25002495

25012496
/* Begin layer-level GPU batch scope: all GGUF matmuls in this layer
25022497
* (QKV, wo, gate, up, down) encode into shared command buffers.
@@ -3149,24 +3144,12 @@ int tq_forward_batch(tq_model_t* model, tq_state_t* s,
31493144
free(OB); free(GB); free(UB);
31503145
return -1;
31513146
}
3152-
if (l == 0 && dbg) {
3153-
fprintf(stderr, "[batch] layer 0 q2 presence: wq=%p wk=%p wv=%p wo=%p g=%p u=%p d=%p\n",
3154-
(void*)layer->wq_q2, (void*)layer->wk_q2, (void*)layer->wv_q2,
3155-
(void*)layer->wo_q2, (void*)layer->w_gate_q2, (void*)layer->w_up_q2, (void*)layer->w_down_q2);
3156-
}
31573147

31583148
/* 1. attn RMSNorm (per-row) */
31593149
for (int n = 0; n < N; n++) {
31603150
tq_rmsnorm(XBN + (size_t)n * dim, Xres + (size_t)n * dim,
31613151
layer->attn_norm, dim, c->rms_norm_eps);
31623152
}
3163-
if ((l == 0 || l == 1 || l == 4 || l == 8 || l == 15) && dbg) {
3164-
for (int tn = 0; tn < N && tn < 2; tn++) {
3165-
fprintf(stderr, "[batch] L%d XBN tok%d [0:8] = ", l, tn);
3166-
for (int i = 0; i < 8; i++) fprintf(stderr, "%.4f ", XBN[(size_t)tn * dim + i]);
3167-
fprintf(stderr, "\n");
3168-
}
3169-
}
31703153

31713154
/* 2. Q, K, V batched matmul (Q4 main weights) */
31723155
tq_batched_matmul_q4(QB, layer->wq_q4, layer->wq_q4s, XBN, q_dim, dim, N, NULL);

0 commit comments

Comments (0)