Skip to content

Commit c73958f

Browse files
committed
improve error messaging
1 parent 2556a2a commit c73958f

2 files changed

Lines changed: 44 additions & 23 deletions

File tree

bases/rsptx/web2py_server/applications/runestone/controllers/peer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1079,7 +1079,7 @@ def _call_openai(messages):
10791079
"temperature": 0.4,
10801080
"max_tokens": 300,
10811081
}
1082-
resp = requests.post(url, headers=headers, data=json.dumps(payload), timeout=30)
1082+
resp = requests.post(url, headers=headers, json=payload, timeout=30)
10831083
logger.warning(f"PEER LLM CALL | provider=openai-course-token | model={model}")
10841084
resp.raise_for_status()
10851085
data = resp.json()

bases/rsptx/web2py_server/applications/runestone/views/peer/peer_async.html

Lines changed: 43 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -222,8 +222,6 @@ <h3>Congratulations, you have completed this assignment!</h3>
222222
};
223223
})();
224224
</script>
225-
226-
<!-- <script src="/runestone/static/js/llmtest.js"></script> -->
227225

228226
<script>
229227
(function () {
@@ -354,21 +352,27 @@ <h3>Congratulations, you have completed this assignment!</h3>
354352
];
355353

356354

357-
const resp = await fetch("/runestone/peer/get_async_llm_reflection", {
358-
method: "POST",
359-
headers: { "Content-Type": "application/json" },
360-
body: JSON.stringify({
361-
div_id: currentQuestion,
362-
selected_answer: selected,
363-
messages: window._llmMessages
364-
})
365-
});
355+
let resp;
356+
try {
357+
resp = await fetchWithRetry("/runestone/peer/get_async_llm_reflection", {
358+
method: "POST",
359+
headers: { "Content-Type": "application/json" },
360+
body: JSON.stringify({
361+
div_id: currentQuestion,
362+
selected_answer: selected,
363+
messages: window._llmMessages
364+
})
365+
});
366+
} catch (e) {
367+
chat.innerHTML = "<p><em>LLM error. Please try again.</em></p>";
368+
return;
369+
}
366370

367371
const data = await resp.json();
368372
chat.innerHTML = "";
369373

370374
if (!data.ok) {
371-
chat.innerHTML = "<p><em>Error talking to peer.</em></p>";
375+
chat.innerHTML = "<p><em>LLM error. Please try again.</em></p>";
372376
return;
373377
}
374378

@@ -461,23 +465,40 @@ <h3>Congratulations, you have completed this assignment!</h3>
461465
content: msg
462466
});
463467

464-
const resp = await fetch("/runestone/peer/get_async_llm_reflection", {
465-
method: "POST",
466-
headers: { "Content-Type": "application/json" },
467-
body: JSON.stringify({
468-
div_id: currentQuestion,
469-
selected_answer: selected,
470-
messages: window._llmMessages
471-
})
472-
});
468+
btn.disabled = true;
469+
input.disabled = true;
470+
471+
await new Promise(resolve => setTimeout(resolve, 1000));
472+
473+
let resp;
474+
try {
475+
resp = await fetchWithRetry("/runestone/peer/get_async_llm_reflection", {
476+
method: "POST",
477+
headers: { "Content-Type": "application/json" },
478+
body: JSON.stringify({
479+
div_id: currentQuestion,
480+
selected_answer: selected,
481+
messages: window._llmMessages
482+
})
483+
});
484+
} catch (e) {
485+
appendMsg("assistant", "LLM error. Please try again.");
486+
btn.disabled = false;
487+
input.disabled = false;
488+
return;
489+
}
473490

474491
const data = await resp.json();
475492
if (!data.ok) {
476-
appendMsg("assistant", "(error talking to peer)");
493+
appendMsg("assistant", "LLM error. Please try again.");
494+
btn.disabled = false;
495+
input.disabled = false;
477496
return;
478497
}
479498

480499
appendMsg("assistant", data.reply);
500+
btn.disabled = false;
501+
input.disabled = false;
481502
if (typeof logPeerEvent === "function") {
482503
window._llmTurnIndex = (window._llmTurnIndex || 0) + 1;
483504
logPeerEvent({

0 commit comments

Comments (0)