Skip to content

Commit 2556a2a

Browse files
committed
allow legacy pi chat for async when no llm key
1 parent 66db255 commit 2556a2a

2 files changed

Lines changed: 119 additions & 58 deletions

File tree

bases/rsptx/web2py_server/applications/runestone/controllers/peer.py

Lines changed: 24 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -778,12 +778,29 @@ def peer_async():
778778

779779
@auth.requires_login()
def get_async_explainer():
    """
    Return prior peer explanations for a question as a JSON payload.

    Reads ``course`` and ``div_id`` from ``request.vars``, collects all
    ``sendmessage`` rows from ``useinfo`` for that question and course
    (ordered by insertion id), and returns a JSON object whose ``mess``
    field is an HTML ``<ul>`` of the messages, or a placeholder sentence
    when there are none. The ``user``, ``answer``, and ``responses``
    fields are kept (empty) for interface compatibility with the
    synchronous peer endpoint's response shape.
    """
    # Function-scope import: escape user-generated text before embedding
    # it in HTML. sid and the message body are student-controlled and the
    # client inserts "mess" via innerHTML, so unescaped text is an XSS risk.
    from html import escape

    course_name = request.vars.course
    div_id = request.vars.div_id

    messages = db(
        (db.useinfo.event == "sendmessage")
        & (db.useinfo.div_id == div_id)
        & (db.useinfo.course_id == course_name)
    ).select(orderby=db.useinfo.id)

    if len(messages) == 0:
        mess = "Sorry there are no explanations yet."
    else:
        parts = []
        for row in messages:
            # act presumably has the form "<evt>:<peer>:<message>" — take the
            # message after the second colon; fall back to the raw act when
            # it lacks three fields or is not a string. TODO confirm format
            # against the client code that logs sendmessage events.
            try:
                msg = row.act.split(":", 2)[2]
            except (IndexError, AttributeError):
                msg = row.act
            parts.append(
                f"<li><strong>{escape(str(row.sid))}</strong> said: "
                f"{escape(str(msg))}</li>"
            )
        mess = "<ul>" + "".join(parts) + "</ul>"

    logger.debug(f"Get message for {div_id}")
    return json.dumps({"mess": mess, "user": "", "answer": "", "responses": {}})
787804

788805

789806
def _get_mcq_context(div_id):
@@ -886,6 +903,7 @@ def get_async_llm_reflection():
886903
"focus on reasoning not teaching.\n\n"
887904
)
888905

906+
889907
if question:
890908
sys_content += f"question:\n{question}\n\n"
891909

bases/rsptx/web2py_server/applications/runestone/views/peer/peer_async.html

Lines changed: 95 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,7 @@
44
</script>
55
<script>
66
window.DISABLE_LLMTEST = true;
7-
window.PI_LLM_MODE = false;
8-
</script>
9-
<script>
10-
window.PI_LLM_MODE = false;
7+
window.PI_LLM_MODE = {{='true' if llm_enabled else 'false'}};
118
window.DISABLE_LLMTEST = true;
129

1310
window.eBookConfig = window.eBookConfig || {};
@@ -42,6 +39,8 @@
4239
eBookConfig.peer = true;
4340
eBookConfig.peerMode = "async";
4441
</script>
42+
<!-- <script>window.PI_LLM_MODE = false;</script> -->
43+
4544

4645
<div class="hidden-content" style="display: none">
4746
\( {{=XML(latex_macros)}} \)
@@ -298,65 +297,109 @@ <h3>Congratulations, you have completed this assignment!</h3>
298297
discussion.style.display = "block";
299298
chat.innerHTML = "<p><em>Thinking about your explanation…</em></p>";
300299

301-
const mcq = document.querySelector('.mchoice');
302-
303-
const questionText = mcq ? mcq.innerText : "";
300+
if (window.PI_LLM_MODE !== true) {
301+
const resp = await fetch("/runestone/peer/get_async_explainer", {
302+
method: "POST",
303+
headers: { "Content-Type": "application/json" },
304+
body: JSON.stringify({
305+
div_id: currentQuestion,
306+
course: eBookConfig.course
307+
})
308+
});
304309

305-
window._llmMessages = [
306-
{
307-
role: "user",
308-
content: `i chose answer ${selected}. my explanation was:\n\n${reflection}`
309-
}
310-
];
310+
if (!resp.ok) {
311+
chat.innerHTML = "<p><em>Error talking to peer.</em></p>";
312+
return;
313+
}
311314

315+
const spec = await resp.json();
316+
let res = "";
317+
for (let response in spec.responses) {
318+
res += `User ${response} answered ${answerToString(
319+
spec.responses[response]
320+
)} <br />`;
321+
}
322+
chat.innerHTML = "";
323+
chat.innerHTML += `<p><strong>Other students said:</strong></p>`;
324+
if (res) {
325+
chat.innerHTML += `<p>${res}</p>`;
326+
}
327+
if (spec.mess) {
328+
chat.innerHTML += spec.mess;
329+
}
330+
const replyInput = document.getElementById("llmReplyInput");
331+
const replyBtn = document.getElementById("llmReplyBtn");
332+
if (replyInput) replyInput.style.display = "none";
333+
if (replyBtn) replyBtn.style.display = "none";
334+
nextStep.textContent =
335+
"Please answer the question again, even if you do not wish to change your answer.";
336+
const readyBtn = document.getElementById("readyVote2Btn");
337+
if (readyBtn) {
338+
readyBtn.style.display = "inline-block";
339+
readyBtn.disabled = false;
340+
readyBtn.title = "";
341+
}
342+
studentSubmittedVote2 = false;
343+
return;
344+
} else {
345+
const mcq = document.querySelector('.mchoice');
312346

313-
const resp = await fetch("/runestone/peer/get_async_llm_reflection", {
314-
method: "POST",
315-
headers: { "Content-Type": "application/json" },
316-
body: JSON.stringify({
317-
div_id: currentQuestion,
318-
selected_answer: selected,
319-
messages: window._llmMessages
320-
})
321-
});
347+
const questionText = mcq ? mcq.innerText : "";
322348

323-
const data = await resp.json();
324-
chat.innerHTML = "";
349+
window._llmMessages = [
350+
{
351+
role: "user",
352+
content: `i chose answer ${selected}. my explanation was:\n\n${reflection}`
353+
}
354+
];
325355

326-
if (!data.ok) {
327-
chat.innerHTML = "<p><em>Error talking to peer.</em></p>";
328-
return;
329-
}
330356

331-
appendMsg("assistant", data.reply);
332-
if (typeof logPeerEvent === "function") {
333-
window._llmTurnIndex = (window._llmTurnIndex || 0) + 1;
334-
logPeerEvent({
335-
sid: eBookConfig.username,
336-
div_id: currentQuestion,
337-
event: "pi_llm_turn",
338-
act: JSON.stringify({
339-
pi_attempt_id: getPiAttemptId(),
340-
turn_index: window._llmTurnIndex,
341-
role: "assistant",
342-
content: data.reply,
343-
}),
344-
course_name: eBookConfig.course,
357+
const resp = await fetch("/runestone/peer/get_async_llm_reflection", {
358+
method: "POST",
359+
headers: { "Content-Type": "application/json" },
360+
body: JSON.stringify({
361+
div_id: currentQuestion,
362+
selected_answer: selected,
363+
messages: window._llmMessages
364+
})
345365
});
346-
}
347366

348-
window._llmMessages.push({
349-
role: "assistant",
350-
content: data.reply
351-
});
352-
nextStep.textContent =
353-
"Discuss this reasoning, then click 'Vote again' to vote again.";
367+
const data = await resp.json();
368+
chat.innerHTML = "";
354369

355-
studentSubmittedVote2 = false;
370+
if (!data.ok) {
371+
chat.innerHTML = "<p><em>Error talking to peer.</em></p>";
372+
return;
373+
}
356374

357-
375+
appendMsg("assistant", data.reply);
376+
if (typeof logPeerEvent === "function") {
377+
window._llmTurnIndex = (window._llmTurnIndex || 0) + 1;
378+
logPeerEvent({
379+
sid: eBookConfig.username,
380+
div_id: currentQuestion,
381+
event: "pi_llm_turn",
382+
act: JSON.stringify({
383+
pi_attempt_id: getPiAttemptId(),
384+
turn_index: window._llmTurnIndex,
385+
role: "assistant",
386+
content: data.reply,
387+
}),
388+
course_name: eBookConfig.course,
389+
});
390+
}
391+
392+
window._llmMessages.push({
393+
role: "assistant",
394+
content: data.reply
395+
});
396+
nextStep.textContent =
397+
"Discuss this reasoning, then click 'Vote again' to vote again.";
358398

359-
enableChat(reflection, selected);
399+
studentSubmittedVote2 = false;
400+
401+
enableChat(reflection, selected);
402+
}
360403

361404
});
362405
})();

0 commit comments

Comments
 (0)