Skip to content

Commit 1b4e998

Browse files
committed
rm redundant print
1 parent f225694 commit 1b4e998

3 files changed

Lines changed: 36 additions & 36 deletions

File tree

backend/app/main.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def extract_topic_name(s):
8989
intersection.append(ai_item)
9090

9191
# print("Selected topics:", selected_topics)
92-
print("Intersection:", intersection)
92+
# print("Intersection:", intersection)
9393
return jsonify({"success": True, "result": intersection})
9494

9595
except Exception as e:
@@ -134,7 +134,7 @@ def explain_topic():
134134
prompt = f"""Explain '{topic}' in the context of '{search_term}'.
135135
If it's an abbreviation, what it stands for in '{search_term}'
136136
Keep it concise but informative (1-2 sentences)."""
137-
print("Generated prompt:", prompt)
137+
# print("Generated prompt:", prompt)
138138

139139
try:
140140
# Create an event loop and run the async function
@@ -154,7 +154,7 @@ def explain_topic():
154154
)
155155
loop.close()
156156

157-
print("Received explanation:", explanation)
157+
# print("Received explanation:", explanation)
158158
if explanation and len(explanation) > 0:
159159
return jsonify({"success": True, "explanation": explanation[0]})
160160
else:
@@ -189,7 +189,7 @@ def home():
189189

190190

191191
if __name__ == "__main__":
192-
print("Starting Flask server...")
192+
# print("Starting Flask server...")
193193
port = 5002
194-
print(f"Server running on: http://127.0.0.1:{port}")
194+
# print(f"Server running on: http://127.0.0.1:{port}")
195195
app.run(host="127.0.0.1", port=port, debug=True)

src/components/TopicRefiner.tsx

Lines changed: 30 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ import {
1010
Minus,
1111
Check
1212
} from "lucide-react";
13-
import { API_ENDPOINTS } from '../lib/config';
1413
import { FaArrowLeft } from "react-icons/fa";
1514

1615
interface AIModel {
@@ -33,24 +32,26 @@ interface TopicRefinerProps {
3332
llmSuggestions: string[];
3433
setLlmSuggestions: (suggestions: string[]) => void;
3534
selectedTopics: string[];
35+
setSelectedTopics: (topics: string[]) => void;
3636
newTopic: string;
3737
setNewTopic: (topic: string) => void;
38-
addNewTopic: () => void;
3938
prevStep: () => void;
4039
handleSubmit: () => void;
4140
searchTerm: string;
41+
onRequestSuggestions: (model: string, prompt: string, apiKey: string, topics: string[]) => Promise<void>;
4242
}
4343

4444
export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
4545
llmSuggestions = [],
4646
setLlmSuggestions,
4747
selectedTopics = [],
48+
setSelectedTopics,
4849
newTopic = "",
4950
setNewTopic,
50-
addNewTopic,
5151
prevStep,
5252
handleSubmit,
53-
searchTerm
53+
searchTerm,
54+
onRequestSuggestions
5455
}) => {
5556
const [showPromptModal, setShowPromptModal] = useState(false);
5657
const [showWelcomeModal, setShowWelcomeModal] = useState(true);
@@ -91,29 +92,30 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
9192
}
9293

9394
try {
94-
const response = await fetch(API_ENDPOINTS.AI_PROCESS, {
95-
method: 'POST',
96-
headers: { 'Content-Type': 'application/json' },
97-
body: JSON.stringify({
98-
selectedModel,
99-
customPrompt,
100-
apiKey,
101-
searchTerm,
102-
selectedTopics
103-
})
104-
});
105-
const data = await response.json();
106-
if (data.success && data.result) {
107-
setLlmSuggestions(data.result);
108-
} else {
109-
alert('Failed to get AI suggestions.');
110-
}
95+
await onRequestSuggestions(selectedModel, customPrompt, apiKey, selectedTopics);
11196
setShowPromptModal(false);
11297
} catch {
11398
alert('Failed to get AI suggestions. Please try again.');
11499
}
115100
};
116101

102+
const handleAddNewTopic = () => {
103+
if (!newTopic.trim()) return; // Don't add empty topics
104+
105+
// Add to selected topics if not already present
106+
if (!selectedTopics.includes(newTopic.trim())) {
107+
setSelectedTopics([...selectedTopics, newTopic.trim()]);
108+
}
109+
110+
// Add to finalized topics if not already present
111+
if (!finalizedTopics.includes(newTopic.trim())) {
112+
setFinalizedTopics(prev => [...prev, newTopic.trim()]);
113+
}
114+
115+
// Clear the input
116+
setNewTopic("");
117+
};
118+
117119
return (
118120
<main className="container-fluid py-4" style={{ height: '100vh', overflowY: 'auto' }}>
119121
{/* Navigation/Header Row */}
@@ -255,12 +257,16 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
255257
placeholder="Add a custom topic"
256258
value={newTopic}
257259
onChange={(e) => setNewTopic(e.target.value)}
258-
onKeyDown={(e) => e.key === "Enter" && addNewTopic()}
260+
onKeyDown={(e) => {
261+
if (e.key === "Enter" && newTopic.trim()) {
262+
handleAddNewTopic();
263+
}
264+
}}
259265
/>
260266
<button
261267
className="btn btn-primary"
262-
onClick={addNewTopic}
263-
disabled={!newTopic}
268+
onClick={handleAddNewTopic}
269+
disabled={!newTopic.trim()}
264270
>
265271
<Plus size={16} />
266272
</button>

src/views/TopicHistogram.tsx

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -749,19 +749,13 @@ const TopicHistogram: FC = () => {
749749
{/* Step 2: Topic Refinement */}
750750
{currentStep === 2 && (
751751
<TopicRefiner
752-
isLlmProcessing={isLlmProcessing}
753752
llmSuggestions={llmSuggestionsState}
754753
setLlmSuggestions={setLlmSuggestionsState}
755754
onRequestSuggestions={handleRequestSuggestions}
756755
selectedTopics={selectedTopics}
757-
selectLlmSuggestion={(suggestion) => {
758-
if (!selectedTopics.includes(suggestion)) {
759-
setSelectedTopics([...selectedTopics, suggestion]);
760-
}
761-
}}
756+
setSelectedTopics={setSelectedTopics}
762757
newTopic=""
763758
setNewTopic={() => { }}
764-
addNewTopic={() => { }}
765759
prevStep={prevStep}
766760
handleSubmit={handleSubmit}
767761
searchTerm={searchTerm}

0 commit comments

Comments (0)