Commit 9c31ded

comment out print

1 parent c7ae437 · commit 9c31ded
3 files changed · 49 additions & 5 deletions

backend/app/services/ai_service.py (3 additions & 3 deletions)

@@ -6,7 +6,7 @@
 import re
 
 # Configure logging
-logging.basicConfig(level=logging.DEBUG)
+# logging.basicConfig(level=logging.DEBUG)
 logger = logging.getLogger(__name__)
 
 class AITopicProcessor:
@@ -136,7 +136,7 @@ async def process_with_gemini(self, prompt: str, topics: List[str], search_term:
         """
 
         response = self.gemini_client.generate_content(full_prompt)
-        print("Raw response:", response.text) # Debug print
+        # print("Raw response:", response.text) # Debug print
 
         if response.text:
             suggestions = response.text.strip().split("\n")
@@ -174,7 +174,7 @@ async def process_with_gemini(self, prompt: str, topics: List[str], search_term:
                     "topic": s.strip(),
                     "explanation": f"Suggested as relevant to {search_term}"
                 })
-            print("Processed suggestions:", processed_suggestions) # Debug print
+            # print("Processed suggestions:", processed_suggestions) # Debug print
             return processed_suggestions
         return []
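
A note on the fix itself: the module already creates `logger = logging.getLogger(__name__)`, so rather than commenting the prints out, they could be downgraded to `logger.debug(...)` calls, which stay silent unless the (now commented-out) `logging.basicConfig(level=logging.DEBUG)` line is restored. A minimal sketch of that pattern; the `log_gemini_debug` helper is hypothetical, not code from this repo:

```python
import logging

logger = logging.getLogger(__name__)

# Hypothetical helper showing the pattern: debug output goes through the
# module logger, so it appears only when DEBUG-level logging is configured.
def log_gemini_debug(raw_text: str, processed: list) -> None:
    logger.debug("Raw response: %s", raw_text)
    logger.debug("Processed suggestions: %s", processed)

if __name__ == "__main__":
    # Silent by default; re-enable basicConfig(level=logging.DEBUG) to see output.
    log_gemini_debug("topic one\ntopic two", [{"topic": "topic one"}])
```

This keeps the diagnostics available without the on/off churn of commenting print statements in and out.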

src/components/TopicRefiner.tsx (44 additions & 0 deletions)

@@ -485,6 +485,41 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
     prevStep();
   };
 
+  // Add this function after the other helper functions
+  const getTopicsWithBothModels = () => {
+    const topicsByModel = suggestionsByModel.reduce((acc, suggestion) => {
+      if (!acc[suggestion.topic]) {
+        acc[suggestion.topic] = new Set();
+      }
+      acc[suggestion.topic].add(suggestion.model);
+      return acc;
+    }, {} as Record<string, Set<ModelType>>);
+
+    return Object.entries(topicsByModel)
+      .filter(([, models]) => models.size === 2)
+      .map(([topic]) => topic);
+  };
+
+  const handleAddAllRecommendedTopics = () => {
+    const topicsWithBothModels = getTopicsWithBothModels();
+    const newTopics = topicsWithBothModels.filter(topic => !finalizedTopics.includes(topic));
+    if (newTopics.length > 0) {
+      setFinalizedTopics([...finalizedTopics, ...newTopics]);
+      // Update topic counts for new topics
+      newTopics.forEach(topic => {
+        const suggestion = suggestions.find(s => s.name === topic);
+        if (suggestion) {
+          setTopicCounts(prev => ({
+            ...prev,
+            [topic]: suggestion.count
+          }));
+        } else {
+          fetchTopicCount(topic);
+        }
+      });
+    }
+  };
+
   return (
     <main className="container-fluid py-4" style={{ height: '100vh', overflowY: 'auto' }}>
       {/* Navigation/Header Row */}
@@ -560,6 +595,15 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
           </span>
         </h3>
         <div className="d-flex gap-2">
+          <button
+            className="btn btn-outline-primary"
+            onClick={handleAddAllRecommendedTopics}
+            disabled={getTopicsWithBothModels().length === 0}
+            title="Add all topics recommended by both AI models"
+          >
+            <Plus size={16} className="me-2" />
+            Both AI Recommended
+          </button>
           <button
             className="btn btn-outline-secondary"
             onClick={() => setShowPromptModal(true)}
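
For anyone skimming the diff, the subtle part of `getTopicsWithBothModels` is the grouping step: it reduces the flat suggestion list into a topic → set-of-models map, so `models.size === 2` means "proposed by both models". A standalone sketch of that logic; the `ModelType` values and the suggestion shape are assumptions inferred from the diff, not the repo's actual definitions:

```typescript
// Sketch of the intersection logic in getTopicsWithBothModels.
// ModelType values and the suggestion shape are assumed, not from the repo.
type ModelType = "gemini" | "openai";

interface ModelSuggestion {
  topic: string;
  model: ModelType;
}

const suggestionsByModel: ModelSuggestion[] = [
  { topic: "climate policy", model: "gemini" },
  { topic: "climate policy", model: "openai" },
  { topic: "carbon markets", model: "gemini" },
];

// Group each topic with the set of models that suggested it; the Set also
// deduplicates repeat suggestions from the same model.
const topicsByModel = suggestionsByModel.reduce((acc, s) => {
  (acc[s.topic] ??= new Set<ModelType>()).add(s.model);
  return acc;
}, {} as Record<string, Set<ModelType>>);

// Keep only topics whose model set contains both models.
const bothModels = Object.entries(topicsByModel)
  .filter(([, models]) => models.size === 2)
  .map(([topic]) => topic);

console.log(bothModels); // ["climate policy"]
```

One wrinkle worth noting: `models.size === 2` hard-codes the model count, so adding a third model would silently change the meaning of "both"; comparing against the number of distinct models in play would be more robust.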

src/views/ContextPanel.tsx (2 additions & 2 deletions)

@@ -66,7 +66,7 @@ const ContextPanel: FC = () => {
         >
           <VscSettings /> Settings
         </button>
-        <button
+        {/* <button
           className={cx("btn ms-2 mt-1", "btn-outline-dark")}
           onClick={() => {
             const currentParams = new URLSearchParams(location.search);
@@ -91,7 +91,7 @@ const ContextPanel: FC = () => {
           }}
         >
           Topic Refiner
-        </button>
+        </button> */}
       </span>
       <span className="text-nowrap">
         {/*
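
Commenting JSX out with `{/* ... */}` works, but if the Topic Refiner entry point is expected to return, a feature flag keeps the markup compiled and type-checked. A minimal sketch under that assumption; `SHOW_TOPIC_REFINER` and the `PanelActions` component are hypothetical, not part of this repo:

```tsx
import type { FC } from "react";

// Hypothetical flag; in a real app it might come from config or an env var.
const SHOW_TOPIC_REFINER = false;

const PanelActions: FC = () => (
  <span>
    {/* Conditional rendering instead of a block comment keeps the JSX
        type-checked, and re-enabling it is a one-word change. */}
    {SHOW_TOPIC_REFINER && (
      <button className="btn ms-2 mt-1 btn-outline-dark">Topic Refiner</button>
    )}
  </span>
);

export default PanelActions;
```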
