@@ -49,6 +49,13 @@ interface TopicRefinerProps {
4949 onRequestSuggestions : ( model : string , prompt : string , apiKey : string , topics : string [ ] ) => Promise < void > ;
5050}
5151
52+ // Track which model suggested each topic
53+ type ModelType = 'openai' | 'gemini' ;
54+ interface TopicWithModel {
55+ topic : string ;
56+ model : ModelType ;
57+ }
58+
5259export const TopicRefiner : FC < Omit < TopicRefinerProps , 'isLlmProcessing' > > = ( {
5360 llmSuggestions = [ ] ,
5461 setLlmSuggestions,
@@ -74,6 +81,7 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
7481 const [ isLoadingSuggestions , setIsLoadingSuggestions ] = useState ( false ) ;
7582 const [ inputValue , setInputValue ] = useState ( "" ) ;
7683 const [ canAddTopic , setCanAddTopic ] = useState ( false ) ;
84+ const [ suggestionsByModel , setSuggestionsByModel ] = useState < TopicWithModel [ ] > ( [ ] ) ;
7785
7886 const moveToRightColumn = ( topic : string ) => {
7987 setFinalizedTopics ( prev => [ ...prev , topic ] ) ;
@@ -105,9 +113,36 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
105113 }
106114
// NOTE(review): this is the tail of an async submit handler whose header is
// outside this hunk. It requests fresh suggestions, then tags them with the
// provider that produced them so both models' badges can coexist.
106114 try {
// Map the selected model id to a provider tag.
// NOTE(review): anything other than 'gpt-3.5-turbo' is classified as
// 'gemini' — confirm those are the only two selectable models.
116+ const currentModel : ModelType = selectedModel === 'gpt-3.5-turbo' ? 'openai' : 'gemini' ;
117+
118+ // Get suggestions from the API
// BUG(review): `llmSuggestions` below is the value captured in this render's
// closure. Even if `onRequestSuggestions` updates the parent's state, this
// handler will NOT see the refreshed array — React state is a snapshot, so
// the emptiness check and the `.map` both read data one render behind.
// Either have `onRequestSuggestions` return the new suggestions, or move the
// tagging into a `useEffect` keyed on `llmSuggestions`.
108119 await onRequestSuggestions ( selectedModel , customPrompt , apiKey , selectedTopics ) ;
120+
121+ if ( ! llmSuggestions || llmSuggestions . length === 0 ) {
122+ console . warn ( `No suggestions received from ${ currentModel } ` ) ;
// NOTE(review): early return skips `setShowPromptModal(false)` below, so the
// modal stays open with no user-visible feedback — confirm that is intended.
123+ return ;
124+ }
125+
126+ // Update suggestions, keeping existing ones from the other model
127+ setSuggestionsByModel ( prev => {
128+ // Remove any existing suggestions from the current model
129+ const otherModelSuggestions = prev . filter ( s => s . model !== currentModel ) ;
130+ // Add the new suggestions with the current model
131+ const newSuggestions : TopicWithModel [ ] = llmSuggestions . map ( topic => ( {
132+ topic,
133+ model : currentModel
134+ } ) ) ;
135+ const updatedSuggestions = [ ...otherModelSuggestions , ...newSuggestions ] ;
// NOTE(review): leftover debug logging — remove before shipping.
136+ console . log ( 'Updated suggestions:' , updatedSuggestions ) ;
137+ return updatedSuggestions ;
138+ } ) ;
139+
140+ // Don't update parent component - we'll manage suggestions internally
141+ // setLlmSuggestions(allTopics);
142+
109143 setShowPromptModal ( false ) ;
// The bare `catch {` was replaced with one that logs the error for
// debuggability before showing the user-facing alert.
110- } catch {
144+ } catch ( error ) {
145+ console . error ( 'Error getting suggestions:' , error ) ;
111146 alert ( 'Failed to get AI suggestions. Please try again.' ) ;
112147 }
113148 } ;
@@ -185,6 +220,32 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
185220 // }
186221 } ;
187222
223+ // Helper function to render model badges
224+ const renderModelBadges = ( topic : string ) => {
225+ const normalizedTopic = topic . toLowerCase ( ) . trim ( ) ;
226+ const topicSuggestions = suggestionsByModel . filter (
227+ s => s . topic . toLowerCase ( ) . trim ( ) === normalizedTopic
228+ ) ;
229+ console . log ( `Rendering badges for ${ topic } :` , topicSuggestions ) ;
230+ return topicSuggestions . map ( suggestion => (
231+ < span
232+ key = { `${ suggestion . topic } -${ suggestion . model } ` }
233+ className = { `badge ms-1 ${ suggestion . model === 'openai' ? 'bg-primary' : 'bg-success' } ` }
234+ style = { { fontSize : '0.75rem' } }
235+ >
236+ { suggestion . model === 'openai' ? 'OpenAI' : 'Gemini' }
237+ </ span >
238+ ) ) ;
239+ } ;
240+
241+ // Add effect to initialize state when component mounts
// NOTE(review): this effect is redundant — `useState([])` already
// initializes `suggestionsByModel` to an empty array, and this runs only
// after the first render, issuing a no-op state update. Safe to delete
// unless it is meant to guard against a future non-empty initializer.
242+ useEffect ( ( ) => {
243+ // Only clear our internal state
244+ setSuggestionsByModel ( [ ] ) ;
245+ // Don't clear parent state
246+ // setLlmSuggestions([]);
247+ } , [ ] ) ;
248+
188249 return (
189250 < main className = "container-fluid py-4" style = { { height : '100vh' , overflowY : 'auto' } } >
190251 { /* Navigation/Header Row */ }
@@ -271,13 +332,12 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
271332 < div className = "d-flex flex-column h-100" style = { { minHeight : 250 , justifyContent : 'flex-start' } } >
272333 < div className = "list-group w-100 mb-0" style = { { flex : 1 , overflowY : 'auto' , maxHeight : 480 , marginBottom : 0 , paddingBottom : 0 } } >
273334 { selectedTopics . map ( ( topic ) => {
274- const isAI = llmSuggestions . includes ( topic ) ;
275335 const isAdded = finalizedTopics . includes ( topic ) ;
276336 return (
277337 < div key = { topic } className = "list-group-item d-flex justify-content-between align-items-center" >
278338 < span >
279339 { topic }
280- { isAI && < span className = "badge bg-info ms-2" > AI </ span > }
340+ { renderModelBadges ( topic ) }
281341 </ span >
282342 { isAdded ? (
283343 < button
@@ -444,7 +504,7 @@ export const TopicRefiner: FC<Omit<TopicRefinerProps, 'isLlmProcessing'>> = ({
444504 < div key = { topic } className = "list-group-item py-2 px-3 d-flex justify-content-between align-items-center" >
445505 < span className = "d-flex align-items-center" style = { { fontSize : '0.9rem' } } >
446506 { topic }
447- { llmSuggestions . includes ( topic ) && < span className = "badge bg-info ms-2" style = { { fontSize : '0.75rem' } } > AI </ span > }
507+ { renderModelBadges ( topic ) }
448508 </ span >
449509 < button
450510 className = "btn btn-sm btn-outline-danger ms-2"
0 commit comments