diff --git a/src/components/SearchImages.tsx b/src/components/SearchImages.tsx
index 9e9185c..c729a60 100644
--- a/src/components/SearchImages.tsx
+++ b/src/components/SearchImages.tsx
@@ -25,15 +25,27 @@ const SearchImages = ({
   const [loading, setLoading] = useState(true);
   const [open, setOpen] = useState(false);
   const [slides, setSlides] = useState([]);
-  const hasLoadedRef = useRef(false);
+  const [displayLimit, setDisplayLimit] = useState(10); // Initially show only 10 images
+  const loadedMessageIdsRef = useRef<Set<string>>(new Set());
+
+  // Function to show more images when the Show More button is clicked
+  const handleShowMore = () => {
+    // If we're already showing all images, don't do anything
+    if (images && displayLimit >= images.length) return;
+
+    // Otherwise, increase the display limit by 10, or show all images
+    setDisplayLimit(prev => images ? Math.min(prev + 10, images.length) : prev);
+  };
 
   useEffect(() => {
     // Skip fetching if images are already loaded for this message
-    if (hasLoadedRef.current) {
+    if (loadedMessageIdsRef.current.has(messageId)) {
       return;
     }
 
     const fetchImages = async () => {
+      // Mark as loaded to prevent refetching
+      loadedMessageIdsRef.current.add(messageId);
       setLoading(true);
 
       const chatModelProvider = localStorage.getItem('chatModelProvider');
@@ -80,8 +92,7 @@
         if (onImagesLoaded && images.length > 0) {
           onImagesLoaded(images.length);
         }
-        // Mark as loaded to prevent refetching
-        hasLoadedRef.current = true;
+
       } catch (error) {
         console.error('Error fetching images:', error);
       } finally {
@@ -91,11 +102,7 @@ const SearchImages = ({
 
     fetchImages();
 
-    // Reset the loading state when component unmounts
-    return () => {
-      hasLoadedRef.current = false;
-    };
-  }, [query, messageId]);
+  }, [query, messageId, chatHistory, onImagesLoaded]);
 
   return (
     <>
@@ -111,8 +118,8 @@ const SearchImages = ({
       )}
       {images !== null && images.length > 0 && (
         <>
-          <div ...>
-            {images.map((image, i) => (
+          <div ...>
+            {images.slice(0, displayLimit).map((image, i) => (
               <img
                 onClick={() => {
                   setOpen(true);
@@ -129,6 +136,17 @@ const SearchImages = ({
               />
             ))}
           </div>
+          {images.length > displayLimit && (
+            <div ...>
+              <button onClick={handleShowMore} ...>
+                Show More
+              </button>
+            </div>
+          )}
           <Lightbox open={open} close={() => setOpen(false)} slides={slides} />
         </>
       )}
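The component change above is a plain client-side pagination pattern: keep everything that was fetched in state, render only the first `displayLimit` entries with `slice`, and grow the limit when the button is pressed. A minimal standalone sketch of that pattern, assuming nothing about the real component beyond what the diff shows (names such as `Thumb`, `ThumbnailGrid`, and `PAGE_SIZE` are illustrative, not from the codebase):

```tsx
import { useState } from 'react';

// Illustrative types/names; the real component receives images from its own fetch.
type Thumb = { img_src: string; title: string };

const PAGE_SIZE = 10;

const ThumbnailGrid = ({ items }: { items: Thumb[] }) => {
  // Render only the first `displayLimit` items; the rest stay in memory.
  const [displayLimit, setDisplayLimit] = useState(PAGE_SIZE);

  const handleShowMore = () => {
    // Grow the window by one page, clamped to the number of items available.
    setDisplayLimit((prev) => Math.min(prev + PAGE_SIZE, items.length));
  };

  return (
    <>
      {items.slice(0, displayLimit).map((item, i) => (
        <img key={i} src={item.img_src} alt={item.title} />
      ))}
      {items.length > displayLimit && (
        <button onClick={handleShowMore}>Show More</button>
      )}
    </>
  );
};

export default ThumbnailGrid;
```

Because `slice` never mutates the source array, clicking Show More only re-renders; it does not trigger another fetch.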
diff --git a/src/components/SearchVideos.tsx b/src/components/SearchVideos.tsx
index 234cbf3..b8fd26f 100644
--- a/src/components/SearchVideos.tsx
+++ b/src/components/SearchVideos.tsx
@@ -40,12 +40,22 @@ const Searchvideos = ({
   const [open, setOpen] = useState(false);
   const [slides, setSlides] = useState([]);
   const [currentIndex, setCurrentIndex] = useState(0);
+  const [displayLimit, setDisplayLimit] = useState(10); // Initially show only 10 videos
   const videoRefs = useRef<(HTMLIFrameElement | null)[]>([]);
-  const hasLoadedRef = useRef(false);
+  const loadedMessageIdsRef = useRef<Set<string>>(new Set());
+
+  // Function to show more videos when the Show More button is clicked
+  const handleShowMore = () => {
+    // If we're already showing all videos, don't do anything
+    if (videos && displayLimit >= videos.length) return;
+
+    // Otherwise, increase the display limit by 10, or show all videos
+    setDisplayLimit(prev => videos ? Math.min(prev + 10, videos.length) : prev);
+  };
 
   useEffect(() => {
     // Skip fetching if videos are already loaded for this message
-    if (hasLoadedRef.current) {
+    if (loadedMessageIdsRef.current.has(messageId)) {
       return;
     }
 
@@ -99,7 +109,7 @@
           onVideosLoaded(videos.length);
         }
         // Mark as loaded to prevent refetching
-        hasLoadedRef.current = true;
+        loadedMessageIdsRef.current.add(messageId);
       } catch (error) {
         console.error('Error fetching videos:', error);
       } finally {
@@ -109,11 +119,7 @@
 
     fetchVideos();
 
-    // Reset the loading state when component unmounts
-    return () => {
-      hasLoadedRef.current = false;
-    };
-  }, [query, messageId]);
+  }, [query, messageId, chatHistory, onVideosLoaded]);
 
   return (
     <>
@@ -129,8 +135,8 @@
       )}
       {videos !== null && videos.length > 0 && (
         <>
-          <div ...>
-            {videos.map((video, i) => (
+          <div ...>
+            {videos.slice(0, displayLimit).map((video, i) => (
              <div
                onClick={() => {
                  setOpen(true);
@@ -155,6 +161,17 @@ const Searchvideos = ({
              </div>
            ))}
          </div>
+          {videos.length > displayLimit && (
+            <div ...>
+              <button onClick={handleShowMore} ...>
+                Show More
+              </button>
+            </div>
+          )}
          <Lightbox
            open={open}
            close={() => setOpen(false)}
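Both components replace the old boolean `hasLoadedRef` (which was reset on unmount) with a `Set` of message IDs held in a ref, so a message that has already been fetched is never fetched again even though `chatHistory` and the `onImagesLoaded`/`onVideosLoaded` callbacks now sit in the dependency array. A rough sketch of that guard as a standalone hook, with a placeholder `fetchResultsFor` standing in for the real search request (the image component marks the ID before fetching, the video component after a successful fetch; the sketch follows the former):

```tsx
import { useEffect, useRef, useState } from 'react';

// Placeholder for the real search request made by the components above.
const fetchResultsFor = async (messageId: string, query: string): Promise<string[]> => {
  return [`results for "${query}" (message ${messageId})`];
};

const useOncePerMessage = (messageId: string, query: string) => {
  const [results, setResults] = useState<string[]>([]);
  // A ref keeps the Set across re-renders without triggering them,
  // and .has()/.add() make the guard per-message rather than per-mount.
  const loadedMessageIdsRef = useRef<Set<string>>(new Set());

  useEffect(() => {
    // Already fetched for this message: do nothing, even if other deps changed.
    if (loadedMessageIdsRef.current.has(messageId)) return;
    // Mark before awaiting so overlapping effect runs cannot start a second request.
    loadedMessageIdsRef.current.add(messageId);

    fetchResultsFor(messageId, query).then(setResults).catch(console.error);
  }, [messageId, query]);

  return results;
};

export default useOncePerMessage;
```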
diff --git a/src/lib/chains/imageSearchAgent.ts b/src/lib/chains/imageSearchAgent.ts
index 4fd684f..b1dd8af 100644
--- a/src/lib/chains/imageSearchAgent.ts
+++ b/src/lib/chains/imageSearchAgent.ts
@@ -6,29 +6,73 @@ import {
 import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
-import { StringOutputParser } from '@langchain/core/output_parsers';
+import LineOutputParser from '../outputParsers/lineOutputParser';
 import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const imageSearchChainPrompt = `
-You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
-You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
+# Instructions
+- You will be given a question from a user and a conversation history
+- Rephrase the question based on the conversation so it is a standalone question that can be used to search for images that are relevant to the question
+- Ensure the rephrased question agrees with the conversation and is relevant to the conversation
+- If you are thinking or reasoning, use <think> tags to indicate your thought process
+- If you are thinking or reasoning, do not use <answer> and </answer> tags in your thinking. Those tags should only be used in the final output
+- Use the provided date to ensure the rephrased question is relevant to the current date and time if applicable
 
-Example:
-1. Follow up question: What is a cat?
-Rephrased: A cat
+# Data locations
+- The history is contained in the <conversation> tag after the <examples> below
+- The user question is contained in the <query> tag after the <examples> below
+- Output your answer in an <answer> tag
+- Current date & time in ISO format (UTC timezone) is: {date}
+- Do not include any other text in your answer
+
+<examples>
+## Example 1 input
+<conversation>
+Who won the last F1 race?\nAyrton Senna won the Monaco Grand Prix. It was a tight race with lots of overtakes. Alain Prost was in the lead for most of the race until the last lap when Senna overtook them.
+</conversation>
+
+<query>
+What were the highlights of the race?
+</query>
 
-2. Follow up question: What is a car? How does it works?
-Rephrased: Car working
+## Example 1 output
+<answer>
+F1 Monaco Grand Prix highlights
+</answer>
 
-3. Follow up question: How does an AC work?
-Rephrased: AC working
+## Example 2 input
+<conversation>
+What is the theory of relativity?
+</conversation>
+
+<query>
+What is the theory of relativity?
+</query>
 
-Conversation:
+## Example 2 output
+<answer>
+Theory of relativity
+</answer>
+
+## Example 3 input
+<conversation>
+I'm looking for a nice vacation spot. Where do you suggest?\nI suggest you go to Hawaii. It's a beautiful place with lots of beaches and activities to do.\nI love the beach! What are some activities I can do there?\nYou can go surfing, snorkeling, or just relax on the beach.
+</conversation>
+
+<query>
+What are some activities I can do in Hawaii?
+</query>
+
+## Example 3 output
+<answer>
+Hawaii activities
+</answer>
+</examples>
+
+<conversation>
 {chat_history}
-
-Follow up question: {query}
-Rephrased question:
+</conversation>
+
+<query>
+{query}
+</query>
 `;
 
 type ImageSearchChainInput = {
   chat_history: BaseMessage[];
   query: string;
 };
 
@@ -42,7 +86,9 @@ interface ImageSearchResult {
   img_src: string;
   url: string;
   title: string;
 }
 
-const strParser = new StringOutputParser();
+const outputParser = new LineOutputParser({
+  key: 'answer',
+});
 
 const createImageSearchChain = (llm: BaseChatModel) => {
   return RunnableSequence.from([
@@ -53,14 +99,13 @@ const createImageSearchChain = (llm: BaseChatModel) => {
       query: (input: ImageSearchChainInput) => {
         return input.query;
       },
+      date: () => new Date().toISOString(),
     }),
     PromptTemplate.fromTemplate(imageSearchChainPrompt),
     llm,
-    strParser,
-    RunnableLambda.from(async (input: string) => {
-      input = input.replace(/<think>.*?<\/think>/g, '');
-
-      const res = await searchSearxng(input, {
+    outputParser,
+    RunnableLambda.from(async (searchQuery: string) => {
+      const res = await searchSearxng(searchQuery, {
         engines: ['bing images', 'google images'],
       });
 
@@ -76,7 +121,7 @@ const createImageSearchChain = (llm: BaseChatModel) => {
       }
     });
 
-    return images.slice(0, 10);
+    return images;
   }),
 ]);
};
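The parser swap is the important part of this file: instead of taking the raw LLM string and regex-stripping `<think>` blocks, the chain now asks the model to wrap its final query in an `<answer>` tag and pulls that out with `new LineOutputParser({ key: 'answer' })`. A minimal sketch of what such a tag-keyed extraction does (an illustration only, not the project's actual `LineOutputParser` implementation):

```ts
// Extract the content of the first <key>...</key> block from a model response.
const extractTag = (text: string, key: string): string => {
  const match = text.match(new RegExp(`<${key}>([\\s\\S]*?)</${key}>`, 'i'));
  return match ? match[1].trim() : '';
};

// A reasoning model's raw output still yields a clean search query:
const raw = [
  '<think>The user wants highlights of the race mentioned earlier.</think>',
  '<answer>F1 Monaco Grand Prix highlights</answer>',
].join('\n');

console.log(extractTag(raw, 'answer')); // "F1 Monaco Grand Prix highlights"
```

Anything outside the tag, including reasoning, is ignored, which is why the old `replace(/<think>.*?<\/think>/g, '')` line can be dropped.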
diff --git a/src/lib/chains/videoSearchAgent.ts b/src/lib/chains/videoSearchAgent.ts
index f7cb156..5e63ad6 100644
--- a/src/lib/chains/videoSearchAgent.ts
+++ b/src/lib/chains/videoSearchAgent.ts
@@ -6,30 +6,74 @@ import {
 import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
-import { StringOutputParser } from '@langchain/core/output_parsers';
+import LineOutputParser from '../outputParsers/lineOutputParser';
 import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const VideoSearchChainPrompt = `
-  You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
-  You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
+# Instructions
+- You will be given a question from a user and a conversation history
+- Rephrase the question based on the conversation so it is a standalone question that can be used to search Youtube for videos
+- Ensure the rephrased question agrees with the conversation and is relevant to the conversation
+- If you are thinking or reasoning, use <think> tags to indicate your thought process
+- If you are thinking or reasoning, do not use <answer> and </answer> tags in your thinking. Those tags should only be used in the final output
+- Use the provided date to ensure the rephrased question is relevant to the current date and time if applicable
+
+# Data locations
+- The history is contained in the <conversation> tag after the <examples> below
+- The user question is contained in the <query> tag after the <examples> below
+- Output your answer in an <answer> tag
+- Current date & time in ISO format (UTC timezone) is: {date}
+- Do not include any other text in your answer
 
-  Example:
-  1. Follow up question: How does a car work?
-  Rephrased: How does a car work?
-
-  2. Follow up question: What is the theory of relativity?
-  Rephrased: What is theory of relativity
-
-  3. Follow up question: How does an AC work?
-  Rephrased: How does an AC work
-
-  Conversation:
-  {chat_history}
-
-  Follow up question: {query}
-  Rephrased question:
-  `;
+<examples>
+## Example 1 input
+<conversation>
+Who won the last F1 race?\nAyrton Senna won the Monaco Grand Prix. It was a tight race with lots of overtakes. Alain Prost was in the lead for most of the race until the last lap when Senna overtook them.
+</conversation>
+
+<query>
+What were the highlights of the race?
+</query>
+
+## Example 1 output
+<answer>
+F1 Monaco Grand Prix highlights
+</answer>
+
+## Example 2 input
+<conversation>
+What is the theory of relativity?
+</conversation>
+
+<query>
+What is the theory of relativity?
+</query>
+
+## Example 2 output
+<answer>
+What is the theory of relativity?
+</answer>
+
+## Example 3 input
+<conversation>
+I'm looking for a nice vacation spot. Where do you suggest?\nI suggest you go to Hawaii. It's a beautiful place with lots of beaches and activities to do.\nI love the beach! What are some activities I can do there?\nYou can go surfing, snorkeling, or just relax on the beach.
+</conversation>
+
+<query>
+What are some activities I can do in Hawaii?
+</query>
+
+## Example 3 output
+<answer>
+Activities to do in Hawaii
+</answer>
+</examples>
+
+<conversation>
+{chat_history}
+</conversation>
+
+<query>
+{query}
+</query>
+`;
 
 type VideoSearchChainInput = {
   chat_history: BaseMessage[];
   query: string;
 };
 
@@ -43,7 +87,9 @@ interface VideoSearchResult {
   img_src: string;
   url: string;
   title: string;
   iframe_src: string;
 }
 
-const strParser = new StringOutputParser();
+const answerParser = new LineOutputParser({
+  key: 'answer',
+});
 
 const createVideoSearchChain = (llm: BaseChatModel) => {
   return RunnableSequence.from([
@@ -54,14 +100,13 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
       query: (input: VideoSearchChainInput) => {
         return input.query;
       },
+      date: () => new Date().toISOString(),
     }),
     PromptTemplate.fromTemplate(VideoSearchChainPrompt),
     llm,
-    strParser,
-    RunnableLambda.from(async (input: string) => {
-      input = input.replace(/<think>.*?<\/think>/g, '');
-
-      const res = await searchSearxng(input, {
+    answerParser,
+    RunnableLambda.from(async (searchQuery: string) => {
+      const res = await searchSearxng(searchQuery, {
        engines: ['youtube'],
      });
 
@@ -83,7 +128,7 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
       }
     });
 
-    return videos.slice(0, 10);
+    return videos;
  }),
 ]);
};
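Both chains also gain a `{date}` variable that is computed at invocation time, so the rephrased query can respect wording like "latest" or "current". A small sketch of how the added `date` entry flows into the prompt, using the same `@langchain/core` pieces these files already import (`SketchInput` and the shortened template text are illustrative stand-ins, not the real prompt or types):

```ts
import { RunnableSequence } from '@langchain/core/runnables';
import { PromptTemplate } from '@langchain/core/prompts';

type SketchInput = { query: string };

// Shortened stand-in for the real search prompts above.
const template = PromptTemplate.fromTemplate(
  'Current date & time in ISO format (UTC timezone) is: {date}\n<query>\n{query}\n</query>',
);

// Same shape as the chains above: a map step fills {query} and {date}, then the template renders.
const chain = RunnableSequence.from([
  {
    query: (input: SketchInput) => input.query,
    // Evaluated on every invoke, so each request sees a fresh timestamp.
    date: () => new Date().toISOString(),
  },
  template,
]);

// chain.invoke({ query: 'latest F1 highlights videos' }) resolves to a prompt value
// whose text contains the current ISO timestamp in place of {date}.
```

Dropping the `.slice(0, 10)` at the end of each chain pairs with the UI change: the full result list is returned to the client, and the components above decide how many entries to show at a time.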