diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index 53ef629b232..e84279cd07f 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -518,6 +518,7 @@ export function ChatActions(props: {
   showContextPrompts: boolean;
   toggleContextPrompts: () => void;
   uploading: boolean;
+  attachImages: string[];
 }) {
   const config = useAppConfig();
   const navigate = useNavigate();
@@ -547,25 +548,38 @@ export function ChatActions(props: {
   const [showModelSelector, setShowModelSelector] = useState(false);
   const [showUploadImage, setShowUploadImage] = useState(false);
 
+  // Guarding each state update below also fixes a memory leak: a redundant
+  // setter call re-renders the parent and re-runs this effect in a loop.
   useEffect(() => {
     const show = isVisionModel(currentModel);
-    setShowUploadImage(show);
+    if (showUploadImage !== show) {
+      setShowUploadImage(show);
+    }
+
     if (!show) {
-      props.setAttachImages([]);
-      props.setUploading(false);
+      // Only update these states when they actually changed, to prevent
+      // unnecessary re-renders.
+      if (props.uploading) {
+        props.setUploading(false);
+      }
+      if (props.attachImages.length !== 0) {
+        props.setAttachImages([]);
+      }
     }
 
     // if current model is not available
     // switch to first available model
-    const isUnavaliableModel = !models.some((m) => m.name === currentModel);
-    if (isUnavaliableModel && models.length > 0) {
+    const isUnavailableModel = !models.some((m) => m.name === currentModel);
+    if (isUnavailableModel && models.length > 0) {
       const nextModel = models[0].name as ModelType;
-      chatStore.updateCurrentSession(
-        (session) => (session.mask.modelConfig.model = nextModel),
-      );
-      showToast(nextModel);
+      // Only update if the next model is different from the current model
+      if (currentModel !== nextModel) {
+        chatStore.updateCurrentSession(
+          (session) => (session.mask.modelConfig.model = nextModel),
+        );
+        showToast(nextModel);
+      }
     }
-  }, [props, chatStore, currentModel, models]);
+  }, [props, chatStore, currentModel, models, showUploadImage]);
 
   return (
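Note: the guards in the hunk above matter because `props` appears in the
dependency array and is a fresh object on every render, so an unconditional
setter call re-renders the parent, which re-runs the effect, which calls the
setter again. A minimal sketch of the pattern, assuming standard React hooks
semantics (the hook name and prop shape below are illustrative, not code from
this PR):

    // Illustrative sketch only; not part of the diff.
    import { useEffect } from "react";

    interface UploadProps {
      uploading: boolean;
      attachImages: string[];
      setUploading: (v: boolean) => void;
      setAttachImages: (v: string[]) => void;
    }

    // Clears upload state once the current model loses vision support.
    // Each setter call re-renders the parent and re-runs this effect, so a
    // setter is invoked only when its value would actually change; that way
    // the effect settles after a single pass instead of looping.
    function useClearUploadsWhenHidden(show: boolean, props: UploadProps) {
      useEffect(() => {
        if (show) return;
        if (props.uploading) props.setUploading(false);
        if (props.attachImages.length !== 0) props.setAttachImages([]);
      }, [show, props]);
    }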
@@ -1258,14 +1272,19 @@ function _Chat() {
       setMsgRenderIndex(nextPageMsgIndex);
     }
 
-    setHitBottom(isHitBottom);
-    setAutoScroll(isHitBottom);
+    // Only update state when the value actually changed; this prevents an
+    // infinite update loop and fixes a memory leak.
+    if (hitBottom !== isHitBottom) {
+      setHitBottom(isHitBottom);
+      setAutoScroll(isHitBottom);
+    }
   }, [
     setHitBottom,
     setAutoScroll,
     isMobileScreen,
     msgRenderIndex,
     setMsgRenderIndex, // Added setMsgRenderIndex
+    hitBottom, // Include hitBottom in the dependency array
   ]);
 
   // Use the custom hook to debounce the onChatBodyScroll function
@@ -1727,11 +1746,13 @@ function _Chat() {
                         defaultShow={i >= messages.length - 6}
                       />
                       {getMessageImages(message).length == 1 && (
-                        <img
+                        // Warning: "Using `<img>` could result in slower LCP and higher bandwidth. Consider using `<Image />` from `next/image` to automatically optimize images. This may incur additional usage or cost from your provider. See: https://nextjs.org/docs/messages/no-img-element  @next/next/no-img-element"
+                        // This warning does not apply here: image optimization is already handled on the provider side (for example, when using gemini-pro-vision).
+                        <img
                           className={styles["chat-message-item-image"]}
                           src={getMessageImages(message)[0]}
                           alt=""
                         />
                       )}
                       {getMessageImages(message).length > 1 && (
@@ -1797,6 +1818,7 @@ function _Chat() {
             }}
             showContextPrompts={false}
             toggleContextPrompts={() => showToast(Locale.WIP)}
+            attachImages={attachImages}
           />
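Note: the same idea drives the onChatBodyScroll change in the @@ -1258 hunk
above. Once the callback reads `hitBottom`, the exhaustive-deps rule requires
it in the dependency array, and without the equality guard every scroll event
would set state, re-create the callback, and re-render. A condensed sketch
(the `THRESHOLD` value and hook shape are assumptions, not code from this PR):

    // Illustrative sketch only; not part of the diff.
    import { useCallback, useState } from "react";

    const THRESHOLD = 10; // px of slack before the view counts as "at bottom"

    function useHitBottom() {
      const [hitBottom, setHitBottom] = useState(true);
      const [autoScroll, setAutoScroll] = useState(true);

      const onChatBodyScroll = useCallback(
        (e: HTMLElement) => {
          const isHitBottom =
            e.scrollTop + e.clientHeight >= e.scrollHeight - THRESHOLD;
          // Update only when the value flips; hitBottom is a dependency, so
          // an unconditional set would re-create this callback every event.
          if (hitBottom !== isHitBottom) {
            setHitBottom(isHitBottom);
            setAutoScroll(isHitBottom);
          }
        },
        [hitBottom],
      );

      return { hitBottom, autoScroll, setAutoScroll, onChatBodyScroll };
    }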