Skip to content
This repository was archived by the owner on Sep 15, 2024. It is now read-only.

Fix [UI/UX] [Front End] [Chats] Causing Memory Leaks #285

Merged
merged 3 commits into from
Feb 25, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 36 additions & 14 deletions app/components/chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -518,6 +518,7 @@ export function ChatActions(props: {
showContextPrompts: boolean;
toggleContextPrompts: () => void;
uploading: boolean;
attachImages: string[];
}) {
const config = useAppConfig();
const navigate = useNavigate();
Expand Down Expand Up @@ -547,25 +548,38 @@ export function ChatActions(props: {
const [showModelSelector, setShowModelSelector] = useState(false);
const [showUploadImage, setShowUploadImage] = useState(false);

// Guard each state update behind an equality check so the effect only
// re-renders when something actually changed; this also fixes the memory leak.
useEffect(() => {
const show = isVisionModel(currentModel);
setShowUploadImage(show);
if (showUploadImage !== show) {
setShowUploadImage(show);
}

if (!show) {
props.setAttachImages([]);
props.setUploading(false);
// Check if there's really a need to update these states to prevent unnecessary re-renders
if (props.uploading) {
props.setUploading(false);
}
if (props.attachImages.length !== 0) {
props.setAttachImages([]);
}
}

// if current model is not available
// switch to first available model
const isUnavaliableModel = !models.some((m) => m.name === currentModel);
if (isUnavaliableModel && models.length > 0) {
const isUnavailableModel = !models.some((m) => m.name === currentModel);
if (isUnavailableModel && models.length > 0) {
const nextModel = models[0].name as ModelType;
chatStore.updateCurrentSession(
(session) => (session.mask.modelConfig.model = nextModel),
);
showToast(nextModel);
// Only update if the next model is different from the current model
if (currentModel !== nextModel) {
chatStore.updateCurrentSession(
(session) => (session.mask.modelConfig.model = nextModel),
);
showToast(nextModel);
}
}
}, [props, chatStore, currentModel, models]);
}, [props, chatStore, currentModel, models, showUploadImage]);

return (
<div className={styles["chat-input-actions"]}>
Expand Down Expand Up @@ -1258,14 +1272,19 @@ function _Chat() {
setMsgRenderIndex(nextPageMsgIndex);
}

setHitBottom(isHitBottom);
setAutoScroll(isHitBottom);
// Only update state when the value actually changes, to prevent an
// infinite update loop (this also fixes the memory leak).
if (hitBottom !== isHitBottom) {
setHitBottom(isHitBottom);
setAutoScroll(isHitBottom);
}
}, [
setHitBottom,
setAutoScroll,
isMobileScreen,
msgRenderIndex,
setMsgRenderIndex, // Added setMsgRenderIndex
hitBottom, // Include hitBottom in the dependency array
]);

// Use the custom hook to debounce the onChatBodyScroll function
Expand Down Expand Up @@ -1727,11 +1746,13 @@ function _Chat() {
defaultShow={i >= messages.length - 6}
/>
{getMessageImages(message).length == 1 && (
<Image
// Fixes rendering while an upload is in progress.
// Note: the `@next/next/no-img-element` lint warning is intentionally ignored here;
// image optimization is already handled on the provider side (e.g. gemini-pro-vision),
// so a plain <img> is used instead of next/image.
<img
className={styles["chat-message-item-image"]}
src={getMessageImages(message)[0]}
alt=""
layout="responsive"
/>
)}
{getMessageImages(message).length > 1 && (
Expand Down Expand Up @@ -1797,6 +1818,7 @@ function _Chat() {
}}
showContextPrompts={false}
toggleContextPrompts={() => showToast(Locale.WIP)}
attachImages={attachImages}
/>
<label
className={`${styles["chat-input-panel-inner"]} ${
Expand Down
Loading