I have an app with cards. Cards are fetched with useInfiniteQuery from React Query. I fetch a new page of cards each time I reach the end of the list. The backend sends offset/limit-based responses with this structure:
{
"count": 1546,
"next": "http://localhost:8080/api/v1/logs-list/json/?limit=25&offset=150",
"previous": "http://localhost:8080/api/v1/logs-list/json/?limit=25&offset=100",
"results": [
{
"id": 700,
"chat": "",
"pc_name": "kchernya",
"superuser": false,
"user_plant": "Plant 1",
"ip": "10.249.22.219",
"protocol": "[Test-01]",
"plant": "Plant 1",
"time_create": "2023-12-20T16:42:45.569613+03:00",
"if_extra": 0,
"extra_str": "",
"comment": "131221312321321",
"email_list": null,
"email_copy_list": null,
"status": "Waiting",
"attach": true
},
{
"id": 693,
"chat": "",
"pc_name": "ykonopat",
"superuser": false,
"user_plant": "Plant 2",
"ip": "10.249.122.120",
"protocol": "[Test-01]",
"plant": "Plant 2",
"time_create": "2023-12-13T15:31:43.395731+03:00",
"if_extra": 0,
"extra_str": "",
"comment": "test",
"email_list": null,
"email_copy_list": null,
"status": "Waiting",
"attach": false
}
// ...
]
}
So each new fetch returns a response with the same structure, and I need to flatten the results arrays in order to append the new cards to the existing ones.
interface LogCardsProps {
pageLimit: number;
}
const LogCards = ({ pageLimit }: LogCardsProps) => {
const {
data: logs,
fetchNextPage,
isFetchingNextPage,
isLoading,
} = useInfiniteLogsQuery(pageLimit);
const { ref, inView } = useInView();
const allLogs = useMemo(() => logs?.pages.map((log) => log.results).flat(), [logs]);
useEffect(() => {
if (inView) fetchNextPage();
}, [fetchNextPage, inView]);
return !isLoading ? (
<Box>
{allLogs?.map((log) => <LogCard key={log.id} log={log} />)}
<Box sx={{ height: '20px' }} ref={ref}>
{isFetchingNextPage && <LinearProgress sx={{ m: '2px' }} />}
</Box>
</Box>
) : (
<CircularProgress />
);
};
Here's the fetch function:
export const fetchLogs = async (
offset: number = 0,
pageLimit: number = 25,
): Promise<FetchLogsResponse> => {
try {
let link = `${BASE_URL}/logs-list/json/?offset=${offset}&limit=${pageLimit}`;
const request = await fetch(link);
const response = await request.json();
return response;
} catch (error) {
console.error('Failed to fetch logs:', error);
throw error;
}
};
But after each new fetch the UI freezes for a split second while it flattens all of the results arrays from the different responses. I partially solved it with useMemo, so now only the new cards render when they are added, but there is still a short freeze right after the fetch completes. How can I improve this?
Also, here's the infinite query hook:
export const useInfiniteLogsQuery = (pageLimit: number = 25) =>
useInfiniteQuery({
queryKey: ['logs1'],
queryFn: ({ pageParam }) => fetchLogs(pageParam, pageLimit),
initialPageParam: 0,
getNextPageParam: (lastPage, __, prevLimit) => (lastPage.next ? prevLimit + pageLimit : null),
});
2 Answers
- Memoize LogCard: prevent re-renders of the cards that already exist.
- Virtualization: render only the visible part of a long list.
- Avoid flattening pages: render each page's results directly so the growing cost of rebuilding one big array disappears (see the sketch after the example below).
Rather than merging all pages into a single array, each page's results are rendered inside a Fragment. This eliminates the .flat() operation, which gets slower as more pages accumulate.
Wrapping LogCard in React.memo ensures that each card re-renders only when its own data changes, minimizing rendering overhead.
Example:
// Memoize LogCard to prevent unnecessary re-renders
const LogCard = React.memo(({ log }: { log: LogType }) => {
// Your LogCard implementation
});
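And here is a minimal sketch of the "avoid flattening" idea, reusing LogCard and the data object returned by useInfiniteLogsQuery from the question (keying each Fragment by page index assumes pages are only ever appended, never reordered):
// Inside the component's JSX: render each fetched page directly,
// so no combined array has to be rebuilt when a new page arrives.
{logs?.pages.map((page, pageIndex) => (
  // Keying by page index assumes pages are only appended.
  <React.Fragment key={pageIndex}>
    {page.results.map((log) => (
      <LogCard key={log.id} log={log} />
    ))}
  </React.Fragment>
))}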
Fixed it by using virtualization from TanStack (@tanstack/react-virtual). It seems the LogCard component was heavy with MUI components, and it was difficult for the browser to render all of them smoothly. Virtualization helped.
import { useVirtualizer } from '@tanstack/react-virtual';
Here's the code:
const LogCards = ({ pageLimit }: LogCardsProps) => {
const {
data: logs,
fetchNextPage,
isFetchingNextPage,
isLoading,
} = useInfiniteLogsQuery(pageLimit);
const { ref, inView } = useInView();
const allLogs = useMemo(() => logs?.pages.flatMap((page) => page.results) ?? [], [logs]);
const parentRef = useRef<HTMLDivElement>(null);
const rowVirtualizer = useVirtualizer({
count: allLogs.length,
getScrollElement: useCallback(() => parentRef.current, []),
estimateSize: useCallback(() => 300, []),
overscan: 2,
gap: 8,
});
useEffect(() => {
if (inView) fetchNextPage();
}, [fetchNextPage, inView]);
return !isLoading ? (
    /* parentRef must point at the scrollable element, so this Box needs a
       constrained height and overflow: 'auto' (adjust the values to your layout) */
    <Box ref={parentRef} sx={{ height: '80vh', overflow: 'auto' }}>
<div
style={{
height: `${rowVirtualizer.getTotalSize()}px`,
position: 'relative',
}}
>
        {rowVirtualizer.getVirtualItems().map((virtualRow) => {
          const logCardData = allLogs[virtualRow.index];
          if (!logCardData) return null;
          return (
            // Each row is absolutely positioned at its virtual offset and
            // measured for dynamic height via measureElement.
            <div
              key={logCardData.id}
              data-index={virtualRow.index}
              ref={rowVirtualizer.measureElement}
              style={{
                position: 'absolute',
                top: 0,
                left: 0,
                width: '100%',
                transform: `translateY(${virtualRow.start}px)`,
              }}
            >
              <LogCard log={logCardData} />
            </div>
          );
        })}
</div>
<Box sx={{ height: '20px' }} ref={ref}>
{isFetchingNextPage && <LinearProgress sx={{ m: '2px' }} />}
</Box>
</Box>
) : (
<LinearLoader />
);
};
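A possible refinement (a sketch roughly following TanStack Virtual's infinite-scroll example, not part of the original fix): let the virtualizer's items trigger fetchNextPage once the last rendered row reaches the end of the loaded data, instead of the separate useInView sentinel. This assumes hasNextPage is also destructured from useInfiniteLogsQuery.
// Inside LogCards, replacing the inView effect:
const virtualItems = rowVirtualizer.getVirtualItems();

useEffect(() => {
  const lastItem = virtualItems[virtualItems.length - 1];
  if (!lastItem) return;
  // Fetch the next page once the last rendered row is the last loaded log.
  if (lastItem.index >= allLogs.length - 1 && hasNextPage && !isFetchingNextPage) {
    fetchNextPage();
  }
}, [virtualItems, allLogs.length, hasNextPage, isFetchingNextPage, fetchNextPage]);
Either trigger works; this variant just removes the need for the separate sentinel element at the bottom of the list.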