// NOTE(review): this file has been flattened onto a few very long lines by an
// extraction step, and the JSX element tags (<RingProgress …>, <Table.Tr>,
// <Card>, <Accordion>, …) have been stripped — dangling fragments like "} />"
// remain. The code below is preserved byte-for-byte; these comments only
// annotate intent. TODO: restore formatting and the JSX markup from version
// control before making any functional edits. Also note the line comment
// "// const performanceDataState = proxy([]);" fused mid-line would comment
// out the rest of its physical line as written here.
//
// Performance-test dashboard component. Repeatedly benchmarks gRPC vs REST
// endpoints (push-upload or get-list mode, selected by `isPushTest`), folds
// per-step timing/CPU/payload-size metrics into the caller-owned valtio proxy
// array `performanceDataState`, and renders a filter panel, charts, a results
// table, and (get-list mode only) the last response's example book data.
import { Stack, Group, Card, Accordion, Text, Table, RingProgress, ScrollArea, Space } from "@mantine/core"; import {marginTopBottom} from "~/styles.ts"; import {PerformanceTestFilter} from "~/components/subs/PerformanceTestFilter.tsx"; import { ResponsiveTableContainer } from "~/components/subs/ResponsiveTableContainer"; import React, {useEffect, useRef, useState} from "react"; import type {BookInfo, PerformanceGetListData} from "~/utils/models.ts"; import { proxy, useSnapshot } from 'valtio'; import {convertNumber, showErrorMessage, sleep} from "~/utils/utils.ts"; import {apiPerformanceGetList, apiPerformanceUploadData} from "~/utils/compare_api.ts"; import Charts from "~/components/Charts.tsx"; import {getTestExampleData, pushTestExampleData} from "~/exapleData.ts"; import {BookTable} from "~/components/subs/BookInfoTable.tsx"; import {confirmExportData} from "~/components/subs/confirms.tsx"; // const performanceDataState = proxy([]); export default function Component({isPushTest, performanceDataState}: { isPushTest: boolean, performanceDataState: PerformanceGetListData[] }) { const [startsCount, setStartsCount] = useState(200) const [endsCount, setEndsCount] = useState(100000) const [addCount, setAddCount] = useState(200) const [avgCount, setAvgCount] = useState(3) const [maxExampleCount, setMaxExampleCount] = useState(30) const [isStarting, setIsStarting] = useState(false) const [requestStopping, setRequestStopping] = useState(false) const requestStoppingRef = useRef(requestStopping); requestStoppingRef.current = requestStopping; const [currentCount, setCurrentCount] = useState(0) const [currentIdx, setCurrentIdx] = useState(0) const [currentAvgCount, setCurrentAvgCount] = useState(0) const [hideServerDeserialize, setHideServerDeserialize] = useState(!isPushTest) const [hideServerSerialize, setHideServerSerialize] = useState(isPushTest) const [chartsSize, setChartsSize] = useState(6) const performanceDataSnap = useSnapshot(performanceDataState); useEffect(() 
// Mount-only effect: seeds the table/charts with bundled example data.
// `requestStoppingRef` mirrors `requestStopping` so the long-running async
// StartTest loop sees the latest value instead of a stale closure.
//
// This line holds: the mount effect; exportData (export snapshot via confirm
// dialog, filename depends on mode); updateFilter (bulk setter wired to the
// filter panel; its parameters intentionally shadow the state names);
// onClickStart (kicks off StartTest and resets `isStarting` in .finally —
// which is what covers the early `return`s inside StartTest); OnClickStop
// (requests cooperative stop; naming is inconsistent with onClickStart —
// NOTE(review): consider renaming, not changed here); getCurrentSingleStep
// (returns the row index, or — while the test is running and the row is the
// current one — a progress indicator whose JSX tag was stripped here: only
// "{currentAvgCount} / {avgCount}} />" survives, presumably a <RingProgress>
// with a label — confirm against version control); getExampleData (replaces
// the proxy array contents in place via splice(0) + push, keeping the array
// reference stable for valtio); and the start of StartTest, which validates
// addCount/startsCount, resets state, and loops request counts from
// startsCount to endsCount in addCount steps, checking requestStoppingRef
// before each of the avgCount repeat requests.
=> { getExampleData() }, []) const exportData = () => { if (!performanceDataSnap) return confirmExportData(performanceDataSnap as PerformanceGetListData[], isPushTest ? "performance_test_push_data" : "performance_test_get_list_data") } const updateFilter = (startsCount: number, endsCount: number, addCount: number, avgCount: number, maxExampleCount: number, hideServerDeserialize: boolean, hideServerSerialize: boolean, chartsSize: number) => { setStartsCount(startsCount); setEndsCount(endsCount); setAddCount(addCount); setAvgCount(avgCount); setMaxExampleCount(maxExampleCount); setHideServerDeserialize(hideServerDeserialize); setHideServerSerialize(hideServerSerialize); setChartsSize(chartsSize); } const onClickStart = () => { StartTest() .finally(() => setIsStarting(false)) } const OnClickStop = () => { if (!isStarting) return setRequestStopping(true) } const getCurrentSingleStep = (queryIdx: number | undefined) => { if (!isStarting) return queryIdx; if (queryIdx !== currentIdx) return queryIdx; let value = currentAvgCount / avgCount * 100; return {currentAvgCount} / {avgCount}} /> } const getExampleData = () => { performanceDataState.splice(0); let exampleData = isPushTest ? pushTestExampleData : getTestExampleData for (let i = 0; i < exampleData.length; i++) { // @ts-ignore performanceDataState.push(exampleData[i]) } } const StartTest = async () => { setIsStarting(true) setRequestStopping(false) if (addCount <= 0) { showErrorMessage("Step Count must be greater than 0", "Invalid Step Count"); return; } if (startsCount > endsCount) { showErrorMessage("Starts Count can not be greater than Ends Count", "Invalid Starts Count"); return; } let currentForCount = startsCount; let idx = 0; performanceDataState.splice(0); while (true) { setCurrentCount(currentForCount); setCurrentIdx(idx) for (let i = 0; i < avgCount; i++) { try { if (requestStoppingRef.current) { setRequestStopping(false) return } let response = isPushTest ? 
// One request per repeat: upload (push mode) or get-list (get mode). If a row
// with the same `idx` already exists, each gRPC metric is merged as
// (last + new) / 2. NOTE(review): repeated pairwise halving over avgCount
// samples yields an exponentially weighted mean biased toward later samples,
// not a true arithmetic mean — confirm this weighting is intentional.
// `lastData` aliases the same proxy element being written, so each field
// reads the pre-update value only because it is read before its own write.
await apiPerformanceUploadData(currentForCount, idx) : await apiPerformanceGetList(currentForCount, maxExampleCount, idx) if (response.success && response.data) { const checkLastIndex = performanceDataState.length - 1 if (performanceDataState.length > 0 && performanceDataState[checkLastIndex].idx === response.data.idx) { const lastData = performanceDataState[checkLastIndex] performanceDataState[checkLastIndex].grpc.client_networking_ping = (lastData.grpc.client_networking_ping + response.data.grpc.client_networking_ping) / 2; performanceDataState[checkLastIndex].grpc.client_request_cpu = (lastData.grpc.client_request_cpu + response.data.grpc.client_request_cpu) / 2; performanceDataState[checkLastIndex].grpc.client_request_time = (lastData.grpc.client_request_time + response.data.grpc.client_request_time) / 2; performanceDataState[checkLastIndex].grpc.networking_size.request_size = (lastData.grpc.networking_size.request_size + response.data.grpc.networking_size.request_size) / 2; performanceDataState[checkLastIndex].grpc.networking_size.response_size = (lastData.grpc.networking_size.response_size + response.data.grpc.networking_size.response_size) / 2; performanceDataState[checkLastIndex].grpc.server_deserialize_cpu = (lastData.grpc.server_deserialize_cpu + response.data.grpc.server_deserialize_cpu) / 2; performanceDataState[checkLastIndex].grpc.server_deserialize_time = (lastData.grpc.server_deserialize_time + response.data.grpc.server_deserialize_time) / 2; performanceDataState[checkLastIndex].grpc.server_serialize_cpu = (lastData.grpc.server_serialize_cpu + response.data.grpc.server_serialize_cpu) / 2; performanceDataState[checkLastIndex].grpc.server_serialize_time = (lastData.grpc.server_serialize_time + response.data.grpc.server_serialize_time) / 2; performanceDataState[checkLastIndex].grpc.server_protocol_total_time = (lastData.grpc.server_protocol_total_time + response.data.grpc.server_protocol_total_time) / 2; 
// Mirror of the gRPC merge above for the REST metrics of the same row; a new
// row is pushed only when the idx differs from the last stored row. Errors in
// a single request are reported via showErrorMessage and the loop continues;
// `finally` keeps the per-step progress counter (currentAvgCount) advancing
// either way. After the avgCount repeats: idx advances, a 200 ms pause yields
// between steps, and currentForCount grows by addCount, clamped to endsCount
// so the final step runs exactly at endsCount. `rows` (start of render) maps
// the snapshot into table rows — its JSX tags were stripped here.
performanceDataState[checkLastIndex].rest.client_networking_ping = (lastData.rest.client_networking_ping + response.data.rest.client_networking_ping) / 2; performanceDataState[checkLastIndex].rest.client_request_cpu = (lastData.rest.client_request_cpu + response.data.rest.client_request_cpu) / 2; performanceDataState[checkLastIndex].rest.client_request_time = (lastData.rest.client_request_time + response.data.rest.client_request_time) / 2; performanceDataState[checkLastIndex].rest.networking_size.request_size = (lastData.rest.networking_size.request_size + response.data.rest.networking_size.request_size) / 2; performanceDataState[checkLastIndex].rest.networking_size.response_size = (lastData.rest.networking_size.response_size + response.data.rest.networking_size.response_size) / 2; performanceDataState[checkLastIndex].rest.server_deserialize_cpu = (lastData.rest.server_deserialize_cpu + response.data.rest.server_deserialize_cpu) / 2; performanceDataState[checkLastIndex].rest.server_deserialize_time = (lastData.rest.server_deserialize_time + response.data.rest.server_deserialize_time) / 2; performanceDataState[checkLastIndex].rest.server_serialize_cpu = (lastData.rest.server_serialize_cpu + response.data.rest.server_serialize_cpu) / 2; performanceDataState[checkLastIndex].rest.server_serialize_time = (lastData.rest.server_serialize_time + response.data.rest.server_serialize_time) / 2; performanceDataState[checkLastIndex].rest.server_protocol_total_time = (lastData.rest.server_protocol_total_time + response.data.rest.server_protocol_total_time) / 2; } else { performanceDataState.push(response.data) } } } catch (error: any) { showErrorMessage(error.toString(), "Error during performance test"); } finally { setCurrentAvgCount(i + 1) } } idx++; setCurrentIdx(idx) await sleep(200) if (currentForCount >= endsCount) { break; } currentForCount += addCount; if (currentForCount > endsCount) { currentForCount = endsCount; } } } const rows = performanceDataSnap.map(item => ( 
// Row cells (tags stripped): per-row progress/index, request count, then
// paired "gRPC / REST" columns — ping (convertNumber ×1000, 2 dp), client
// CPU %, client time, server (de)serialization CPU/time, and payload sizes
// scaled to KB via the 1/1024 factor. Below the rows: the returned JSX —
// filter/config panel, Charts (fed the snapshot, cast back to
// PerformanceGetListData[], with the hide/size filter props), and the results
// table with its two-level "(gRPC / Rest)" headers. All element tags are
// missing from this view — recover them from version control.
{getCurrentSingleStep(item.idx)} {item.request_count} {convertNumber(item.grpc.client_networking_ping, 1000, 2)} / {convertNumber(item.rest.client_networking_ping, 1000, 2)} {convertNumber(item.grpc.client_request_cpu, 1, 2)} / {convertNumber(item.rest.client_request_cpu, 1, 2)} {convertNumber(item.grpc.client_request_time, 1, 2)} / {convertNumber(item.rest.client_request_time, 1, 2)} {convertNumber(item.grpc.server_deserialize_cpu, 1, 2)} / {convertNumber(item.rest.server_deserialize_cpu, 1, 2)} {convertNumber(item.grpc.server_deserialize_time, 1000, 4)} / {convertNumber(item.rest.server_deserialize_time, 1000, 4)} {convertNumber(item.grpc.server_serialize_cpu, 1, 2)} / {convertNumber(item.rest.server_serialize_cpu, 1, 2)} {convertNumber(item.grpc.server_serialize_time, 1000, 4)} / {convertNumber(item.rest.server_serialize_time, 1000, 4)} {convertNumber(item.grpc.networking_size.request_size, 1 / 1024, 4)} / {convertNumber(item.rest.networking_size.request_size, 1 / 1024, 4)} {convertNumber(item.grpc.networking_size.response_size, 1 / 1024, 4)} / {convertNumber(item.rest.networking_size.response_size, 1 / 1024, 4)} )) return ( Config item || []) as PerformanceGetListData[]} gridSpan={chartsSize} hideServerDeserialize={hideServerDeserialize} hideServerSerialize={hideServerSerialize} /> Client (gRPC / Rest) Server Deserialization (gRPC / Rest) Server Serialization (gRPC / Rest) Networking Pack Size (gRPC / Rest) Index Count Ping (ms) CPU (%) Time (s) CPU (%) Time (ms) CPU (%) Time (ms) Request (KB) Response (KB) {rows}
{/**/}
// Get-list mode only: show the example book data carried in the last
// snapshot row's rest.response_data (empty list otherwise), cast to
// BookInfo[]; the surrounding Card/BookTable tags were stripped here.
{!isPushTest && Example Books Data 0 ? performanceDataSnap[performanceDataSnap.length - 1].rest.response_data || [] : []) as BookInfo[]} /> }
) }