From b909e828cfd181eedcd08f1da30d3d26919b3bd1 Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Tue, 6 Jun 2023 13:53:44 -0500
Subject: [PATCH 1/3] chore: Testing using a cache to choose the best latency

---
 site/src/contexts/useProxyLatency.ts | 92 +++++++++++++++++++++++++++-
 1 file changed, 90 insertions(+), 2 deletions(-)

diff --git a/site/src/contexts/useProxyLatency.ts b/site/src/contexts/useProxyLatency.ts
index 9fde64142f5f9..84071c9975c00 100644
--- a/site/src/contexts/useProxyLatency.ts
+++ b/site/src/contexts/useProxyLatency.ts
@@ -24,6 +24,29 @@ const proxyLatenciesReducer = (
   state: Record<string, ProxyLatencyReport>,
   action: ProxyLatencyAction,
 ): Record<string, ProxyLatencyReport> => {
+  // TODO: We should probably not read from local storage on every action.
+  const history = loadStoredLatencies()
+  const proxyHistory = history[action.proxyID] || []
+  const minReport = proxyHistory.reduce((min, report) => {
+    if(min.latencyMS === 0) {
+      // Not yet set, so use the new report.
+      return report
+    }
+    if(min.latencyMS < report.latencyMS) {
+      return min
+    }
+    return report
+  }, {} as ProxyLatencyReport)
+
+  if(minReport.latencyMS > 0 && minReport.latencyMS < action.report.latencyMS) {
+    // The new report is slower than the min report, so use the min report.
+    return {
+      ...state,
+      [action.proxyID]: minReport,
+    }
+  }
+
+  // Use the new report
   return {
     ...state,
     [action.proxyID]: action.report,
@@ -113,14 +136,17 @@
         )
         latencyMS = entry.duration
       }
-      dispatchProxyLatencies({
+      const update = {
         proxyID: check.id,
         report: {
           latencyMS,
           accurate,
           at: new Date(),
         },
-      })
+      }
+      dispatchProxyLatencies(update)
+      // Also save to local storage to persist the latency across page refreshes.
+      updateStoredLatencies(update)

       return
     }
@@ -140,6 +166,10 @@
     const proxyRequests = Object.keys(proxyChecks).map((latencyURL) => {
       return axios.get(latencyURL, {
         withCredentials: false,
+        // Must add a custom header to make the request not a "simple request".
+        // We want to force a preflight request.
+        // https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#simple_requests
+        headers: { "X-LATENCY-CHECK": "true" },
       })
     })

@@ -156,6 +186,9 @@
       // At this point, we can be confident that all the proxy requests have been recorded
       // via the performance observer. So we can disconnect the observer.
       observer.disconnect()
+
+      // Local storage cleanup
+      garbageCollectStoredLatencies(proxies)
     })
   }, [proxies, latestFetchRequest])

@@ -164,3 +197,58 @@
   return {
     proxyLatencies,
     refetch,
   }
 }
+
+// Local storage functions
+
+// loadStoredLatencies will load the stored latencies from local storage.
+// Latencies are stored in local storage to minimize the impact of outliers.
+// If a single request is slow, we want to omit that latency check, and go with
+// a more accurate latency check.
+const loadStoredLatencies = (): Record<string, ProxyLatencyReport[]> => {
+  const str = localStorage.getItem("workspace-proxy-latencies")
+  if (!str) {
+    return {}
+  }
+
+  return JSON.parse(str)
+}
+
+const updateStoredLatencies =(action: ProxyLatencyAction): void => {
+  const latencies = loadStoredLatencies()
+  const reports = latencies[action.proxyID] || []
+
+  reports.push(action.report)
+  latencies[action.proxyID] = reports
+  localStorage.setItem("workspace-proxy-latencies", JSON.stringify(latencies))
+}
+
+// garbageCollectStoredLatencies will remove any latencies that are older than 1 week or latencies of proxies
+// that no longer exist. This is intended to keep the size of local storage down.
+const garbageCollectStoredLatencies = (regions: RegionsResponse): void => {
+  const latencies = loadStoredLatencies()
+  const now = Date.now()
+  console.log("Garbage collecting stored latencies")
+  const cleaned = cleanupLatencies(latencies, regions, new Date(now))
+
+  localStorage.setItem("workspace-proxy-latencies", JSON.stringify(cleaned))
+}
+
+const cleanupLatencies = (stored: Record<string, ProxyLatencyReport[]>, regions: RegionsResponse, now: Date): Record<string, ProxyLatencyReport[]> => {
+  Object.keys(stored).forEach((proxyID) => {
+    if(!regions.regions.find((region) => region.id === proxyID)) {
+      delete(stored[proxyID])
+      return
+    }
+    const reports = stored[proxyID]
+    const nowMS = now.getTime()
+    stored[proxyID] = reports.filter((report) => {
+      // Only keep the reports that are less than 1 week old.
+      return new Date(report.at).getTime() > nowMS - (1000 * 60 * 60 * 24 * 7)
+    })
+    // Only keep the 5 latest
+    stored[proxyID] = stored[proxyID].slice(-5)
+  })
+  return stored
+}
+
+

From 84bc00fa5ddb382a8f5c817bb025deda43abadd8 Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Tue, 6 Jun 2023 14:04:56 -0500
Subject: [PATCH 2/3] Allow storing more latencies if needed

---
 site/src/contexts/useProxyLatency.ts | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/site/src/contexts/useProxyLatency.ts b/site/src/contexts/useProxyLatency.ts
index 84071c9975c00..bb75677810b39 100644
--- a/site/src/contexts/useProxyLatency.ts
+++ b/site/src/contexts/useProxyLatency.ts
@@ -61,6 +61,16 @@ export const useProxyLatency = (
   refetch: () => void
   proxyLatencies: Record<string, ProxyLatencyReport>
 } => {
+  // maxStoredLatencies is the maximum number of latencies to store per proxy in local storage.
+  let maxStoredLatencies = 5
+  // The reason we pull this from local storage is so for development purposes, a user can manually
+  // set a larger number to collect data in their normal usage. This data can later be analyzed to come up
+  // with some better magic numbers.
+  const maxStoredLatenciesVar = localStorage.getItem("workspace-proxy-latencies-max")
+  if(maxStoredLatenciesVar) {
+    maxStoredLatencies = Number(maxStoredLatenciesVar)
+  }
+
   const [proxyLatencies, dispatchProxyLatencies] = useReducer(
     proxyLatenciesReducer,
     {},
@@ -188,7 +198,7 @@
       observer.disconnect()

       // Local storage cleanup
-      garbageCollectStoredLatencies(proxies)
+      garbageCollectStoredLatencies(proxies, maxStoredLatencies)
     })
   }, [proxies, latestFetchRequest])

@@ -224,16 +234,15 @@ const updateStoredLatencies =(action: ProxyLatencyAction): void => {
 // garbageCollectStoredLatencies will remove any latencies that are older than 1 week or latencies of proxies
 // that no longer exist. This is intended to keep the size of local storage down.
-const garbageCollectStoredLatencies = (regions: RegionsResponse): void => {
+const garbageCollectStoredLatencies = (regions: RegionsResponse, maxStored: number): void => {
   const latencies = loadStoredLatencies()
   const now = Date.now()
-  console.log("Garbage collecting stored latencies")
-  const cleaned = cleanupLatencies(latencies, regions, new Date(now))
+  const cleaned = cleanupLatencies(latencies, regions, new Date(now), maxStored)

   localStorage.setItem("workspace-proxy-latencies", JSON.stringify(cleaned))
 }

-const cleanupLatencies = (stored: Record<string, ProxyLatencyReport[]>, regions: RegionsResponse, now: Date): Record<string, ProxyLatencyReport[]> => {
+const cleanupLatencies = (stored: Record<string, ProxyLatencyReport[]>, regions: RegionsResponse, now: Date, maxStored: number): Record<string, ProxyLatencyReport[]> => {
   Object.keys(stored).forEach((proxyID) => {
     if(!regions.regions.find((region) => region.id === proxyID)) {
       delete(stored[proxyID])
       return
@@ -246,7 +255,7 @@ const cleanupLatencies = (stored: Record, regions:
       return new Date(report.at).getTime() > nowMS - (1000 * 60 * 60 * 24 * 7)
     })
     // Only keep the 5 latest
-    stored[proxyID] = stored[proxyID].slice(-5)
+    stored[proxyID] = stored[proxyID].slice(-1*maxStored)
   })
   return stored
 }

From 1ab07582d615bd2f57af05357c51290cea1d1188 Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Tue, 6 Jun 2023 14:19:27 -0500
Subject: [PATCH 3/3] Make fmt

---
 site/src/contexts/useProxyLatency.ts | 45 +++++++++++++++++----------
 1 file changed, 28 insertions(+), 17 deletions(-)

diff --git a/site/src/contexts/useProxyLatency.ts b/site/src/contexts/useProxyLatency.ts
index bb75677810b39..05f28e0f62bb7 100644
--- a/site/src/contexts/useProxyLatency.ts
+++ b/site/src/contexts/useProxyLatency.ts
@@ -28,17 +28,20 @@ const proxyLatenciesReducer = (
   const history = loadStoredLatencies()
   const proxyHistory = history[action.proxyID] || []
   const minReport = proxyHistory.reduce((min, report) => {
-    if(min.latencyMS === 0) {
+    if (min.latencyMS === 0) {
       // Not yet set, so use the new report.
       return report
     }
-    if(min.latencyMS < report.latencyMS) {
+    if (min.latencyMS < report.latencyMS) {
       return min
     }
     return report
   }, {} as ProxyLatencyReport)

-  if(minReport.latencyMS > 0 && minReport.latencyMS < action.report.latencyMS) {
+  if (
+    minReport.latencyMS > 0 &&
+    minReport.latencyMS < action.report.latencyMS
+  ) {
     // The new report is slower than the min report, so use the min report.
     return {
       ...state,
@@ -62,12 +65,14 @@ export const useProxyLatency = (
   proxyLatencies: Record<string, ProxyLatencyReport>
 } => {
   // maxStoredLatencies is the maximum number of latencies to store per proxy in local storage.
-  let maxStoredLatencies = 5
+  let maxStoredLatencies = 8
   // The reason we pull this from local storage is so for development purposes, a user can manually
   // set a larger number to collect data in their normal usage. This data can later be analyzed to come up
   // with some better magic numbers.
-  const maxStoredLatenciesVar = localStorage.getItem("workspace-proxy-latencies-max")
-  if(maxStoredLatenciesVar) {
+  const maxStoredLatenciesVar = localStorage.getItem(
+    "workspace-proxy-latencies-max",
+  )
+  if (maxStoredLatenciesVar) {
     maxStoredLatencies = Number(maxStoredLatenciesVar)
   }

@@ -179,7 +184,7 @@
         // Must add a custom header to make the request not a "simple request".
         // We want to force a preflight request.
         // https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#simple_requests
-        headers: { "X-LATENCY-CHECK": "true" },
+        headers: { "X-LATENCY-CHECK": "true" },
       })
     })

@@ -200,7 +205,7 @@ export const useProxyLatency = (
       // Local storage cleanup
       garbageCollectStoredLatencies(proxies, maxStoredLatencies)
     })
-  }, [proxies, latestFetchRequest])
+  }, [proxies, latestFetchRequest, maxStoredLatencies])

   return {
     proxyLatencies,
     refetch,
@@ -223,7 +228,7 @@ const loadStoredLatencies = (): Record => {
   return JSON.parse(str)
 }

-const updateStoredLatencies =(action: ProxyLatencyAction): void => {
+const updateStoredLatencies = (action: ProxyLatencyAction): void => {
   const latencies = loadStoredLatencies()
   const reports = latencies[action.proxyID] || []

@@ -234,7 +239,10 @@ const updateStoredLatencies = (action: ProxyLatencyAction): void => {

 // garbageCollectStoredLatencies will remove any latencies that are older than 1 week or latencies of proxies
 // that no longer exist. This is intended to keep the size of local storage down.
-const garbageCollectStoredLatencies = (regions: RegionsResponse, maxStored: number): void => {
+const garbageCollectStoredLatencies = (
+  regions: RegionsResponse,
+  maxStored: number,
+): void => {
   const latencies = loadStoredLatencies()
   const now = Date.now()
   const cleaned = cleanupLatencies(latencies, regions, new Date(now), maxStored)
@@ -242,22 +250,25 @@ const garbageCollectStoredLatencies = (regions: RegionsResponse, maxStored: numb
   localStorage.setItem("workspace-proxy-latencies", JSON.stringify(cleaned))
 }

-const cleanupLatencies = (stored: Record<string, ProxyLatencyReport[]>, regions: RegionsResponse, now: Date, maxStored: number): Record<string, ProxyLatencyReport[]> => {
+const cleanupLatencies = (
+  stored: Record<string, ProxyLatencyReport[]>,
+  regions: RegionsResponse,
+  now: Date,
+  maxStored: number,
+): Record<string, ProxyLatencyReport[]> => {
   Object.keys(stored).forEach((proxyID) => {
-    if(!regions.regions.find((region) => region.id === proxyID)) {
-      delete(stored[proxyID])
+    if (!regions.regions.find((region) => region.id === proxyID)) {
+      delete stored[proxyID]
       return
     }
     const reports = stored[proxyID]
     const nowMS = now.getTime()
     stored[proxyID] = reports.filter((report) => {
       // Only keep the reports that are less than 1 week old.
-      return new Date(report.at).getTime() > nowMS - (1000 * 60 * 60 * 24 * 7)
+      return new Date(report.at).getTime() > nowMS - 1000 * 60 * 60 * 24 * 7
     })
     // Only keep the 5 latest
-    stored[proxyID] = stored[proxyID].slice(-1*maxStored)
+    stored[proxyID] = stored[proxyID].slice(-1 * maxStored)
   })
   return stored
 }
-
-
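
Reviewer/testing note (not part of the patches above): the series caches latency reports per proxy in localStorage and reads a dev-only override key, "workspace-proxy-latencies-max". A minimal sketch of how this could be exercised from the browser devtools console while running the dashboard locally; the storage key names come from the diff, the sample value of 20 and the exact inspection steps are only illustrative assumptions:

  // Keep more samples per proxy than the default (dev-only override added in PATCH 2/3).
  localStorage.setItem("workspace-proxy-latencies-max", "20")

  // After a few latency checks, inspect the persisted history.
  // Stored shape per the diff: { [proxyID]: ProxyLatencyReport[] }, one array of reports per proxy.
  const stored = JSON.parse(
    localStorage.getItem("workspace-proxy-latencies") ?? "{}",
  )
  console.log(stored)

  // Removing the cache makes the next refetch fall back to fresh measurements only,
  // since loadStoredLatencies() returns {} when the key is absent.
  localStorage.removeItem("workspace-proxy-latencies")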