8000 chore: use a cache to select best latency by Emyrk · Pull Request #7879 · coder/coder · GitHub
[go: up one dir, main page]

Skip to content

chore: use a cache to select best latency #7879

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Jun 7, 2023
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
chore: Testing using a cache to choose the best latency
  • Loading branch information
Emyrk committed Jun 6, 2023
commit b909e828cfd181eedcd08f1da30d3d26919b3bd1
92 changes: 90 additions & 2 deletions site/src/contexts/useProxyLatency.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,29 @@ const proxyLatenciesReducer = (
state: Record<string, ProxyLatencyReport>,
action: ProxyLatencyAction,
): Record<string, ProxyLatencyReport> => {
// TODO: We should probably not read from local storage on every action.
const history = loadStoredLatencies()
const proxyHistory = history[action.proxyID] || []
const minReport = proxyHistory.reduce((min, report) => {
if(min.latencyMS === 0) {
// Not yet set, so use the new report.
return report
}
if(min.latencyMS < report.latencyMS) {
return min
}
return report
}, {} as ProxyLatencyReport)

if(minReport.latencyMS > 0 && minReport.latencyMS < action.report.latencyMS) {
// The new report is slower then the min report, so use the min report.
return {
...state,
[action.proxyID]: minReport,
}
}

// Use the new report
return {
...state,
[action.proxyID]: action.report,
Expand Down Expand Up @@ -113,14 +136,17 @@ export const useProxyLatency = (
)
latencyMS = entry.duration
}
dispatchProxyLatencies({
const update = {
proxyID: check.id,
report: {
latencyMS,
accurate,
at: new Date(),
},
})
}
dispatchProxyLatencies(update)
// Also save to local storage to persist the latency across page refreshes.
updateStoredLatencies(update)

return
}
Expand All @@ -140,6 +166,10 @@ export const useProxyLatency = (
const proxyRequests = Object.keys(proxyChecks).map((latencyURL) => {
return axios.get(latencyURL, {
withCredentials: false,
// Must add a custom header to make the request not a "simple request".
// We want to force a preflight request.
// https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#simple_requests
headers: { "X-LATENCY-CHECK": "true" },
})
})

Expand All @@ -156,6 +186,9 @@ export const useProxyLatency = (
// At this point, we can be confident that all the proxy requests have been recorded
// via the performance observer. So we can disconnect the observer.
observer.disconnect()

// Local storage cleanup
garbageCollectStoredLatencies(proxies)
})
}, [proxies, latestFetchRequest])

Expand All @@ -164,3 +197,58 @@ export const useProxyLatency = (
refetch,
}
}

// Local storage functions

// loadStoredLatencies will load the stored latencies from local storage.
// Latencies are stored in local storage to minimize the impact of outliers.
// If a single request is slow, we want to omit that latency check, and go with
// a more accurate latency check.
// Returns an empty record when nothing is stored yet, or when the stored
// value is corrupted (e.g. hand-edited or written by an incompatible version)
// — a broken cache entry must never break latency checks entirely.
const loadStoredLatencies = (): Record<string, ProxyLatencyReport[]> => {
  const str = localStorage.getItem("workspace-proxy-latencies")
  if (!str) {
    return {}
  }

  try {
    // NOTE(review): the parsed value is assumed to match the stored shape;
    // `at` round-trips as an ISO string and callers re-parse it with
    // `new Date(report.at)` — confirm if the shape ever changes.
    return JSON.parse(str)
  } catch {
    // Corrupted JSON: treat it the same as "no history".
    return {}
  }
}

// updateStoredLatencies appends the report carried by `action` to that
// proxy's persisted history and writes the whole history back to local
// storage, so latency data survives page refreshes.
const updateStoredLatencies = (action: ProxyLatencyAction): void => {
  const history = loadStoredLatencies()
  const updated = [...(history[action.proxyID] ?? []), action.report]

  history[action.proxyID] = updated
  localStorage.setItem("workspace-proxy-latencies", JSON.stringify(history))
}

// garbageCollectStoredLatencies will remove any latencies that are older than
// 1 week or latencies of proxies that no longer exist. This is intended to
// keep the size of local storage down.
const garbageCollectStoredLatencies = (regions: RegionsResponse): void => {
  const latencies = loadStoredLatencies()
  const cleaned = cleanupLatencies(latencies, regions, new Date())

  localStorage.setItem("workspace-proxy-latencies", JSON.stringify(cleaned))
}

// cleanupLatencies returns a pruned copy of `stored`:
//   - reports for proxies that are not present in `regions` are dropped
//   - reports older than 1 week (relative to `now`) are dropped
//   - at most the 5 most recent reports are kept per proxy
// Unlike an in-place filter, this builds a new record and leaves the input
// untouched, so callers can safely reuse the original.
const cleanupLatencies = (
  stored: Record<string, ProxyLatencyReport[]>,
  regions: RegionsResponse,
  now: Date,
): Record<string, ProxyLatencyReport[]> => {
  const oneWeekMS = 1000 * 60 * 60 * 24 * 7
  const cutoffMS = now.getTime() - oneWeekMS
  const cleaned: Record<string, ProxyLatencyReport[]> = {}

  for (const [proxyID, reports] of Object.entries(stored)) {
    // Drop the whole entry when the proxy no longer exists.
    if (!regions.regions.some((region) => region.id === proxyID)) {
      continue
    }
    // Only keep reports from the last week, and of those, the 5 latest.
    // `report.at` round-trips through JSON as a string, hence the re-parse.
    cleaned[proxyID] = reports
      .filter((report) => new Date(report.at).getTime() > cutoffMS)
      .slice(-5)
  }

  return cleaned
}


0