rng_utils = {
function mulberry32(seed) {
return function() {
seed |= 0; seed = seed + 0x6D2B79F5 | 0
let t = Math.imul(seed ^ seed >>> 15, 1 | seed)
t = t + Math.imul(t ^ t >>> 7, 61 | t) ^ t
return ((t ^ t >>> 14) >>> 0) / 4294967296
}
}
function boxMuller(rng) {
const u1 = rng(), u2 = rng()
return Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2)
}
function gammaSample(rng, shape) {
if (shape < 1) return gammaSample(rng, shape + 1) * Math.pow(rng(), 1 / shape)
const d = shape - 1 / 3, c = 1 / Math.sqrt(9 * d)
while (true) {
let x, v
do { x = boxMuller(rng); v = 1 + c * x } while (v <= 0)
v = v * v * v; const u = rng()
if (u < 1 - 0.0331 * (x * x) * (x * x)) return d * v
if (Math.log(u) < 0.5 * x * x + d * (1 - v + Math.log(v))) return d * v
}
}
function stdTSample(rng, d) {
const normal = boxMuller(rng)
const chi2 = 2 * gammaSample(rng, d / 2)
return normal / Math.sqrt(chi2 / d) * Math.sqrt((d - 2) / d)
}
return { mulberry32, boxMuller, stdTSample }
}FHS with DCC
Interactive exploration of Filtered Historical Simulation combined with Dynamic Conditional Correlations for portfolio risk with time-varying dependence
When correlations are dynamic, the constant-correlation multivariate FHS must be extended. Historical shocks were correlated according to their historical (time-varying) correlations, so to use them in forward simulation with different current correlations, we must first uncorrelate them and then re-correlate with the simulated dynamic correlations (Christoffersen 2012, chap. 8; Engle 2002).
Recall that the covariance matrix decomposes as \(\Sigma_{t+1} = D_{t+1}\,\Upsilon_{t+1}\,D_{t+1}\), where \(D_{t+1}\) is the diagonal matrix of GARCH standard deviations and \(\Upsilon_{t+1}\) is the DCC correlation matrix. \(\Upsilon^{1/2}\) denotes its Cholesky decomposition (the matrix square root used to correlate shocks).
The DCC-FHS procedure:
- Uncorrelate historical shocks: \(\hat{z}^u_{t+1-\tau} = \Upsilon^{-1/2}_{t+1-\tau}\hat{z}_{t+1-\tau}\)
- Draw uncorrelated shock vectors (entire vector from the same day)
- Re-correlate with current correlations: \(\hat{z}_{i,t+1} = \Upsilon^{1/2}_{t+1}\hat{z}^u_{i,1}\)
- Compute returns: \(\hat{r}_{i,t+1} = D_{t+1}\hat{z}_{i,t+1}\)
- Update both GARCH variances and DCC correlations using simulated shocks
- Repeat for subsequent days \(k = 2, \ldots, K\)
Note
Why uncorrelate? If we drew correlated historical shocks directly and applied them with the current correlation matrix, the shocks would be “double-correlated”: once from their historical correlation structure and once from the re-correlation step. Uncorrelating first removes the historical dependence, creating a clean database from which to re-impose the current (and dynamically evolving) correlation.
qnorm = {
const a1 = -3.969683028665376e+01, a2 = 2.209460984245205e+02
const a3 = -2.759285104469687e+02, a4 = 1.383577518672690e+02
const a5 = -3.066479806614716e+01, a6 = 2.506628277459239e+00
const b1 = -5.447609879822406e+01, b2 = 1.615858368580409e+02
const b3 = -1.556989798598866e+02, b4 = 6.680131188771972e+01
const b5 = -1.328068155288572e+01
const c1 = -7.784894002430293e-03, c2 = -3.223964580411365e-01
const c3 = -2.400758277161838e+00, c4 = -2.549732539343734e+00
const c5 = 4.374664141464968e+00, c6 = 2.938163982698783e+00
const d1 = 7.784695709041462e-03, d2 = 3.224671290700398e-01
const d3 = 2.445134137142996e+00, d4 = 3.754408661907416e+00
const pLow = 0.02425, pHigh = 1 - pLow
return p => {
if (p <= 0) return -Infinity
if (p >= 1) return Infinity
if (p < pLow) {
const q = Math.sqrt(-2 * Math.log(p))
return (((((c1*q+c2)*q+c3)*q+c4)*q+c5)*q+c6) / ((((d1*q+d2)*q+d3)*q+d4)*q+1)
}
if (p <= pHigh) {
const q = p - 0.5, r = q * q
return (((((a1*r+a2)*r+a3)*r+a4)*r+a5)*r+a6)*q / (((((b1*r+b2)*r+b3)*r+b4)*r+b5)*r+1)
}
const q = Math.sqrt(-2 * Math.log(1 - p))
return -(((((c1*q+c2)*q+c3)*q+c4)*q+c5)*q+c6) / ((((d1*q+d2)*q+d3)*q+d4)*q+1)
}
}legend = (items) => {
// Interactive legend element (Observable "view" pattern): clicking an item
// toggles its series' visibility. el.value is a Set of hidden keys and an
// "input" event is dispatched so downstream cells re-run.
const el = document.createElement("div")
el.style.cssText = "display:flex; flex-wrap:wrap; margin-top:-4px; margin-bottom:6px;"
const hidden = new Set()
for (const d of items) {
// Each item: {label, color, key?, type?: "dashed"|"area"}; key defaults to the label.
const key = d.key || d.label
const span = document.createElement("span")
span.style.cssText = "display:inline-flex; align-items:center; gap:4px; margin-right:14px; cursor:pointer; user-select:none; transition:opacity 0.15s;"
// Swatch mirrors the mark style used in the plots: dashed line, filled area, or solid line.
let swatchHTML
if (d.type === "dashed") swatchHTML = `<svg width="22" height="12"><line x1="0" y1="6" x2="22" y2="6" stroke="${d.color}" stroke-width="2" stroke-dasharray="4 2"/></svg>`
else if (d.type === "area") swatchHTML = `<svg width="14" height="14"><rect width="14" height="14" fill="${d.color}" opacity="0.3"/></svg>`
else swatchHTML = `<svg width="22" height="12"><line x1="0" y1="6" x2="22" y2="6" stroke="${d.color}" stroke-width="2"/></svg>`
span.innerHTML = `${swatchHTML}<span style="font-size:0.82rem;">${d.label}</span>`
span.addEventListener("click", () => {
// Toggle hidden state and reflect it visually (dimmed + strikethrough).
const nowHidden = !hidden.has(key)
if (nowHidden) hidden.add(key); else hidden.delete(key)
span.style.opacity = nowHidden ? "0.35" : "1"
span.querySelector("span").style.textDecoration = nowHidden ? "line-through" : "none"
// Publish a fresh Set so reactive consumers observe a new value each click.
el.value = new Set(hidden)
el.dispatchEvent(new Event("input", {bubbles: true}))
})
el.appendChild(span)
}
// Initial value: nothing hidden.
el.value = new Set(hidden)
return el
}The uncorrelate/re-correlate pipeline
This illustration shows the three stages of the DCC-FHS shock transformation and compares portfolio risk under constant vs. dynamic correlations.
Tip
How to experiment
Set the current conditional correlation \(\rho_{12,t+1}\) to a high “crisis” value (e.g., 0.80) and compare with a “normal” value (0.30) in the stress scenario tab. This is the one-day-ahead correlation forecast from the DCC model at the end of day \(t\), analogous to the GARCH volatility forecast \(\sigma_{i,t+1}\). Increase the simulation horizon to see how the DCC correlation mean-reverts toward \(\bar{\rho}\), reducing the gap between the constant and dynamic approaches. Try different DCC parameters: higher \(\alpha\) makes the correlation more reactive to shocks; higher \(\beta\) makes it more persistent.
{
if (fdDccAlpha + fdDccBeta >= 1) {
return html`<div style="background:#fff3cd;border:1px solid #ffc107;border-radius:6px;padding:8px 12px;margin-bottom:8px;font-size:0.85rem;color:#856404;"><strong>Warning:</strong> α + β = ${fmt(fdDccAlpha + fdDccBeta, 2)} ≥ 1. The mean-reverting DCC requires α + β < 1 for stationarity. The correlation process will not mean-revert to ρ̄ and may become explosive.</div>`
}
return html`<span></span>`
}// Generate historical data with time-varying correlations (DCC)
fdHist = {
fdSeed
const rng = rng_utils.mulberry32(42 + fdSeed)
const N = 1500
const rhoBar = fdRhoBar
const dccA = fdDccAlpha, dccB = fdDccBeta
const R1 = new Array(N), R2 = new Array(N)
const sig2_1 = new Array(N), sig2_2 = new Array(N)
const z1 = new Array(N), z2 = new Array(N)
const rho = new Array(N)
const zu1 = new Array(N), zu2 = new Array(N)
sig2_1[0] = fdVL1; sig2_2[0] = fdVL2
// DCC state
let q11 = 1, q22 = 1, q12 = rhoBar
for (let t = 0; t < N; t++) {
// Current correlation from DCC
rho[t] = q12 / Math.sqrt(q11 * q22)
// Generate uncorrelated shocks
const u1 = rng_utils.boxMuller(rng)
const u2 = rng_utils.boxMuller(rng)
zu1[t] = u1; zu2[t] = u2
// Correlate with current rho via Cholesky
const L = cholL(rho[t])
const s1 = L.a * u1 + L.b * u2
const s2 = L.c * u1 + L.d * u2
R1[t] = Math.sqrt(sig2_1[t]) * s1
R2[t] = Math.sqrt(sig2_2[t]) * s2
z1[t] = s1 // standardized returns
z2[t] = s2
// Update GARCH
if (t < N - 1) {
sig2_1[t + 1] = fdOmega1 + fdAlpha1 * R1[t]**2 + fdBeta1 * sig2_1[t]
sig2_2[t + 1] = fdOmega2 + fdAlpha2 * R2[t]**2 + fdBeta2 * sig2_2[t]
}
// Update DCC
q11 = 1 * (1 - dccA - dccB) + dccA * z1[t]**2 + dccB * q11
q22 = 1 * (1 - dccA - dccB) + dccA * z2[t]**2 + dccB * q22
q12 = rhoBar * (1 - dccA - dccB) + dccA * z1[t] * z2[t] + dccB * q12
}
// Uncorrelate historical shocks for FHS database
const zU1 = new Array(N), zU2 = new Array(N)
for (let t = 0; t < N; t++) {
const Li = cholInvL(rho[t])
zU1[t] = Li.a * z1[t] + Li.b * z2[t]
zU2[t] = Li.c * z1[t] + Li.d * z2[t]
}
const curSig1 = Math.sqrt(sig2_1[N - 1])
const curSig2 = Math.sqrt(sig2_2[N - 1])
return { R1, R2, sig2_1, sig2_2, z1, z2, zu1: zU1, zu2: zU2, rho, curSig1, curSig2, N }
}// Run DCC-FHS and constant-correlation FHS
fdResults = {
// Forward-simulates K-day portfolio returns under two schemes sharing the
// same bootstrap draws: (a) DCC-FHS, which re-correlates the uncorrelated
// shock database with the simulated dynamic correlation, and (b) constant-
// correlation FHS, which reuses same-day historical shock pairs unchanged.
const rng = rng_utils.mulberry32(500 + fdSeed)
const { zu1, zu2, z1, z2, curSig1, curSig2, N } = fdHist
const FH = fdFH, K = fdK
const w1 = fdW1, w2 = 1 - fdW1
const p = fdP / 100
const rho0 = fdRho0
const rhoBar = fdRhoBar
const dccA = fdDccAlpha, dccB = fdDccBeta
// Arrays for simulated correlation paths
const corrPaths = []
const pfRetDCC = new Array(FH)
const pfRetConst = new Array(FH)
for (let i = 0; i < FH; i++) {
let cumDCC = 0, cumConst = 0
// Both schemes start each path from the same current GARCH variances.
let s2_1d = curSig1**2, s2_2d = curSig2**2
let s2_1c = curSig1**2, s2_2c = curSig2**2
let q11 = 1, q22 = 1, q12 = rho0 // q12 initialized to starting rho
// Scale q11, q22 so normalization gives rho0
// Actually q12/sqrt(q11*q22) = rho0 when q11=q22=1
let curRho = rho0
const corrPath = new Array(K)
for (let k = 0; k < K; k++) {
// Bootstrap an entire day (both assets' shocks from the same index idx).
const idx = Math.floor(rng() * N)
// DCC-FHS: re-correlate uncorrelated shocks with current rho
const L = cholL(curRho)
const zd1 = L.a * zu1[idx] + L.b * zu2[idx]
const zd2 = L.c * zu1[idx] + L.d * zu2[idx]
const rd1 = Math.sqrt(s2_1d) * zd1
const rd2 = Math.sqrt(s2_2d) * zd2
cumDCC += w1 * rd1 + w2 * rd2
// Update GARCH
s2_1d = fdOmega1 + fdAlpha1 * rd1**2 + fdBeta1 * s2_1d
s2_2d = fdOmega2 + fdAlpha2 * rd2**2 + fdBeta2 * s2_2d
// Update DCC
q11 = 1 * (1 - dccA - dccB) + dccA * zd1**2 + dccB * q11
q22 = 1 * (1 - dccA - dccB) + dccA * zd2**2 + dccB * q22
q12 = rhoBar * (1 - dccA - dccB) + dccA * zd1 * zd2 + dccB * q12
curRho = q12 / Math.sqrt(q11 * q22)
// Numerical safety: keep rho strictly inside (-1, 1) so cholL stays valid.
curRho = Math.max(-0.999, Math.min(0.999, curRho))
corrPath[k] = curRho
// Constant-correlation FHS: use same-day draws from original z1, z2
const rc1 = Math.sqrt(s2_1c) * z1[idx]
const rc2 = Math.sqrt(s2_2c) * z2[idx]
cumConst += w1 * rc1 + w2 * rc2
s2_1c = fdOmega1 + fdAlpha1 * rc1**2 + fdBeta1 * s2_1c
s2_2c = fdOmega2 + fdAlpha2 * rc2**2 + fdBeta2 * s2_2c
}
pfRetDCC[i] = cumDCC
pfRetConst[i] = cumConst
if (i < 200) corrPaths.push(corrPath) // store subset for plotting
}
// Compute risk measures
// Empirical VaR: the p-quantile of sorted returns, sign-flipped so losses
// are reported as positive numbers; ES averages the tail up to that index.
const sortedDCC = pfRetDCC.slice().sort((a, b) => a - b)
const sortedConst = pfRetConst.slice().sort((a, b) => a - b)
const posIdx = Math.max(0, Math.ceil(FH * p) - 1)
const varDCC = -sortedDCC[posIdx]
const varConst = -sortedConst[posIdx]
const tailDCC = sortedDCC.slice(0, posIdx + 1)
const tailConst = sortedConst.slice(0, posIdx + 1)
const esDCC = -tailDCC.reduce((a, b) => a + b, 0) / tailDCC.length
const esConst = -tailConst.reduce((a, b) => a + b, 0) / tailConst.length
// Mean correlation path
// Per-day mean and 10th/90th percentile across the stored subset of paths.
const meanCorrPath = []
const p10 = [], p90 = []
for (let k = 0; k < K; k++) {
const vals = corrPaths.map(cp => cp[k]).sort((a, b) => a - b)
const n = vals.length
meanCorrPath.push(vals.reduce((a, b) => a + b, 0) / n)
p10.push(vals[Math.floor(n * 0.1)])
p90.push(vals[Math.floor(n * 0.9)])
}
return {
pfRetDCC, pfRetConst, sortedDCC, sortedConst,
varDCC, varConst, esDCC, esConst,
corrPaths, meanCorrPath, p10, p90,
p, K, FH
}
}- Pipeline visualization
- Simulated correlation paths
- Constant vs. dynamic correlation
- Stress scenario
- Summary
{
// Three-panel scatter illustration of the shock-transformation pipeline:
// historical correlated shocks → uncorrelated database → re-correlated with ρ₀.
const { zu1, zu2, z1, z2 } = fdHist
// Cap the number of plotted points to keep the scatter readable and fast.
const N = Math.min(fdHist.N, 800)
const lim = 5
// Original correlated
const corrData = Array.from({length: N}, (_, i) => ({ z1: z1[i], z2: z2[i] }))
// Uncorrelated
const uncorrData = Array.from({length: N}, (_, i) => ({ z1: zu1[i], z2: zu2[i] }))
// Re-correlated with current rho
const L = cholL(fdRho0)
const recorrData = Array.from({length: N}, (_, i) => ({
z1: L.a * zu1[i] + L.b * zu2[i],
z2: L.c * zu1[i] + L.d * zu2[i]
}))
// Helper: render one fixed-domain scatter panel with a title heading.
const makePlot = (data, color, title) => {
const plot = Plot.plot({
width: 280, height: 280, marginLeft: 40,
x: { label: "z₁", domain: [-lim, lim], grid: true, ticks: [-4, -2, 0, 2, 4] },
y: { label: "z₂", domain: [-lim, lim], grid: true, ticks: [-4, -2, 0, 2, 4] },
marks: [
Plot.ruleX([0], { stroke: "#eee" }),
Plot.ruleY([0], { stroke: "#eee" }),
Plot.dot(data, { x: "z1", y: "z2", r: 1.2, fill: color, fillOpacity: 0.2 })
]
})
const div = html`<div style="flex:1;min-width:250px;max-width:300px;text-align:center;">
<h4 style="margin-bottom:4px;font-size:0.9rem;">${title}</h4>
</div>`
div.appendChild(plot)
return div
}
// Flex container lets the three panels wrap on narrow screens.
const container = html`<div style="display:flex;gap:12px;flex-wrap:wrap;justify-content:center;"></div>`
container.appendChild(makePlot(corrData, "#999", "1. Historical correlated"))
container.appendChild(makePlot(uncorrData, "#2e7d32", "2. After uncorrelation"))
container.appendChild(makePlot(recorrData, "#2f71d5", `3. Re-correlated (ρ₀ = ${fmt(fdRho0, 2)})`))
return container
}html`<p style="color:#666;font-size:0.85rem;"><strong>Step 1:</strong> Historical standardized shocks with time-varying correlation. <strong>Step 2:</strong> After applying Υ<sup>−1/2</sup><sub>t</sub> to each day's shocks, the cloud becomes approximately circular (uncorrelated). <strong>Step 3:</strong> After applying Υ<sup>1/2</sup><sub>t+1</sub> with the current starting correlation ρ₀ = ${fmt(fdRho0, 2)}, the cloud tilts to match the desired dependence.</p>`{
const K = fdK
// For a one-day horizon the DCC correlation is known in closed form, so
// there are no stochastic paths to draw — show an explanatory card instead.
if (K <= 1) {
return html`<div style="background:#e3f2fd;border-radius:8px;padding:16px;text-align:center;margin:20px 0;">
<div style="font-size:1.1rem;font-weight:600;margin-bottom:6px;">One-day-ahead correlation is deterministic</div>
<p style="font-size:0.9rem;color:#555;max-width:500px;margin:0 auto;">For K = 1, the correlation ρ₁₂,ₜ₊₁ is already known from the DCC model at the end of day t. Correlation paths only become meaningful for K ≥ 2, where future correlations depend on simulated shocks and have no closed-form solution. Increase the horizon to see the paths.</p>
</div>`
}
// Long-format data for up to 50 individual simulated correlation paths.
const pathData = []
const nShow = Math.min(fdResults.corrPaths.length, 50)
for (let i = 0; i < nShow; i++) {
for (let k = 0; k < K; k++) {
pathData.push({ day: k + 1, rho: fdResults.corrPaths[i][k], path: i })
}
}
// Mean path and 10th–90th percentile band computed upstream in fdResults.
const meanData = fdResults.meanCorrPath.map((r, k) => ({ day: k + 1, rho: r }))
const bandData = fdResults.meanCorrPath.map((r, k) => ({
day: k + 1, lo: fdResults.p10[k], hi: fdResults.p90[k]
}))
// Dynamic y-axis: encompass all paths, reference lines, and add padding
const allRho = pathData.map(d => d.rho)
const yVals = [...allRho, fdRhoBar, fdRho0, ...bandData.map(d => d.lo), ...bandData.map(d => d.hi)]
const yMin = Math.min(...yVals), yMax = Math.max(...yVals)
const yPad = Math.max(0.05, (yMax - yMin) * 0.15)
// Clamp the padded domain to the valid correlation range [-1, 1].
const yDomain = [Math.max(-1, yMin - yPad), Math.min(1, yMax + yPad)]
return Plot.plot({
height: 400, marginLeft: 60,
x: { label: "Day ahead", grid: false },
y: { label: "Simulated ρ₁₂", grid: true, domain: yDomain },
marks: [
Plot.ruleY([fdRhoBar], { stroke: "#d62728", strokeWidth: 1, strokeDasharray: "6 3" }),
Plot.ruleY([fdRho0], { stroke: "#2f71d5", strokeWidth: 1, strokeDasharray: "3 3" }),
// Individual paths
Plot.line(pathData, { x: "day", y: "rho", z: "path", stroke: "#aaa", strokeWidth: 0.3, strokeOpacity: 0.4 }),
// Confidence band
Plot.areaY(bandData, { x: "day", y1: "lo", y2: "hi", fill: "#2f71d5", fillOpacity: 0.15 }),
// Mean path
Plot.line(meanData, { x: "day", y: "rho", stroke: "#2f71d5", strokeWidth: 2.5 })
]
})
}{
// Caption for the correlation-path chart; suppressed when K ≤ 1 because the
// chart cell replaces itself with an explanatory card in that case.
if (fdK <= 1) return html`<span></span>`
return html`<p style="color:#666;font-size:0.85rem;">Simulated correlation paths over the ${fdK}-day horizon. The <span style="color:#2f71d5;font-weight:700;">blue line</span> is the mean path, the <span style="color:#2f71d5;">shaded band</span> covers the 10th to 90th percentile. Gray lines show individual simulation paths. The <span style="color:#2f71d5;">dashed blue line</span> marks the starting correlation ρ₀ = ${fmt(fdRho0, 2)}, and the <span style="color:#d62728;">dashed red line</span> marks the unconditional level ρ̄ = ${fmt(fdRhoBar, 2)} toward which the mean path gravitates.</p>`
}{
// Overlaid histograms of simulated K-day portfolio returns for the two
// approaches; series visibility is driven by the interactive legend
// (fdCompLegend holds the Set of currently hidden series keys).
const h = fdCompLegend
const marks = [Plot.ruleY([0])]
if (!h.has("dcc"))
marks.push(Plot.rectY(fdResults.pfRetDCC, Plot.binX({ y: "count" }, {
x: d => d, fill: "#2f71d5", fillOpacity: 0.4, thresholds: 80
})))
if (!h.has("const"))
marks.push(Plot.rectY(fdResults.pfRetConst, Plot.binX({ y: "count" }, {
x: d => d, fill: "#e67e22", fillOpacity: 0.3, thresholds: 80
})))
// Vertical rules mark each approach's VaR (negated: VaR is a loss quantile).
if (!h.has("dcc"))
marks.push(Plot.ruleX([-fdResults.varDCC], { stroke: "#2f71d5", strokeWidth: 2 }))
if (!h.has("const"))
marks.push(Plot.ruleX([-fdResults.varConst], { stroke: "#e67e22", strokeWidth: 2, strokeDasharray: "6 3" }))
return Plot.plot({
height: 400, marginLeft: 60,
x: { label: `${fdK}-day portfolio return`, grid: false, tickFormat: pctFmt },
y: { label: "Count", grid: true },
marks
})
}{
// Caption: relative (percentage) difference between DCC and constant-correlation VaR.
const diff = (fdResults.varDCC - fdResults.varConst) / fdResults.varConst * 100
const higher = diff > 0 ? "higher" : "lower"
return html`<p style="color:#666;font-size:0.85rem;">Distribution of ${fdK}-day portfolio returns. <span style="color:#2f71d5;font-weight:700;">DCC-FHS</span> accounts for dynamic correlations starting at ρ₀ = ${fmt(fdRho0, 2)}. <span style="color:#e67e22;font-weight:700;">Constant-correlation FHS</span> uses historical same-day draws without adjusting for the current correlation. DCC VaR is ${fmt(Math.abs(diff), 1)}% ${higher} than constant-correlation VaR.</p>`
}// Compare risk at different starting correlations
fdStress = {
// Re-runs the DCC-FHS simulation under four alternative starting
// correlations ρ₀, holding volatilities, weights, and shock database fixed,
// to isolate how the initial correlation level moves portfolio VaR and ES.
const rng = rng_utils.mulberry32(700 + fdSeed)
const { zu1, zu2, curSig1, curSig2, N } = fdHist
// Use at least 5000 paths so the scenario comparison is stable.
const FH = Math.max(fdFH, 5000), K = fdK
const w1 = fdW1, w2 = 1 - fdW1
const p = fdP / 100
const rhoBar = fdRhoBar
const dccA = fdDccAlpha, dccB = fdDccBeta
const scenarios = [
{ label: "Low (ρ₀ = 0.10)", rho0: 0.10 },
{ label: "Normal (ρ₀ = 0.30)", rho0: 0.30 },
{ label: "Elevated (ρ₀ = 0.60)", rho0: 0.60 },
{ label: "Crisis (ρ₀ = 0.80)", rho0: 0.80 }
]
const results = scenarios.map(sc => {
const rets = new Array(FH)
for (let i = 0; i < FH; i++) {
let cum = 0
let s2_1 = curSig1**2, s2_2 = curSig2**2
// Q-state starts so that the normalized correlation equals the scenario's rho0.
let q11 = 1, q22 = 1, q12 = sc.rho0, curRho = sc.rho0
for (let k = 0; k < K; k++) {
// Bootstrap a same-day pair of uncorrelated shocks, re-correlate with the
// current simulated rho, then update the GARCH variances and DCC state.
const idx = Math.floor(rng() * N)
const L = cholL(curRho)
const zd1 = L.a * zu1[idx] + L.b * zu2[idx]
const zd2 = L.c * zu1[idx] + L.d * zu2[idx]
const rd1 = Math.sqrt(s2_1) * zd1
const rd2 = Math.sqrt(s2_2) * zd2
cum += w1 * rd1 + w2 * rd2
s2_1 = fdOmega1 + fdAlpha1 * rd1**2 + fdBeta1 * s2_1
s2_2 = fdOmega2 + fdAlpha2 * rd2**2 + fdBeta2 * s2_2
q11 = 1 * (1 - dccA - dccB) + dccA * zd1**2 + dccB * q11
q22 = 1 * (1 - dccA - dccB) + dccA * zd2**2 + dccB * q22
q12 = rhoBar * (1 - dccA - dccB) + dccA * zd1 * zd2 + dccB * q12
curRho = Math.max(-0.999, Math.min(0.999, q12 / Math.sqrt(q11 * q22)))
}
rets[i] = cum
}
// Empirical VaR/ES from the sorted K-day return distribution.
rets.sort((a, b) => a - b)
const posIdx = Math.max(0, Math.ceil(FH * p) - 1)
const varVal = -rets[posIdx]
const tail = rets.slice(0, posIdx + 1)
const esVal = -tail.reduce((a, b) => a + b, 0) / tail.length
return { ...sc, var: varVal, es: esVal }
})
return results
}{
// Grouped bar chart of the stress results: VaR and ES side by side for each
// starting-correlation scenario, built as explicit rects on a numeric x-axis.
const scenarios = fdStress.map(s => s.label)
const barWidth = 0.35
const bars = []
for (let si = 0; si < fdStress.length; si++) {
const sc = fdStress[si]
// Values in percent; the VaR bar sits left of the tick, the ES bar to the right.
bars.push({ x1: si - barWidth, x2: si, y: sc.var * 100, measure: "VaR" })
bars.push({ x1: si, x2: si + barWidth, y: sc.es * 100, measure: "ES" })
}
const colors = { "VaR": "#2f71d5", "ES": "#d62728" }
return Plot.plot({
height: 400, marginLeft: 60, marginBottom: 60,
x: { label: null, domain: [-0.5, scenarios.length - 0.5],
ticks: scenarios.map((_, i) => i), tickFormat: i => scenarios[i], tickRotate: -20 },
y: { label: `${fdK}-day risk measure (%)`, grid: true },
marks: [
Plot.ruleY([0]),
Plot.rectY(bars, {
x1: "x1", x2: "x2", y: "y", fill: d => colors[d.measure], fillOpacity: 0.85
}),
// Label only the first VaR bar and the first ES bar to avoid clutter.
Plot.text(bars.filter(d => d.measure === "VaR").slice(0, 1).concat(bars.filter(d => d.measure === "ES").slice(0, 1)), {
x: d => (d.x1 + d.x2) / 2, y: "y", text: "measure", dy: -8, fontSize: 10,
fill: d => colors[d.measure], fontWeight: 600
})
]
})
}{
// Caption: headline ratio of the crisis-scenario VaR to the low-correlation VaR
// (fdStress is ordered low → crisis, so indices 0 and 3 are the extremes).
const low = fdStress[0], crisis = fdStress[3]
const ratio = crisis.var / low.var
return html`<p style="color:#666;font-size:0.85rem;">Portfolio ${fdK}-day VaR and ES under four starting correlation scenarios, all using DCC-FHS with the same volatility levels. The crisis scenario (ρ₀ = 0.80) produces a VaR that is <strong>${fmt(ratio, 2)}x</strong> the low-correlation scenario (ρ₀ = 0.10), illustrating how correlation dynamics amplify portfolio risk during market stress.</p>`
}{
// Summary table: side-by-side VaR/ES for DCC-FHS vs constant-correlation FHS,
// plus the mean simulated correlation at the end of the K-day horizon.
const r = fdResults
return html`<table class="table" style="width:100%;">
<thead><tr>
<th>Measure</th>
<th style="color:#2f71d5;">DCC-FHS</th>
<th style="color:#e67e22;">Constant-correlation FHS</th>
<th>Difference</th>
</tr></thead>
<tbody>
<tr><td style="font-weight:500;">${fdK}-day VaR (${fmt(fdP,1)}%)</td>
<td style="font-weight:700;">${fmt(r.varDCC * 100, 3)}%</td>
<td>${fmt(r.varConst * 100, 3)}%</td>
<td>${fmt((r.varDCC - r.varConst) * 100, 3)}%</td></tr>
<tr><td style="font-weight:500;">${fdK}-day ES (${fmt(fdP,1)}%)</td>
<td style="font-weight:700;">${fmt(r.esDCC * 100, 3)}%</td>
<td>${fmt(r.esConst * 100, 3)}%</td>
<td>${fmt((r.esDCC - r.esConst) * 100, 3)}%</td></tr>
<tr><td style="font-weight:500;">Mean simulated ρ (day ${fdK})</td>
<td colspan="3">${fmt(r.meanCorrPath[r.meanCorrPath.length - 1], 4)} (started at ${fmt(fdRho0, 2)}, long-run ρ̄ = ${fmt(fdRhoBar, 2)})</td></tr>
</tbody></table>
<p style="color:#666;font-size:0.85rem;">DCC-FHS adjusts for the current correlation level and lets it evolve dynamically. When the starting correlation differs significantly from the historical average, the two approaches will produce meaningfully different risk estimates. The gap is largest at short horizons and when the starting correlation is far from ρ̄.</p>`
}References
Christoffersen, Peter F. 2012. Elements of Financial Risk Management. 2nd ed. Academic Press.
Engle, Robert. 2002. “Dynamic Conditional Correlation: A Simple Class of Multivariate Generalized Autoregressive Conditional Heteroskedasticity Models.” Journal of Business & Economic Statistics 20 (3): 339–50.