Scenario 3: Limiting Deaths

Load packages:

using EasyModelAnalysis
using AlgebraicPetri
using UnPack
using ModelingToolkit # for the ModelingToolkit.get_iv call below
using Test # for the @test checks in the detection-rate section

Generate the Model and Dataset

function formSEIRHD()
    SEIRHD = LabelledPetriNet([:S, :E, :I, :R, :H, :D],
        :expo => ((:S, :I) => (:E, :I)),
        :conv => (:E => :I),
        :rec => (:I => :R),
        :hosp => (:I => :H),
        :death => (:H => :D))
    return SEIRHD
end

seirhd = formSEIRHD()
sys1 = ODESystem(seirhd)

\[
\begin{align}
\frac{\mathrm{d} S\left( t \right)}{\mathrm{d}t} =& - expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} E\left( t \right)}{\mathrm{d}t} =& - conv E\left( t \right) + expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} I\left( t \right)}{\mathrm{d}t} =& conv E\left( t \right) - hosp I\left( t \right) - rec I\left( t \right) \\
\frac{\mathrm{d} R\left( t \right)}{\mathrm{d}t} =& rec I\left( t \right) \\
\frac{\mathrm{d} H\left( t \right)}{\mathrm{d}t} =& - death H\left( t \right) + hosp I\left( t \right) \\
\frac{\mathrm{d} D\left( t \right)}{\mathrm{d}t} =& death H\left( t \right)
\end{align}
\]

function formSEIRD()
    SEIRD = LabelledPetriNet([:S, :E, :I, :R, :D],
        :expo => ((:S, :I) => (:E, :I)),
        :conv => (:E => :I),
        :rec => (:I => :R),
        :death => (:I => :D))
    return SEIRD
end

seird = formSEIRD()
sys2 = ODESystem(seird)

\[
\begin{align}
\frac{\mathrm{d} S\left( t \right)}{\mathrm{d}t} =& - expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} E\left( t \right)}{\mathrm{d}t} =& - conv E\left( t \right) + expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} I\left( t \right)}{\mathrm{d}t} =& conv E\left( t \right) - death I\left( t \right) - rec I\left( t \right) \\
\frac{\mathrm{d} R\left( t \right)}{\mathrm{d}t} =& rec I\left( t \right) \\
\frac{\mathrm{d} D\left( t \right)}{\mathrm{d}t} =& death I\left( t \right)
\end{align}
\]

function formSIRHD()
    SIRHD = LabelledPetriNet([:S, :I, :R, :H, :D],
        :expo => ((:S, :I) => (:I, :I)),
        :rec => (:I => :R),
        :hosp => (:I => :H),
        :death => (:H => :D))
    return SIRHD
end

sirhd = formSIRHD()
sys3 = ODESystem(sirhd)

\[
\begin{align}
\frac{\mathrm{d} S\left( t \right)}{\mathrm{d}t} =& - expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} I\left( t \right)}{\mathrm{d}t} =& - hosp I\left( t \right) - rec I\left( t \right) + expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} R\left( t \right)}{\mathrm{d}t} =& rec I\left( t \right) \\
\frac{\mathrm{d} H\left( t \right)}{\mathrm{d}t} =& - death H\left( t \right) + hosp I\left( t \right) \\
\frac{\mathrm{d} D\left( t \right)}{\mathrm{d}t} =& death H\left( t \right)
\end{align}
\]

function form_seird_renew()
    seird_renew = LabelledPetriNet([:S, :E, :I, :R, :D],
        :expo => ((:S, :I) => (:E, :I)),
        :conv => (:E => :I),
        :rec => (:I => :R),
        :death => (:I => :D),
        :renew => (:R => :S))
    return seird_renew
end

seird_renew = form_seird_renew()
sys4 = ODESystem(seird_renew)

\[
\begin{align}
\frac{\mathrm{d} S\left( t \right)}{\mathrm{d}t} =& renew R\left( t \right) - expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} E\left( t \right)}{\mathrm{d}t} =& - conv E\left( t \right) + expo S\left( t \right) I\left( t \right) \\
\frac{\mathrm{d} I\left( t \right)}{\mathrm{d}t} =& conv E\left( t \right) - death I\left( t \right) - rec I\left( t \right) \\
\frac{\mathrm{d} R\left( t \right)}{\mathrm{d}t} =& rec I\left( t \right) - renew R\left( t \right) \\
\frac{\mathrm{d} D\left( t \right)}{\mathrm{d}t} =& death I\left( t \right)
\end{align}
\]

using ASKEM # temporary workaround until this functionality is merged upstream
max_e_h = mca(seird, sirhd) # maximum common acset of the SEIRD and SIRHD nets
AlgebraicPetri.Graph(max_e_h[1])
max_3way = mca(max_e_h[1], seirhd) # intersect the result with SEIRHD for a 3-way comparison
AlgebraicPetri.Graph(max_3way[1])
max_seird_renew = mca(seird, seird_renew) # SEIRD vs the SEIRD variant with the renew (R -> S) transition
AlgebraicPetri.Graph(max_seird_renew[1])
t = ModelingToolkit.get_iv(sys1)
@unpack S, E, I, R, H, D = sys1
@unpack expo, conv, rec, hosp, death = sys1
NN = 10.0
@parameters u_expo=0.2 * NN u_conv=0.2 * NN u_rec=0.8 * NN u_hosp=0.2 * NN u_death=0.1 * NN N=NN
translate_params = [expo => u_expo / N,
    conv => u_conv / N,
    rec => u_rec / N,
    hosp => u_hosp / N,
    death => u_death / N]
subed_sys = substitute(sys1, translate_params)
sys = add_accumulations(subed_sys, [I])
@unpack accumulation_I = sys

\[
\begin{align}
\frac{\mathrm{d} S\left( t \right)}{\mathrm{d}t} =& \frac{ - u_{expo} S\left( t \right) I\left( t \right)}{N} \\
\frac{\mathrm{d} E\left( t \right)}{\mathrm{d}t} =& \frac{u_{expo} S\left( t \right) I\left( t \right)}{N} + \frac{ - u_{conv} E\left( t \right)}{N} \\
\frac{\mathrm{d} I\left( t \right)}{\mathrm{d}t} =& \frac{u_{conv} E\left( t \right)}{N} + \frac{ - u_{rec} I\left( t \right)}{N} + \frac{ - u_{hosp} I\left( t \right)}{N} \\
\frac{\mathrm{d} R\left( t \right)}{\mathrm{d}t} =& \frac{u_{rec} I\left( t \right)}{N} \\
\frac{\mathrm{d} H\left( t \right)}{\mathrm{d}t} =& \frac{ - u_{death} H\left( t \right)}{N} + \frac{u_{hosp} I\left( t \right)}{N} \\
\frac{\mathrm{d} D\left( t \right)}{\mathrm{d}t} =& \frac{u_{death} H\left( t \right)}{N} \\
\frac{\mathrm{d} \mathrm{accumulation}_{I}\left( t \right)}{\mathrm{d}t} =& I\left( t \right)
\end{align}
\]

u0init = [
    S => 0.9 * NN,
    E => 0.05 * NN,
    I => 0.01 * NN,
    R => 0.02 * NN,
    H => 0.01 * NN,
    D => 0.01 * NN
]

tend = 6 * 7
ts = 0:tend
prob = ODEProblem(sys, u0init, (0.0, tend))
sol = solve(prob)
plot(sol)
Example block output

Model Analysis

Question 1

Provide a forecast of cumulative Covid-19 cases and deaths over the 6-week period from May 1 – June 15, 2020 under no interventions, including 90% prediction intervals in your forecasts. Compare the accuracy of the forecasts with true data over the six-week timespan.

get_uncertainty_forecast(prob, [accumulation_I], ts, [u_conv => Uniform(0.0, 1.0)], 6 * 7)
42-element Vector{Matrix{Float64}}:
 [0.0 0.06859915653154794 … 1.0912045083115203 1.1242136179045317]
 [0.0 0.06560553807764849 … 0.471991956655876 0.48293750527584006]
 [0.0 0.07934106518148466 … 4.073541914499148 4.189837629456124]
 [0.0 0.08017673658028851 … 4.2942179724841205 4.412496740406427]
 [0.0 0.08294637729165522 … 4.966360327469066 5.0856030728257355]
 [0.0 0.07952698691846789 … 4.123278657368528 4.240088666328663]
 [0.0 0.07980948318221215 … 4.198160323282694 4.315671258468448]
 [0.0 0.077856477824428 … 3.6647544453910266 3.775409382749572]
 [0.0 0.0709166718007686 … 1.6758842606697346 1.7305698300090897]
 [0.0 0.081138637877811 … 4.538473906396091 4.658018946604242]
 ⋮
 [0.0 0.07992146522247878 … 4.227607124215804 4.345369252094923]
 [0.0 0.06665470017758945 … 0.6697484840283441 0.6874252397532291]
 [0.0 0.07043208147570248 … 1.547279183777716 1.597256984872199]
 [0.0 0.07975468703957515 … 4.183701738890995 4.3010842626662775]
 [0.0 0.07090558520310755 … 1.6729093076354 1.727486696049502]
 [0.0 0.07954040955583062 … 4.126855624523702 4.2437011292229725]
 [0.0 0.08239428846087123 … 4.840178478550491 4.9598586745066555]
 [0.0 0.07105242540394231 … 1.7124307747837981 1.7684422025474054]
 [0.0 0.06485329559811687 … 0.34331980015013275 0.35018143310191957]
plot_uncertainty_forecast(prob, [accumulation_I], ts, [u_conv => Uniform(0.0, 1.0)], 6 * 7)
get_uncertainty_forecast_quantiles(prob, [accumulation_I], ts,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
2-element Vector{Matrix{Float64}}:
 [0.0; 0.06444283952144138; … ; 0.2777714689307762; 0.2826581909782595;;]
 [0.0; 0.08095812471648772; … ; 4.4934615539329235; 4.612847746732835;;]
plot_uncertainty_forecast_quantiles(prob, [accumulation_I], ts,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
Example block output
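
As a hedged sketch of the accuracy comparison requested above (not carried out in the original code): true_cumulative_cases is a placeholder for the observed May 1 – June 15 series rescaled to the toy population NN, and midpoint, mae, and coverage are names introduced here purely for illustration.

qs = get_uncertainty_forecast_quantiles(prob, [accumulation_I], ts,
    [u_conv => Uniform(0.0, 1.0)], 6 * 7)
lower, upper = vec(qs[1]), vec(qs[2])             # assumed 5% and 95% quantiles
true_cumulative_cases = fill(NaN, length(lower))  # placeholder for the real data
midpoint = (lower .+ upper) ./ 2
mae = sum(abs, midpoint .- true_cumulative_cases) / length(lower)
coverage = count((lower .<= true_cumulative_cases) .& (true_cumulative_cases .<= upper)) /
           length(lower)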

Question 2

Based on the forecasts, do we need additional interventions to keep cumulative Covid deaths under 6000 total? Provide a probability that the cumulative number of Covid deaths will stay under 6000 for the next 6 weeks without any additional interventions.

_prob = remake(prob, tspan = (0.0, 6 * 7.0))
prob_violating_threshold(_prob, [u_conv => Uniform(0.0, 1.0)], [accumulation_I > 0.4 * NN]) # TODO: explain 0.4*NN
0.22614782187965526
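
The TODO above asks to explain the 0.4 * NN factor: presumably it is a proxy threshold on cumulative infections standing in for the 6000-death limit, rescaled to the toy population NN. A minimal alternative sketch, thresholding cumulative deaths D directly, is shown below; death_limit is purely illustrative and would need to be the real 6000-death limit rescaled to NN.

death_limit = 0.06 * NN # illustrative only: 6000 deaths rescaled to the toy population
prob_violating_threshold(_prob, [u_conv => Uniform(0.0, 1.0)], [D > death_limit])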

Question 3

We are interested in determining how effective it would be to institute a mandatory mask mandate for the duration of the next six weeks. What is the probability of staying below 6000 cumulative deaths if we institute an indefinite mask mandate starting May 1, 2020?

_prob = remake(_prob, p = [u_expo => 0.02])
prob_violating_threshold(_prob, [u_conv => Uniform(0.0, 1.0)], [accumulation_I > 0.4 * NN])
0.0

Question 4

We are interested in determining how detection rate can affect the accuracy and uncertainty in our forecasts. In particular, suppose we can improve the baseline detection rate by 20%, and the detection rate stays constant throughout the duration of the forecast. Assuming no additional interventions (ignoring Question 3), does that increase the amount of cumulative forecasted cases and deaths after six weeks? How does an increase in the detection rate affect the uncertainty in our estimates? Can you characterize the relationship between detection rate and our forecasts and their uncertainties, and comment on whether improving detection rates would provide decision-makers with better information (i.e., more accurate forecasts and/or narrower prediction intervals)?

# These equations add I -> D and H -> R transitions to the model: everyone in I is
# treated as undetected, u_hosp acts as the detection rate, and hospital capacity
# is assumed to be unlimited.
eqs2 = [Differential(t)(S) ~ -(u_expo / N) * I * S
        Differential(t)(E) ~ (u_expo / N) * I * S - (u_conv / N) * E
        Differential(t)(I) ~ (u_conv / N) * E - (u_hosp / N) * I - (u_rec / N) * I -
                             (u_death / N) * I
        Differential(t)(R) ~ (u_rec / N) * I + (u_rec / N) * H
        Differential(t)(H) ~ (u_hosp / N) * I - (u_death / N) * H - (u_rec / N) * H
        Differential(t)(D) ~ (u_death / N) * H + (u_death / N) * I]
@named seirhd_detect = ODESystem(eqs2)
sys2 = add_accumulations(seirhd_detect, [I])
u0init2 = [
    S => 0.9 * NN,
    E => 0.05 * NN,
    I => 0.01 * NN,
    R => 0.02 * NN,
    H => 0.01 * NN,
    D => 0.01 * NN
]
sys2_ = structural_simplify(sys2)
@unpack accumulation_I = sys2_

probd = ODEProblem(sys2_, u0init2, (0.0, tend))
sold = solve(probd; saveat = ts)
plot(sold)
Example block output
sols = []
u_detects = 0:0.1:1
for x in u_detects
    prob_x = remake(probd, p = [u_hosp => x]) # vary the detection rate
    push!(sols, solve(prob_x; saveat = ts))
end

# demonstrate that the total infected count is strictly decreasing with increasing detection rate
is = map(x -> x[accumulation_I][end], sols)
plot(is)
@test issorted(is; rev = true)

# deaths decrease with increasing detection rate
ds = map(x -> x[D][end], sols)
plot(ds)
@test issorted(ds; rev = true)
get_uncertainty_forecast(probd, [accumulation_I], 0:100,
    [u_hosp => Uniform(0.0, 1.0), u_conv => Uniform(0.0, 1.0)],
    6 * 7)

plot_uncertainty_forecast(probd, [accumulation_I], 0:100,
    [
        u_hosp => Uniform(0.0, 1.0),
        u_conv => Uniform(0.0, 1.0)
    ],
    6 * 7)

Compute the accuracy of the forecast assuming no mask mandate (ignoring Question 3) in the same way as you did in Question 1 and determine if improving the detection rate improves forecast accuracy.
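
The accuracy computation mirrors the Question 1 sketch, with probd in place of prob. For the uncertainty part of the question, the sketch below compares the forecast quantile-interval widths at the baseline detection rate and at a 20% improved rate; interval_width, width_baseline, and width_improved are names introduced here, not part of the original analysis.

# Sketch: how does improving the detection rate change forecast uncertainty?
function interval_width(hosp_rate)
    q = get_uncertainty_forecast_quantiles(remake(probd, p = [u_hosp => hosp_rate]),
        [accumulation_I], ts, [u_conv => Uniform(0.0, 1.0)], 6 * 7)
    vec(q[2]) .- vec(q[1])  # upper minus lower quantile at each saved time
end
width_baseline = interval_width(0.2 * NN)        # baseline detection rate
width_improved = interval_width(1.2 * 0.2 * NN)  # 20% improvement in detection
plot([width_baseline width_improved], label = ["baseline" "improved detection"])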

Question 5

Convert the MechBayes SEIRHD model to an SIRHD model by removing the E compartment. Compute the same six-week forecast that you had done in Question 1a and compare the accuracy of the six-week forecasts with the forecasts done in Question 1a.

prob2 = prob # placeholder: reuse the SEIRHD problem here (see the SIRHD sketch below)
get_uncertainty_forecast(prob2, [accumulation_I], 0:100, [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
plot_uncertainty_forecast(prob2, [accumulation_I], 0:100, [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
get_uncertainty_forecast_quantiles(prob2, [accumulation_I], 0:100,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
plot_uncertainty_forecast_quantiles(prob2, [accumulation_I], 0:100,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
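
For reference, here is a minimal sketch of actually building an SIRHD problem from sys3 defined above rather than reusing prob. The names sirhd_params, sys_sirhd, acc_I_sirhd, u0_sirhd, and prob_sirhd are introduced here, the parameter values simply reuse the SEIRHD ones, and the initial E mass is folded into I; all of these are assumptions, not part of the original analysis.

# Sketch: prepare the SIRHD system the same way sys1 was prepared above.
sirhd_params = [sys3.expo => u_expo / N, sys3.rec => u_rec / N,
    sys3.hosp => u_hosp / N, sys3.death => u_death / N]
sys_sirhd = add_accumulations(substitute(sys3, sirhd_params), [sys3.I])
acc_I_sirhd = sys_sirhd.accumulation_I
u0_sirhd = [sys3.S => 0.9 * NN, sys3.I => 0.06 * NN, # E folded into I (assumption)
    sys3.R => 0.02 * NN, sys3.H => 0.01 * NN, sys3.D => 0.01 * NN]
prob_sirhd = ODEProblem(sys_sirhd, u0_sirhd, (0.0, tend))

The Question 1 forecast calls can then be repeated with prob_sirhd and acc_I_sirhd; since SIRHD has no conversion rate, the uncertain parameter would have to be a different one, for example the exposure rate.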

Further modify the MechBayes SEIRHD model and do a model space exploration and model selection from the following models, based on comparing forecasts of cases and deaths to actual data: SEIRD, SEIRHD, and SIRHD. Use data from April 1 – April 30, 2020 for the scenario location (Massachusetts) to fit these models. Then make out-of-sample forecasts for the same 6-week period, May 1 – June 15, 2020, and compare with actual data. Comment on the quality of the fit for each of these models.

prob3 = prob # placeholder: reuse the SEIRHD problem for the modified model
get_uncertainty_forecast(prob3, [accumulation_I], 0:100, [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
plot_uncertainty_forecast(prob3, [accumulation_I], 0:100, [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
get_uncertainty_forecast_quantiles(prob3, [accumulation_I], 0:100,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
plot_uncertainty_forecast_quantiles(prob3, [accumulation_I], 0:100,
    [u_conv => Uniform(0.0, 1.0)],
    6 * 7)
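
For the fitting step, here is a rough sketch for one model under stated assumptions: data_cases and data_deaths are placeholders for the April 2020 Massachusetts case and death series rescaled to the toy population NN, and global_datafit is assumed to take the problem, parameter bounds, time points, and observable => data pairs as in the EasyModelAnalysis data-fitting documentation.

# Sketch: fit the SEIRHD problem to (placeholder) April data, then forecast.
t_train = collect(0.0:29.0)              # April 1 – April 30, 2020
data_cases = fill(NaN, length(t_train))  # placeholder: rescaled MA case counts
data_deaths = fill(NaN, length(t_train)) # placeholder: rescaled MA death counts
pbounds = [u_expo => (0.0, NN), u_conv => (0.0, NN), u_rec => (0.0, NN),
    u_hosp => (0.0, NN), u_death => (0.0, NN)]
fit = global_datafit(remake(prob, tspan = (0.0, 29.0)), pbounds, t_train,
    [accumulation_I => data_cases, D => data_deaths])
# Repeat for the SIRHD and SEIRD problems, forecast May 1 – June 15 with the
# fitted parameters, and rank the models by their out-of-sample error.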

Do a 3-way structural model comparison between the SEIRD, SEIRHD, and SIRHD models.

See https://github.com/SciML/EasyModelAnalysis.jl/issues/22.
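
This comparison is what the maximum common acset calls at the top of the section compute; restated with the three models for convenience:

# Three-way structural comparison via maximum common acsets (same as max_3way above).
max_pair = mca(seird, sirhd)          # SEIRD vs SIRHD
max_three = mca(max_pair[1], seirhd)  # intersect the result with SEIRHD
AlgebraicPetri.Graph(max_three[1])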

Question 7

What is the latest date we can impose a mandatory mask mandate over the next six weeks to ensure, with 90% probability, that cumulative deaths do not exceed 6000? Can you characterize the following relationship: for every day that we delay implementing a mask mandate, we expect cumulative deaths (over the six-week timeframe) to go up by X?
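
No code accompanies this question; the following is only a rough sketch under several explicit assumptions: the 0.4 * NN proxy threshold from Question 2, the u_expo => 0.02 mandate value from Question 3, weekly candidate start days, and u_conv uncertainty applied only after the mandate begins.

# Rough sketch: sweep mandate start days t0, restart the model at t0 with the
# reduced exposure rate, and estimate the violation probability for each choice.
threshold = 0.4 * NN # proxy threshold from Question 2
for t0 in 7.0:7.0:35.0
    sol_pre = solve(remake(prob, tspan = (0.0, t0)))  # no intervention before t0
    prob_post = remake(prob, u0 = sol_pre.u[end],     # restart at the mandate start
        tspan = (t0, float(tend)), p = [u_expo => 0.02])
    p_violate = prob_violating_threshold(prob_post,
        [u_conv => Uniform(0.0, 1.0)], [accumulation_I > threshold])
    @show t0, p_violate # the latest t0 with p_violate <= 0.1 answers the question
end

Plotting the resulting violation probabilities (or the corresponding expected cumulative deaths) against t0 gives the per-day-of-delay relationship asked for in the second part of the question.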