Run a chosen pre-built model in Stan
Examples
# Basic usage of hmde_run
hmde_model("constant_single_ind") |>
  hmde_assign_data(Trout_Size_Data) |>
  hmde_run(chains = 1, iter = 1000,
           verbose = FALSE, show_messages = FALSE)
#>
#> SAMPLING FOR MODEL 'constant_single_ind' NOW (CHAIN 1).
#> Chain 1:
#> Chain 1: Gradient evaluation took 1.5e-05 seconds
#> Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.15 seconds.
#> Chain 1: Adjust your expectations accordingly!
#> Chain 1:
#> Chain 1:
#> Chain 1: Iteration: 1 / 1000 [ 0%] (Warmup)
#> Chain 1: Iteration: 100 / 1000 [ 10%] (Warmup)
#> Chain 1: Iteration: 200 / 1000 [ 20%] (Warmup)
#> Chain 1: Iteration: 300 / 1000 [ 30%] (Warmup)
#> Chain 1: Iteration: 400 / 1000 [ 40%] (Warmup)
#> Chain 1: Iteration: 500 / 1000 [ 50%] (Warmup)
#> Chain 1: Iteration: 501 / 1000 [ 50%] (Sampling)
#> Chain 1: Iteration: 600 / 1000 [ 60%] (Sampling)
#> Chain 1: Iteration: 700 / 1000 [ 70%] (Sampling)
#> Chain 1: Iteration: 800 / 1000 [ 80%] (Sampling)
#> Chain 1: Iteration: 900 / 1000 [ 90%] (Sampling)
#> Chain 1: Iteration: 1000 / 1000 [100%] (Sampling)
#> Chain 1:
#> Chain 1: Elapsed Time: 0.052 seconds (Warm-up)
#> Chain 1: 0.034 seconds (Sampling)
#> Chain 1: 0.086 seconds (Total)
#> Chain 1:
#> Inference for Stan model: constant_single_ind.
#> 1 chains, each with iter=1000; warmup=500; thin=1;
#> post-warmup draws per chain=500, total post-warmup draws=500.
#>
#> mean se_mean sd 2.5% 25%
#> ind_y_0 65.93 0.07 1.24 63.56 65.02
#> ind_beta 3.10 0.03 0.48 2.09 2.78
#> global_error_sigma 11.09 0.04 0.69 9.81 10.64
#> y_hat[1] 65.93 0.07 1.24 63.56 65.02
#> y_hat[2] 71.87 0.03 0.97 69.97 71.22
#> y_hat[3] 78.39 0.06 1.49 75.33 77.40
#> y_hat[4] 84.65 0.12 2.31 79.68 83.14
#> y_hat[5] 65.93 0.07 1.24 63.56 65.02
#> y_hat[6] 71.82 0.03 0.97 69.93 71.18
#> y_hat[7] 78.14 0.06 1.46 75.14 77.14
#> y_hat[8] 84.42 0.11 2.28 79.50 82.96
#> y_hat[9] 65.93 0.07 1.24 63.56 65.02
#> y_hat[10] 72.22 0.03 0.98 70.31 71.55
#> y_hat[11] 79.06 0.07 1.57 75.78 78.03
#> y_hat[12] 82.74 0.10 2.05 78.29 81.41
#> y_hat[13] 65.93 0.07 1.24 63.56 65.02
#> y_hat[14] 68.40 0.05 1.04 66.41 67.64
#> y_hat[15] 65.93 0.07 1.24 63.56 65.02
#> y_hat[16] 72.05 0.03 0.97 70.14 71.40
#> y_hat[17] 78.00 0.06 1.44 75.03 77.01
#> y_hat[18] 84.47 0.11 2.29 79.54 83.00
#> y_hat[19] 65.93 0.07 1.24 63.56 65.02
#> y_hat[20] 68.57 0.05 1.03 66.61 67.84
#> y_hat[21] 65.93 0.07 1.24 63.56 65.02
#> y_hat[22] 65.94 0.07 1.24 63.57 65.03
#> y_hat[23] 65.93 0.07 1.24 63.56 65.02
#> y_hat[24] 71.38 0.04 0.96 69.48 70.72
#> y_hat[25] 65.93 0.07 1.24 63.56 65.02
#> y_hat[26] 71.52 0.03 0.96 69.62 70.88
#> y_hat[27] 65.93 0.07 1.24 63.56 65.02
#> y_hat[28] 71.80 0.03 0.97 69.91 71.16
#> y_hat[29] 73.75 0.03 1.05 71.71 73.03
#> y_hat[30] 65.93 0.07 1.24 63.56 65.02
#> y_hat[31] 68.23 0.06 1.05 66.23 67.48
#> y_hat[32] 65.93 0.07 1.24 63.56 65.02
#> y_hat[33] 72.28 0.03 0.98 70.38 71.62
#> y_hat[34] 65.93 0.07 1.24 63.56 65.02
#> y_hat[35] 72.23 0.03 0.98 70.33 71.57
#> y_hat[36] 78.35 0.06 1.48 75.30 77.36
#> y_hat[37] 84.02 0.11 2.22 79.19 82.60
#> y_hat[38] 65.93 0.07 1.24 63.56 65.02
#> y_hat[39] 72.34 0.03 0.98 70.44 71.67
#> y_hat[40] 65.93 0.07 1.24 63.56 65.02
#> y_hat[41] 72.60 0.03 0.99 70.67 71.92
#> y_hat[42] 78.39 0.06 1.49 75.32 77.39
#> y_hat[43] 65.93 0.07 1.24 63.56 65.02
#> y_hat[44] 71.79 0.03 0.97 69.90 71.16
#> y_hat[45] 78.55 0.06 1.51 75.44 77.55
#> y_hat[46] 65.93 0.07 1.24 63.56 65.02
#> y_hat[47] 72.15 0.03 0.98 70.24 71.49
#> y_hat[48] 77.54 0.05 1.39 74.71 76.58
#> y_hat[49] 65.93 0.07 1.24 63.56 65.02
#> y_hat[50] 72.42 0.03 0.99 70.51 71.75
#> y_hat[51] 65.93 0.07 1.24 63.56 65.02
#> y_hat[52] 72.14 0.03 0.98 70.23 71.48
#> y_hat[53] 78.11 0.06 1.46 75.11 77.11
#> y_hat[54] 65.93 0.07 1.24 63.56 65.02
#> y_hat[55] 72.16 0.03 0.98 70.26 71.51
#> y_hat[56] 72.18 0.03 0.98 70.27 71.52
#> y_hat[57] 65.93 0.07 1.24 63.56 65.02
#> y_hat[58] 78.50 0.06 1.50 75.41 77.51
#> y_hat[59] 65.93 0.07 1.24 63.56 65.02
#> y_hat[60] 72.28 0.03 0.98 70.38 71.62
#> y_hat[61] 65.93 0.07 1.24 63.56 65.02
#> y_hat[62] 69.69 0.04 0.98 67.80 69.00
#> y_hat[63] 65.93 0.07 1.24 63.56 65.02
#> y_hat[64] 72.06 0.03 0.97 70.15 71.41
#> y_hat[65] 65.93 0.07 1.24 63.56 65.02
#> y_hat[66] 72.58 0.03 0.99 70.65 71.90
#> y_hat[67] 65.93 0.07 1.24 63.56 65.02
#> y_hat[68] 71.68 0.03 0.97 69.79 71.05
#> y_hat[69] 78.03 0.06 1.45 75.06 77.04
#> y_hat[70] 81.20 0.08 1.84 77.21 79.96
#> y_hat[71] 65.93 0.07 1.24 63.56 65.02
#> y_hat[72] 72.55 0.03 0.99 70.62 71.87
#> y_hat[73] 74.75 0.04 1.12 72.54 73.97
#> y_hat[74] 65.93 0.07 1.24 63.56 65.02
#> y_hat[75] 71.95 0.03 0.97 70.04 71.29
#> y_hat[76] 65.93 0.07 1.24 63.56 65.02
#> y_hat[77] 68.74 0.05 1.02 66.77 68.03
#> y_hat[78] 65.93 0.07 1.24 63.56 65.02
#> y_hat[79] 68.78 0.05 1.02 66.82 68.07
#> y_hat[80] 65.93 0.07 1.24 63.56 65.02
#> y_hat[81] 68.57 0.05 1.03 66.60 67.83
#> y_hat[82] 65.93 0.07 1.24 63.56 65.02
#> y_hat[83] 74.49 0.03 1.10 72.31 73.72
#> y_hat[84] 65.93 0.07 1.24 63.56 65.02
#> y_hat[85] 68.65 0.05 1.03 66.68 67.93
#> y_hat[86] 65.93 0.07 1.24 63.56 65.02
#> y_hat[87] 72.16 0.03 0.98 70.26 71.51
#> y_hat[88] 78.28 0.06 1.48 75.25 77.29
#> y_hat[89] 84.24 0.11 2.25 79.36 82.80
#> y_hat[90] 65.93 0.07 1.24 63.56 65.02
#> y_hat[91] 71.11 0.04 0.96 69.22 70.44
#> y_hat[92] 65.93 0.07 1.24 63.56 65.02
#> y_hat[93] 68.47 0.05 1.04 66.50 67.73
#> y_hat[94] 65.93 0.07 1.24 63.56 65.02
#> y_hat[95] 76.16 0.04 1.25 73.71 75.26
#> y_hat[96] 65.93 0.07 1.24 63.56 65.02
#> y_hat[97] 71.99 0.03 0.97 70.08 71.35
#> y_hat[98] 75.93 0.04 1.22 73.57 75.06
#> y_hat[99] 65.93 0.07 1.24 63.56 65.02
#> y_hat[100] 71.98 0.03 0.97 70.06 71.32
#> y_hat[101] 81.96 0.09 1.94 77.76 80.67
#> y_hat[102] 65.93 0.07 1.24 63.56 65.02
#> y_hat[103] 71.39 0.04 0.96 69.50 70.74
#> y_hat[104] 77.94 0.06 1.44 74.99 76.96
#> y_hat[105] 81.57 0.09 1.89 77.48 80.29
#> y_hat[106] 65.93 0.07 1.24 63.56 65.02
#> y_hat[107] 70.84 0.04 0.96 68.94 70.16
#> y_hat[108] 65.93 0.07 1.24 63.56 65.02
#> y_hat[109] 71.57 0.03 0.96 69.68 70.93
#> y_hat[110] 72.13 0.03 0.98 70.22 71.47
#> y_hat[111] 65.93 0.07 1.24 63.56 65.02
#> y_hat[112] 72.73 0.03 1.00 70.79 72.05
#> y_hat[113] 75.96 0.04 1.23 73.58 75.08
#> y_hat[114] 65.93 0.07 1.24 63.56 65.02
#> y_hat[115] 72.20 0.03 0.98 70.29 71.54
#> y_hat[116] 83.94 0.11 2.21 79.13 82.53
#> y_hat[117] 65.93 0.07 1.24 63.56 65.02
#> y_hat[118] 66.65 0.07 1.17 64.36 65.77
#> y_hat[119] 65.93 0.07 1.24 63.56 65.02
#> y_hat[120] 71.88 0.03 0.97 69.98 71.23
#> y_hat[121] 78.29 0.06 1.48 75.25 77.30
#> y_hat[122] 65.93 0.07 1.24 63.56 65.02
#> y_hat[123] 71.76 0.03 0.97 69.87 71.13
#> y_hat[124] 77.68 0.05 1.41 74.81 76.73
#> y_hat[125] 65.93 0.07 1.24 63.56 65.02
#> y_hat[126] 71.31 0.04 0.96 69.41 70.65
#> y_hat[127] 77.78 0.06 1.42 74.88 76.82
#> y_hat[128] 93.15 0.19 3.55 86.14 90.84
#> y_hat[129] 65.93 0.07 1.24 63.56 65.02
#> y_hat[130] 71.90 0.03 0.97 70.00 71.25
#> y_hat[131] 78.35 0.06 1.48 75.30 77.36
#> y_hat[132] 87.62 0.14 2.73 81.98 85.85
#> y_hat[133] 65.93 0.07 1.24 63.56 65.02
#> y_hat[134] 78.35 0.06 1.48 75.30 77.36
#> y_hat[135] 90.75 0.17 3.19 84.38 88.69
#> check_prior_pars_ind_beta[1] 0.00 NaN 0.00 0.00 0.00
#> check_prior_pars_ind_beta[2] 2.00 NaN 0.00 2.00 2.00
#> check_prior_pars_global_error_sigma[1] 0.00 NaN 0.00 0.00 0.00
#> check_prior_pars_global_error_sigma[2] 2.00 NaN 0.00 2.00 2.00
#> lp__ -391.12 0.09 1.28 -394.42 -391.87
#> 50% 75% 97.5% n_eff Rhat
#> ind_y_0 65.89 66.80 68.55 285 1.00
#> ind_beta 3.09 3.43 4.03 260 1.00
#> global_error_sigma 11.04 11.48 12.54 337 1.00
#> y_hat[1] 65.89 66.80 68.55 285 1.00
#> y_hat[2] 71.91 72.49 73.92 851 1.00
#> y_hat[3] 78.44 79.36 81.43 612 1.00
#> y_hat[4] 84.72 86.15 89.51 401 1.00
#> y_hat[5] 65.89 66.80 68.55 285 1.00
#> y_hat[6] 71.86 72.44 73.87 842 1.00
#> y_hat[7] 78.18 79.09 81.09 631 1.00
#> y_hat[8] 84.47 85.89 89.22 405 1.00
#> y_hat[9] 65.89 66.80 68.55 285 1.00
#> y_hat[10] 72.25 72.87 74.22 902 1.00
#> y_hat[11] 79.09 80.08 82.31 570 1.00
#> y_hat[12] 82.81 84.08 87.05 436 1.00
#> y_hat[13] 65.89 66.80 68.55 285 1.00
#> y_hat[14] 68.39 69.08 70.57 376 1.00
#> y_hat[15] 65.89 66.80 68.55 285 1.00
#> y_hat[16] 72.10 72.70 74.09 879 1.00
#> y_hat[17] 78.04 78.93 80.92 642 1.00
#> y_hat[18] 84.53 85.95 89.29 404 1.00
#> y_hat[19] 65.89 66.80 68.55 285 1.00
#> y_hat[20] 68.57 69.24 70.74 387 1.00
#> y_hat[21] 65.89 66.80 68.55 285 1.00
#> y_hat[22] 65.90 66.81 68.56 285 1.00
#> y_hat[23] 65.89 66.80 68.55 285 1.00
#> y_hat[24] 71.42 71.99 73.40 751 1.00
#> y_hat[25] 65.89 66.80 68.55 285 1.00
#> y_hat[26] 71.56 72.13 73.55 781 1.00
#> y_hat[27] 65.89 66.80 68.55 285 1.00
#> y_hat[28] 71.84 72.42 73.85 839 1.00
#> y_hat[29] 73.79 74.43 75.79 1037 1.00
#> y_hat[30] 65.89 66.80 68.55 285 1.00
#> y_hat[31] 68.23 68.92 70.44 366 1.00
#> y_hat[32] 65.89 66.80 68.55 285 1.00
#> y_hat[33] 72.31 72.94 74.27 912 1.00
#> y_hat[34] 65.89 66.80 68.55 285 1.00
#> y_hat[35] 72.26 72.89 74.23 905 1.00
#> y_hat[36] 78.39 79.32 81.38 615 1.00
#> y_hat[37] 84.08 85.47 88.71 411 1.00
#> y_hat[38] 65.89 66.80 68.55 285 1.00
#> y_hat[39] 72.37 73.00 74.32 920 1.00
#> y_hat[40] 65.89 66.80 68.55 285 1.00
#> y_hat[41] 72.62 73.28 74.52 953 1.00
#> y_hat[42] 78.43 79.35 81.42 612 1.00
#> y_hat[43] 65.89 66.80 68.55 285 1.00
#> y_hat[44] 71.83 72.42 73.84 837 1.00
#> y_hat[45] 78.59 79.53 81.64 601 1.00
#> y_hat[46] 65.89 66.80 68.55 285 1.00
#> y_hat[47] 72.18 72.80 74.16 892 1.00
#> y_hat[48] 77.59 78.46 80.37 692 1.00
#> y_hat[49] 65.89 66.80 68.55 285 1.00
#> y_hat[50] 72.44 73.08 74.38 930 1.00
#> y_hat[51] 65.89 66.80 68.55 285 1.00
#> y_hat[52] 72.18 72.79 74.16 891 1.00
#> y_hat[53] 78.15 79.05 81.05 633 1.00
#> y_hat[54] 65.89 66.80 68.55 285 1.00
#> y_hat[55] 72.20 72.82 74.18 895 1.00
#> y_hat[56] 72.22 72.84 74.19 897 1.00
#> y_hat[57] 65.89 66.80 68.55 285 1.00
#> y_hat[58] 78.55 79.48 81.58 604 1.00
#> y_hat[59] 65.89 66.80 68.55 285 1.00
#> y_hat[60] 72.31 72.94 74.27 912 1.00
#> y_hat[61] 65.89 66.80 68.55 285 1.00
#> y_hat[62] 69.71 70.30 71.75 486 1.00
#> y_hat[63] 65.89 66.80 68.55 285 1.00
#> y_hat[64] 72.10 72.71 74.09 880 1.00
#> y_hat[65] 65.89 66.80 68.55 285 1.00
#> y_hat[66] 72.60 73.26 74.51 951 1.00
#> y_hat[67] 65.89 66.80 68.55 285 1.00
#> y_hat[68] 71.72 72.30 73.72 814 1.00
#> y_hat[69] 78.07 78.96 80.96 639 1.00
#> y_hat[70] 81.30 82.38 85.16 477 1.00
#> y_hat[71] 65.89 66.80 68.55 285 1.00
#> y_hat[72] 72.57 73.23 74.48 947 1.00
#> y_hat[73] 74.82 75.46 76.99 1017 1.00
#> y_hat[74] 65.89 66.80 68.55 285 1.00
#> y_hat[75] 72.00 72.59 74.01 864 1.00
#> y_hat[76] 65.89 66.80 68.55 285 1.00
#> y_hat[77] 68.74 69.40 70.89 398 1.00
#> y_hat[78] 65.89 66.80 68.55 285 1.00
#> y_hat[79] 68.79 69.44 70.93 401 1.00
#> y_hat[80] 65.89 66.80 68.55 285 1.00
#> y_hat[81] 68.56 69.24 70.73 387 1.00
#> y_hat[82] 65.89 66.80 68.55 285 1.00
#> y_hat[83] 74.56 75.21 76.69 1029 1.00
#> y_hat[84] 65.89 66.80 68.55 285 1.00
#> y_hat[85] 68.65 69.31 70.81 392 1.00
#> y_hat[86] 65.89 66.80 68.55 285 1.00
#> y_hat[87] 72.20 72.82 74.18 895 1.00
#> y_hat[88] 78.33 79.24 81.29 620 1.00
#> y_hat[89] 84.30 85.68 88.99 408 1.00
#> y_hat[90] 65.89 66.80 68.55 285 1.00
#> y_hat[91] 71.15 71.72 73.15 700 1.00
#> y_hat[92] 65.89 66.80 68.55 285 1.00
#> y_hat[93] 68.46 69.16 70.64 380 1.00
#> y_hat[94] 65.89 66.80 68.55 285 1.00
#> y_hat[95] 76.25 76.98 78.72 909 1.00
#> y_hat[96] 65.89 66.80 68.55 285 1.00
#> y_hat[97] 72.04 72.63 74.04 870 1.00
#> y_hat[98] 76.01 76.73 78.41 929 1.00
#> y_hat[99] 65.89 66.80 68.55 285 1.00
#> y_hat[100] 72.02 72.61 74.03 867 1.00
#> y_hat[101] 82.02 83.18 86.09 455 1.00
#> y_hat[102] 65.89 66.80 68.55 285 1.00
#> y_hat[103] 71.44 72.00 73.42 755 1.00
#> y_hat[104] 77.98 78.88 80.86 646 1.00
#> y_hat[105] 81.65 82.77 85.61 466 1.00
#> y_hat[106] 65.89 66.80 68.55 285 1.00
#> y_hat[107] 70.87 71.44 72.89 650 1.00
#> y_hat[108] 65.89 66.80 68.55 285 1.00
#> y_hat[109] 71.61 72.18 73.60 791 1.00
#> y_hat[110] 72.17 72.78 74.15 890 1.00
#> y_hat[111] 65.89 66.80 68.55 285 1.00
#> y_hat[112] 72.75 73.40 74.64 969 1.00
#> y_hat[113] 76.04 76.76 78.44 927 1.00
#> y_hat[114] 65.89 66.80 68.55 285 1.00
#> y_hat[115] 72.23 72.85 74.20 900 1.00
#> y_hat[116] 84.00 85.40 88.62 413 1.00
#> y_hat[117] 65.89 66.80 68.55 285 1.00
#> y_hat[118] 66.62 67.48 69.16 302 1.00
#> y_hat[119] 65.89 66.80 68.55 285 1.00
#> y_hat[120] 71.92 72.50 73.93 852 1.00
#> y_hat[121] 78.34 79.25 81.30 619 1.00
#> y_hat[122] 65.89 66.80 68.55 285 1.00
#> y_hat[123] 71.80 72.38 73.80 830 1.00
#> y_hat[124] 77.73 78.60 80.54 674 1.00
#> y_hat[125] 65.89 66.80 68.55 285 1.00
#> y_hat[126] 71.35 71.92 73.34 738 1.00
#> y_hat[127] 77.82 78.71 80.66 662 1.00
#> y_hat[128] 93.14 95.54 100.70 334 1.00
#> y_hat[129] 65.89 66.80 68.55 285 1.00
#> y_hat[130] 71.94 72.53 73.96 856 1.00
#> y_hat[131] 78.39 79.32 81.38 615 1.00
#> y_hat[132] 87.72 89.42 93.37 367 1.00
#> y_hat[133] 65.89 66.80 68.55 285 1.00
#> y_hat[134] 78.39 79.32 81.38 615 1.00
#> y_hat[135] 90.78 92.89 97.58 346 1.00
#> check_prior_pars_ind_beta[1] 0.00 0.00 0.00 NaN NaN
#> check_prior_pars_ind_beta[2] 2.00 2.00 2.00 NaN NaN
#> check_prior_pars_global_error_sigma[1] 0.00 0.00 0.00 NaN NaN
#> check_prior_pars_global_error_sigma[2] 2.00 2.00 2.00 NaN NaN
#> lp__ -390.70 -390.16 -389.72 225 1.01
#>
#> Samples were drawn using NUTS(diag_e) at Tue Jun 24 01:18:42 2025.
#> For each parameter, n_eff is a crude measure of effective sample size,
#> and Rhat is the potential scale reduction factor on split chains (at
#> convergence, Rhat=1).