Run a chosen pre-built model in Stan
Examples
# Basic usage of hmde_run: fit the constant growth model to the trout size data
hmde_model("constant_single_ind") |>
  hmde_assign_data(Trout_Size_Data) |>
  hmde_run(chains = 1, iter = 1000,
           verbose = FALSE, show_messages = FALSE)
#>
#> SAMPLING FOR MODEL 'constant_single_ind' NOW (CHAIN 1).
#> Chain 1:
#> Chain 1: Gradient evaluation took 1.6e-05 seconds
#> Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.16 seconds.
#> Chain 1: Adjust your expectations accordingly!
#> Chain 1:
#> Chain 1:
#> Chain 1: Iteration: 1 / 1000 [ 0%] (Warmup)
#> Chain 1: Iteration: 100 / 1000 [ 10%] (Warmup)
#> Chain 1: Iteration: 200 / 1000 [ 20%] (Warmup)
#> Chain 1: Iteration: 300 / 1000 [ 30%] (Warmup)
#> Chain 1: Iteration: 400 / 1000 [ 40%] (Warmup)
#> Chain 1: Iteration: 500 / 1000 [ 50%] (Warmup)
#> Chain 1: Iteration: 501 / 1000 [ 50%] (Sampling)
#> Chain 1: Iteration: 600 / 1000 [ 60%] (Sampling)
#> Chain 1: Iteration: 700 / 1000 [ 70%] (Sampling)
#> Chain 1: Iteration: 800 / 1000 [ 80%] (Sampling)
#> Chain 1: Iteration: 900 / 1000 [ 90%] (Sampling)
#> Chain 1: Iteration: 1000 / 1000 [100%] (Sampling)
#> Chain 1:
#> Chain 1: Elapsed Time: 0.051 seconds (Warm-up)
#> Chain 1: 0.038 seconds (Sampling)
#> Chain 1: 0.089 seconds (Total)
#> Chain 1:
#> Inference for Stan model: constant_single_ind.
#> 1 chains, each with iter=1000; warmup=500; thin=1;
#> post-warmup draws per chain=500, total post-warmup draws=500.
#>
#> mean se_mean sd 2.5% 25%
#> ind_y_0 65.83 0.08 1.35 63.20 64.91
#> ind_beta 3.14 0.03 0.50 2.14 2.80
#> global_error_sigma 11.19 0.04 0.74 9.85 10.69
#> y_hat[1] 65.83 0.08 1.35 63.20 64.91
#> y_hat[2] 71.83 0.03 0.92 69.98 71.25
#> y_hat[3] 78.44 0.06 1.39 75.73 77.53
#> y_hat[4] 84.77 0.12 2.26 80.46 83.23
#> y_hat[5] 65.83 0.08 1.35 63.20 64.91
#> y_hat[6] 71.78 0.03 0.92 69.93 71.20
#> y_hat[7] 78.18 0.06 1.36 75.53 77.31
#> y_hat[8] 84.54 0.11 2.23 80.28 83.03
#> y_hat[9] 65.83 0.08 1.35 63.20 64.91
#> y_hat[10] 72.19 0.03 0.92 70.28 71.61
#> y_hat[11] 79.11 0.06 1.48 76.21 78.12
#> y_hat[12] 82.84 0.10 1.98 79.03 81.50
#> y_hat[13] 65.83 0.08 1.35 63.20 64.91
#> y_hat[14] 68.32 0.06 1.09 66.20 67.62
#> y_hat[15] 65.83 0.08 1.35 63.20 64.91
#> y_hat[16] 72.02 0.03 0.92 70.14 71.44
#> y_hat[17] 78.04 0.06 1.35 75.42 77.18
#> y_hat[18] 84.59 0.12 2.23 80.32 83.07
#> y_hat[19] 65.83 0.08 1.35 63.20 64.91
#> y_hat[20] 68.50 0.06 1.08 66.41 67.81
#> y_hat[21] 65.83 0.08 1.35 63.20 64.91
#> y_hat[22] 65.84 0.08 1.35 63.21 64.92
#> y_hat[23] 65.83 0.08 1.35 63.20 64.91
#> y_hat[24] 71.34 0.04 0.93 69.50 70.74
#> y_hat[25] 65.83 0.08 1.35 63.20 64.91
#> y_hat[26] 71.48 0.03 0.93 69.64 70.89
#> y_hat[27] 65.83 0.08 1.35 63.20 64.91
#> y_hat[28] 71.76 0.03 0.92 69.91 71.18
#> y_hat[29] 73.74 0.03 0.97 71.82 73.10
#> y_hat[30] 65.83 0.08 1.35 63.20 64.91
#> y_hat[31] 68.16 0.06 1.11 66.02 67.43
#> y_hat[32] 65.83 0.08 1.35 63.20 64.91
#> y_hat[33] 72.25 0.03 0.93 70.35 71.66
#> y_hat[34] 65.83 0.08 1.35 63.20 64.91
#> y_hat[35] 72.20 0.03 0.93 70.30 71.62
#> y_hat[36] 78.40 0.06 1.39 75.69 77.49
#> y_hat[37] 84.13 0.11 2.17 79.97 82.67
#> y_hat[38] 65.83 0.08 1.35 63.20 64.91
#> y_hat[39] 72.31 0.03 0.93 70.40 71.71
#> y_hat[40] 65.83 0.08 1.35 63.20 64.91
#> y_hat[41] 72.57 0.03 0.93 70.69 71.97
#> y_hat[42] 78.43 0.06 1.39 75.72 77.52
#> y_hat[43] 65.83 0.08 1.35 63.20 64.91
#> y_hat[44] 71.76 0.03 0.92 69.91 71.17
#> y_hat[45] 78.59 0.06 1.41 75.84 77.65
#> y_hat[46] 65.83 0.08 1.35 63.20 64.91
#> y_hat[47] 72.12 0.03 0.92 70.22 71.54
#> y_hat[48] 77.57 0.05 1.29 75.05 76.78
#> y_hat[49] 65.83 0.08 1.35 63.20 64.91
#> y_hat[50] 72.39 0.03 0.93 70.49 71.79
#> y_hat[51] 65.83 0.08 1.35 63.20 64.91
#> y_hat[52] 72.11 0.03 0.92 70.21 71.53
#> y_hat[53] 78.15 0.06 1.36 75.50 77.28
#> y_hat[54] 65.83 0.08 1.35 63.20 64.91
#> y_hat[55] 72.13 0.03 0.92 70.24 71.56
#> y_hat[56] 72.15 0.03 0.92 70.25 71.58
#> y_hat[57] 65.83 0.08 1.35 63.20 64.91
#> y_hat[58] 78.55 0.06 1.41 75.81 77.62
#> y_hat[59] 65.83 0.08 1.35 63.20 64.91
#> y_hat[60] 72.25 0.03 0.93 70.35 71.66
#> y_hat[61] 65.83 0.08 1.35 63.20 64.91
#> y_hat[62] 69.63 0.05 1.00 67.67 68.98
#> y_hat[63] 65.83 0.08 1.35 63.20 64.91
#> y_hat[64] 72.03 0.03 0.92 70.15 71.45
#> y_hat[65] 65.83 0.08 1.35 63.20 64.91
#> y_hat[66] 72.56 0.03 0.93 70.67 71.95
#> y_hat[67] 65.83 0.08 1.35 63.20 64.91
#> y_hat[68] 71.64 0.03 0.93 69.80 71.04
#> y_hat[69] 78.07 0.06 1.35 75.44 77.21
#> y_hat[70] 81.28 0.08 1.76 77.84 80.12
#> y_hat[71] 65.83 0.08 1.35 63.20 64.91
#> y_hat[72] 72.52 0.03 0.93 70.63 71.92
#> y_hat[73] 74.75 0.03 1.03 72.68 74.08
#> y_hat[74] 65.83 0.08 1.35 63.20 64.91
#> y_hat[75] 71.92 0.03 0.92 70.05 71.34
#> y_hat[76] 65.83 0.08 1.35 63.20 64.91
#> y_hat[77] 68.66 0.05 1.06 66.60 67.95
#> y_hat[78] 65.83 0.08 1.35 63.20 64.91
#> y_hat[79] 68.71 0.05 1.06 66.65 68.00
#> y_hat[80] 65.83 0.08 1.35 63.20 64.91
#> y_hat[81] 68.49 0.06 1.08 66.40 67.80
#> y_hat[82] 65.83 0.08 1.35 63.20 64.91
#> y_hat[83] 74.49 0.03 1.01 72.43 73.83
#> y_hat[84] 65.83 0.08 1.35 63.20 64.91
#> y_hat[85] 68.58 0.05 1.07 66.50 67.87
#> y_hat[86] 65.83 0.08 1.35 63.20 64.91
#> y_hat[87] 72.13 0.03 0.92 70.24 71.56
#> y_hat[88] 78.33 0.06 1.38 75.64 77.43
#> y_hat[89] 84.36 0.11 2.20 80.15 82.87
#> y_hat[90] 65.83 0.08 1.35 63.20 64.91
#> y_hat[91] 71.07 0.04 0.93 69.20 70.45
#> y_hat[92] 65.83 0.08 1.35 63.20 64.91
#> y_hat[93] 68.40 0.06 1.09 66.29 67.70
#> y_hat[94] 65.83 0.08 1.35 63.20 64.91
#> y_hat[95] 76.18 0.04 1.15 74.10 75.41
#> y_hat[96] 65.83 0.08 1.35 63.20 64.91
#> y_hat[97] 71.96 0.03 0.92 70.09 71.38
#> y_hat[98] 75.95 0.04 1.13 73.92 75.20
#> y_hat[99] 65.83 0.08 1.35 63.20 64.91
#> y_hat[100] 71.95 0.03 0.92 70.07 71.37
#> y_hat[101] 82.05 0.09 1.87 78.40 80.81
#> y_hat[102] 65.83 0.08 1.35 63.20 64.91
#> y_hat[103] 71.35 0.04 0.93 69.51 70.75
#> y_hat[104] 77.98 0.05 1.34 75.37 77.14
#> y_hat[105] 81.65 0.09 1.81 78.11 80.44
#> y_hat[106] 65.83 0.08 1.35 63.20 64.91
#> y_hat[107] 70.79 0.04 0.94 68.92 70.18
#> y_hat[108] 65.83 0.08 1.35 63.20 64.91
#> y_hat[109] 71.53 0.03 0.93 69.69 70.93
#> y_hat[110] 72.10 0.03 0.92 70.20 71.52
#> y_hat[111] 65.83 0.08 1.35 63.20 64.91
#> y_hat[112] 72.71 0.03 0.93 70.85 72.08
#> y_hat[113] 75.97 0.04 1.13 73.94 75.23
#> y_hat[114] 65.83 0.08 1.35 63.20 64.91
#> y_hat[115] 72.17 0.03 0.92 70.27 71.60
#> y_hat[116] 84.06 0.11 2.16 79.91 82.60
#> y_hat[117] 65.83 0.08 1.35 63.20 64.91
#> y_hat[118] 66.55 0.07 1.27 64.04 65.66
#> y_hat[119] 65.83 0.08 1.35 63.20 64.91
#> y_hat[120] 71.84 0.03 0.92 69.99 71.26
#> y_hat[121] 78.34 0.06 1.38 75.65 77.43
#> y_hat[122] 65.83 0.08 1.35 63.20 64.91
#> y_hat[123] 71.72 0.03 0.92 69.87 71.13
#> y_hat[124] 77.72 0.05 1.31 75.17 76.91
#> y_hat[125] 65.83 0.08 1.35 63.20 64.91
#> y_hat[126] 71.27 0.04 0.93 69.43 70.66
#> y_hat[127] 77.82 0.05 1.32 75.25 77.00
#> y_hat[128] 93.38 0.20 3.57 86.61 90.99
#> y_hat[129] 65.83 0.08 1.35 63.20 64.91
#> y_hat[130] 71.87 0.03 0.92 70.01 71.28
#> y_hat[131] 78.40 0.06 1.39 75.69 77.49
#> y_hat[132] 87.78 0.15 2.71 82.57 85.97
#> y_hat[133] 65.83 0.08 1.35 63.20 64.91
#> y_hat[134] 78.40 0.06 1.39 75.69 77.49
#> y_hat[135] 90.95 0.18 3.19 84.85 88.83
#> check_prior_pars_ind_beta[1] 0.00 NaN 0.00 0.00 0.00
#> check_prior_pars_ind_beta[2] 2.00 NaN 0.00 2.00 2.00
#> check_prior_pars_global_error_sigma[1] 0.00 NaN 0.00 0.00 0.00
#> check_prior_pars_global_error_sigma[2] 2.00 NaN 0.00 2.00 2.00
#> lp__ -391.19 0.08 1.26 -394.40 -391.73
#> 50% 75% 97.5% n_eff Rhat
#> ind_y_0 65.87 66.69 68.54 286 1.00
#> ind_beta 3.11 3.48 4.13 266 1.00
#> global_error_sigma 11.19 11.68 12.73 320 1.01
#> y_hat[1] 65.87 66.69 68.54 286 1.00
#> y_hat[2] 71.81 72.46 73.74 778 1.00
#> y_hat[3] 78.35 79.34 81.29 569 1.00
#> y_hat[4] 84.79 86.24 89.33 373 1.00
#> y_hat[5] 65.87 66.69 68.54 286 1.00
#> y_hat[6] 71.76 72.41 73.70 769 1.00
#> y_hat[7] 78.09 79.07 80.97 588 1.00
#> y_hat[8] 84.57 85.99 89.03 376 1.00
#> y_hat[9] 65.87 66.69 68.54 286 1.00
#> y_hat[10] 72.16 72.81 74.07 833 1.00
#> y_hat[11] 79.04 80.02 82.08 527 1.00
#> y_hat[12] 82.86 84.09 86.83 403 1.00
#> y_hat[13] 65.87 66.69 68.54 286 1.00
#> y_hat[14] 68.33 69.01 70.44 365 1.00
#> y_hat[15] 65.87 66.69 68.54 286 1.00
#> y_hat[16] 72.00 72.65 73.92 808 1.00
#> y_hat[17] 77.95 78.92 80.78 599 1.00
#> y_hat[18] 84.62 86.04 89.09 375 1.00
#> y_hat[19] 65.87 66.69 68.54 286 1.00
#> y_hat[20] 68.50 69.17 70.60 377 1.00
#> y_hat[21] 65.87 66.69 68.54 286 1.00
#> y_hat[22] 65.88 66.70 68.54 287 1.00
#> y_hat[23] 65.87 66.69 68.54 286 1.00
#> y_hat[24] 71.31 71.94 73.29 699 1.00
#> y_hat[25] 65.87 66.69 68.54 286 1.00
#> y_hat[26] 71.46 72.08 73.43 721 1.00
#> y_hat[27] 65.87 66.69 68.54 286 1.00
#> y_hat[28] 71.75 72.39 73.68 767 1.00
#> y_hat[29] 73.71 74.38 75.70 977 1.00
#> y_hat[30] 65.87 66.69 68.54 286 1.00
#> y_hat[31] 68.17 68.87 70.30 355 1.00
#> y_hat[32] 65.87 66.69 68.54 286 1.00
#> y_hat[33] 72.23 72.87 74.13 844 1.00
#> y_hat[34] 65.87 66.69 68.54 286 1.00
#> y_hat[35] 72.18 72.82 74.08 836 1.00
#> y_hat[36] 78.31 79.30 81.24 572 1.00
#> y_hat[37] 84.18 85.53 88.49 381 1.00
#> y_hat[38] 65.87 66.69 68.54 286 1.00
#> y_hat[39] 72.29 72.94 74.19 853 1.00
#> y_hat[40] 65.87 66.69 68.54 286 1.00
#> y_hat[41] 72.55 73.19 74.47 889 1.00
#> y_hat[42] 78.35 79.33 81.28 570 1.00
#> y_hat[43] 65.87 66.69 68.54 286 1.00
#> y_hat[44] 71.74 72.38 73.67 765 1.00
#> y_hat[45] 78.51 79.50 81.48 558 1.00
#> y_hat[46] 65.87 66.69 68.54 286 1.00
#> y_hat[47] 72.09 72.75 74.00 823 1.00
#> y_hat[48] 77.51 78.42 80.17 640 1.00
#> y_hat[49] 65.87 66.69 68.54 286 1.00
#> y_hat[50] 72.37 73.02 74.28 864 1.00
#> y_hat[51] 65.87 66.69 68.54 286 1.00
#> y_hat[52] 72.09 72.74 74.00 821 1.00
#> y_hat[53] 78.06 79.04 80.92 590 1.00
#> y_hat[54] 65.87 66.69 68.54 286 1.00
#> y_hat[55] 72.11 72.76 74.02 825 1.00
#> y_hat[56] 72.13 72.78 74.03 828 1.00
#> y_hat[57] 65.87 66.69 68.54 286 1.00
#> y_hat[58] 78.46 79.45 81.43 561 1.00
#> y_hat[59] 65.87 66.69 68.54 286 1.00
#> y_hat[60] 72.23 72.87 74.13 844 1.00
#> y_hat[61] 65.87 66.69 68.54 286 1.00
#> y_hat[62] 69.62 70.29 71.63 485 1.00
#> y_hat[63] 65.87 66.69 68.54 286 1.00
#> y_hat[64] 72.01 72.66 73.93 809 1.00
#> y_hat[65] 65.87 66.69 68.54 286 1.00
#> y_hat[66] 72.53 73.17 74.45 887 1.00
#> y_hat[67] 65.87 66.69 68.54 286 1.00
#> y_hat[68] 71.63 72.25 73.57 747 1.00
#> y_hat[69] 77.99 78.96 80.83 597 1.00
#> y_hat[70] 81.32 82.40 84.80 439 1.00
#> y_hat[71] 65.87 66.69 68.54 286 1.00
#> y_hat[72] 72.50 73.14 74.42 882 1.00
#> y_hat[73] 74.69 75.45 76.83 931 1.00
#> y_hat[74] 65.87 66.69 68.54 286 1.00
#> y_hat[75] 71.89 72.55 73.82 792 1.00
#> y_hat[76] 65.87 66.69 68.54 286 1.00
#> y_hat[77] 68.66 69.32 70.78 389 1.00
#> y_hat[78] 65.87 66.69 68.54 286 1.00
#> y_hat[79] 68.71 69.37 70.82 392 1.00
#> y_hat[80] 65.87 66.69 68.54 286 1.00
#> y_hat[81] 68.49 69.16 70.59 376 1.00
#> y_hat[82] 65.87 66.69 68.54 286 1.00
#> y_hat[83] 74.43 75.16 76.51 951 1.00
#> y_hat[84] 65.87 66.69 68.54 286 1.00
#> y_hat[85] 68.57 69.24 70.69 383 1.00
#> y_hat[86] 65.87 66.69 68.54 286 1.00
#> y_hat[87] 72.11 72.76 74.02 825 1.00
#> y_hat[88] 78.24 79.22 81.15 577 1.00
#> y_hat[89] 84.40 85.78 88.78 378 1.00
#> y_hat[90] 65.87 66.69 68.54 286 1.00
#> y_hat[91] 71.04 71.68 73.05 659 1.00
#> y_hat[92] 65.87 66.69 68.54 286 1.00
#> y_hat[93] 68.41 69.08 70.51 370 1.00
#> y_hat[94] 65.87 66.69 68.54 286 1.00
#> y_hat[95] 76.12 76.93 78.48 785 1.00
#> y_hat[96] 65.87 66.69 68.54 286 1.00
#> y_hat[97] 71.93 72.60 73.86 798 1.00
#> y_hat[98] 75.87 76.69 78.24 809 1.00
#> y_hat[99] 65.87 66.69 68.54 286 1.00
#> y_hat[100] 71.92 72.58 73.84 796 1.00
#> y_hat[101] 82.09 83.24 85.79 419 1.00
#> y_hat[102] 65.87 66.69 68.54 286 1.00
#> y_hat[103] 71.33 71.95 73.31 701 1.00
#> y_hat[104] 77.90 78.87 80.72 604 1.00
#> y_hat[105] 81.70 82.80 85.27 429 1.00
#> y_hat[106] 65.87 66.69 68.54 286 1.00
#> y_hat[107] 70.77 71.40 72.78 620 1.00
#> y_hat[108] 65.87 66.69 68.54 286 1.00
#> y_hat[109] 71.51 72.14 73.47 730 1.00
#> y_hat[110] 72.08 72.73 73.99 820 1.00
#> y_hat[111] 65.87 66.69 68.54 286 1.00
#> y_hat[112] 72.68 73.32 74.61 907 1.00
#> y_hat[113] 75.90 76.71 78.26 806 1.00
#> y_hat[114] 65.87 66.69 68.54 286 1.00
#> y_hat[115] 72.15 72.80 74.05 831 1.00
#> y_hat[116] 84.10 85.45 88.39 382 1.00
#> y_hat[117] 65.87 66.69 68.54 286 1.00
#> y_hat[118] 66.58 67.37 69.03 298 1.00
#> y_hat[119] 65.87 66.69 68.54 286 1.00
#> y_hat[120] 71.81 72.47 73.75 779 1.00
#> y_hat[121] 78.25 79.23 81.16 576 1.00
#> y_hat[122] 65.87 66.69 68.54 286 1.00
#> y_hat[123] 71.70 72.35 73.64 760 1.00
#> y_hat[124] 77.64 78.59 80.36 626 1.00
#> y_hat[125] 65.87 66.69 68.54 286 1.00
#> y_hat[126] 71.25 71.87 73.23 688 1.00
#> y_hat[127] 77.75 78.70 80.50 617 1.00
#> y_hat[128] 93.35 95.72 100.24 317 1.00
#> y_hat[129] 65.87 66.69 68.54 286 1.00
#> y_hat[130] 71.84 72.50 73.77 783 1.00
#> y_hat[131] 78.31 79.30 81.24 572 1.00
#> y_hat[132] 87.75 89.55 93.22 344 1.00
#> y_hat[133] 65.87 66.69 68.54 286 1.00
#> y_hat[134] 78.31 79.30 81.24 572 1.00
#> y_hat[135] 90.92 93.04 97.17 326 1.00
#> check_prior_pars_ind_beta[1] 0.00 0.00 0.00 NaN NaN
#> check_prior_pars_ind_beta[2] 2.00 2.00 2.00 NaN NaN
#> check_prior_pars_global_error_sigma[1] 0.00 0.00 0.00 NaN NaN
#> check_prior_pars_global_error_sigma[2] 2.00 2.00 2.00 NaN NaN
#> lp__ -390.86 -390.25 -389.74 263 1.00
#>
#> Samples were drawn using NUTS(diag_e) at Sat Dec 27 03:12:50 2025.
#> For each parameter, n_eff is a crude measure of effective sample size,
#> and Rhat is the potential scale reduction factor on split chains (at
#> convergence, Rhat=1).