High school math question thread (doctors / Univ. of Tokyo grads only) Part 438 (991 posts)
http://rio2016.5ch.net/test/read.cgi/math/1723152147/
842: 132人目の素数さん [sage] 2025/05/24(Sat) 02:35:14.53 ID:VetM3rz7
Computing what LearnBayes::beta.select does, using optim.

beta.optim <- function(x1, p1, x2, p2, verbose = TRUE) {
  # -------------------------
  # Initial values via a moment approximation
  # -------------------------
  mu0    <- (x1 + x2) / 2          # provisional mean
  sigma2 <- ((x2 - x1) / 4)^2      # provisional variance (from the central 50% width)
  v  <- mu0 * (1 - mu0) / sigma2 - 1
  a0 <- mu0 * v
  b0 <- (1 - mu0) * v
  start <- c(a0, b0)

  # -------------------------
  # Error function to be minimized
  # -------------------------
  objective <- function(params) {
    a <- params[1]
    b <- params[2]
    # squared differences between pbeta() cumulative probabilities and the targets
    err1 <- pbeta(x1, a, b) - p1
    err2 <- pbeta(x2, a, b) - p2
    return(err1^2 + err2^2)
  }

  # -------------------------
  # Optimization (with bounds)
  # -------------------------
  result <- optim(
    par    = start,
    fn     = objective,
    method = "L-BFGS-B",
    lower  = c(1e-4, 1e-4)   # a beta distribution requires a, b > 0
  )

  # -------------------------
  # Extract the estimates
  # -------------------------
  a_hat <- result$par[1]
  b_hat <- result$par[2]

  if (verbose) {
    cat("Estimated parameters: a =", round(a_hat, 4), ", b =", round(b_hat, 4), "\n")
    cat("pbeta(x1) =", round(pbeta(x1, a_hat, b_hat), 4), "(target:", p1, ")\n")
    cat("pbeta(x2) =", round(pbeta(x2, a_hat, b_hat), 4), "(target:", p2, ")\n")
  }

  # -------------------------
  # Return the result
  # -------------------------
  return(list(a = a_hat, b = b_hat, ss_value = result$value))
}
http://rio2016.5ch.net/test/read.cgi/math/1723152147/842
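A quick way to try beta.optim is to ask for a distribution with given quantiles and, if the LearnBayes package is installed, compare against beta.select itself. The quantile values below (median 0.30, 90th percentile 0.50) are made up for illustration; this is only a suggested check, not part of the original post.

# Illustrative check (assumes beta.optim above has been sourced)
fit <- beta.optim(0.30, 0.50, 0.50, 0.90)

# For comparison: beta.select takes the same two quantiles as list(p = ..., x = ...) pairs
if (requireNamespace("LearnBayes", quietly = TRUE)) {
  LearnBayes::beta.select(list(p = 0.50, x = 0.30), list(p = 0.90, x = 0.50))
}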
843: 132人目の素数さん [sage] 2025/05/24(Sat) 08:17:52.86 ID:VetM3rz7
library(rjags)

# Fit a Bayesian logistic regression model using JAGS and return predictions and posterior summaries
fit_bayesian_logistic_jags <- function(data, formula, newdata,
                                       n.chains = 3, n.iter = 5000, n.burnin = 1000) {
  # Extract response variable name from the formula
  response_var <- all.vars(formula)[1]
  y <- data[[response_var]]

  # Convert factor response to binary numeric (0/1)
  if (is.factor(y)) y <- as.numeric(y) - 1
  y <- as.numeric(y)

  # Construct design matrices for training and new data
  X <- model.matrix(formula, data)
  new_X <- model.matrix(delete.response(terms(formula)), newdata)

  # Prepare data list for JAGS
  jags_data <- list(
    y = y,
    X = X,
    n = nrow(X),
    p = ncol(X),
    new_X = new_X,
    scale_beta = rep(2.5, ncol(X))  # Prior scale for each coefficient
  )

  # Define the JAGS model
  model_string <- "
  model {
    for (j in 1:p) {
      beta[j] ~ dt(0, 1 / pow(scale_beta[j], 2), 1)
    }
    for (i in 1:n) {
      logit_p[i] <- inprod(X[i,], beta[])
      y[i] ~ dbern(1 / (1 + exp(-logit_p[i])))
    }
    new_logit <- inprod(new_X[1,], beta[])
    new_p <- 1 / (1 + exp(-new_logit))
  }
  "

  # Initialize and run the JAGS model
  model <- jags.model(textConnection(model_string), data = jags_data,
                      n.chains = n.chains, quiet = TRUE)
  update(model, n.burnin)

  # Draw posterior samples
  samples <- coda.samples(model, c("beta", "new_p"), n.iter - n.burnin)
  mat <- as.matrix(samples)

  # Return results
  list(
    model = samples,
    predicted_prob = mean(mat[, "new_p"]),
    summary = summary(samples)
  )
}
http://rio2016.5ch.net/test/read.cgi/math/1723152147/843
844: 132人目の素数さん [sage] 2025/05/24(Sat) 08:18:19.92 ID:VetM3rz7
# Example data
data <- data.frame(
  donation  = c(0, 1000, 2000, 0, 3000, 0, 4000, 0, 5000, 0),
  score     = c(90, 40, 35, 88, 30, 85, 25, 92, 20, 89),
  parent    = c(0, 1, 1, 0, 1, 0, 1, 0, 1, 0),
  admission = as.factor(c(0, 1, 1, 0, 1, 0, 1, 0, 1, 0))
)

# New observation to predict
newdata <- data.frame(
  donation = 2500,
  score    = 40,
  parent   = 1
)

# Fit model and obtain results
set.seed(123)
result <- fit_bayesian_logistic_jags(
  data    = data,
  formula = admission ~ donation + score + parent,
  newdata = newdata
)

# Extract variable names including intercept
var_names <- colnames(model.matrix(admission ~ donation + score + parent, data))

# Extract beta coefficient summaries
beta_stats  <- result$summary$statistics[grep("^beta\\[", rownames(result$summary$statistics)), c("Mean", "SD")]
beta_quants <- result$summary$quantiles[grep("^beta\\[", rownames(result$summary$quantiles)), c("2.5%", "97.5%")]

# Rename row names using variable names
rownames(beta_stats)  <- var_names
rownames(beta_quants) <- var_names

# Display results
print(beta_stats)
print(beta_quants)
cat("Predicted probability:", round(result$predicted_prob, 3), "\n")
http://rio2016.5ch.net/test/read.cgi/math/1723152147/844
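Because the fitted object keeps the raw coda samples in result$model, the standard MCMC diagnostics from the coda package apply directly. The lines below are only a suggested follow-up check on the example above, not part of the original post.

# Convergence diagnostics on the returned mcmc.list
library(coda)
gelman.diag(result$model)      # potential scale reduction factors (should be close to 1)
effectiveSize(result$model)    # effective sample sizes for beta[] and new_p
plot(result$model)             # trace and density plots per monitored node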
846: 132人目の素数さん [sage] 2025/05/24(Sat) 21:16:24.84 ID:VetM3rz7
# Find (a, b) such that dbeta(L, a, b) == dbeta(U, a, b)
# Solve[L^(a-1) (1-L)^(b-1) == U^(a-1) (1-U)^(b-1), b]
L <- 1/7
U <- 1/5
credMass <- 0.95

# b as a function of a, from the equal-density condition at L and U
f <- function(a) 1 + ((a - 1) * log(U / L)) / log((1 - L) / (1 - U))

# root of g gives the interval [L, U] exactly credMass of probability
g <- function(a) pbeta(U, a, f(a)) - pbeta(L, a, f(a)) - credMass

(re <- uniroot(g, c(1, 1e5)))
curve(g(x), 1, 150, bty = "l"); abline(h = 0, lty = 3)
c(re$root, f(re$root))
http://rio2016.5ch.net/test/read.cgi/math/1723152147/846
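As a follow-up check (assuming re, f, L, U, and credMass from the code above are in the workspace), the two defining conditions can be verified numerically: the density should match at both endpoints, and the interval [L, U] should carry credMass of the probability.

a_hat <- re$root
b_hat <- f(re$root)
c(dbeta(L, a_hat, b_hat), dbeta(U, a_hat, b_hat))   # densities at L and U should agree
pbeta(U, a_hat, b_hat) - pbeta(L, a_hat, b_hat)     # should be about 0.95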