## ---- Shared test data --------------------------------------------------------

p3      <- c(0.2, 0.5, 0.3)  # well-formed distribution over 3 outcomes
p4u     <- rep(0.25, 4)      # uniform distribution over 4 outcomes
p2      <- c(0.7, 0.3)       # well-formed binary distribution
p_degen <- c(1, 0, 0)        # degenerate: all mass concentrated on outcome 1

## ---- shannon_entropy ---------------------------------------------------------

test_that("shannon_entropy: correct values", {
  # Definition: H(p) = -sum(p_i log p_i)
  expect_equal(shannon_entropy(p3), -sum(p3 * log(p3)))
  # Uniform over n outcomes attains the maximum log(n)
  expect_equal(shannon_entropy(p4u), log(4))
  # A point mass carries no uncertainty
  expect_equal(shannon_entropy(p_degen), 0)
  # Two-outcome case against the definition
  expect_equal(shannon_entropy(p2), -sum(p2 * log(p2)))
})

test_that("shannon_entropy: input validation", {
  expect_error(shannon_entropy("a"), "'p' must be a numeric vector")
  expect_error(shannon_entropy(1), "'p' must have at least 2 elements")
  expect_error(shannon_entropy(c(0.5, NA)), "'p' must not contain NA")
  expect_error(shannon_entropy(c(-0.1, 1.1)), "must be in \\[0, 1\\]")
  expect_error(shannon_entropy(c(0.2, 0.3, 0.1)), "must sum to 1")
})

## ---- renyi_entropy -----------------------------------------------------------

test_that("renyi_entropy: correct values", {
  # Closed form at q = 2: (1 / (1 - q)) * log(sum(p^q))
  q2_expected <- (1 / (1 - 2)) * log(sum(p3^2))
  expect_equal(renyi_entropy(p3, 2), q2_expected)

  # Limit q -> 1 recovers Shannon entropy
  expect_equal(renyi_entropy(p3, 1), shannon_entropy(p3), tolerance = 1e-8)
  expect_equal(renyi_entropy(p3, 1 + 1e-9), shannon_entropy(p3),
               tolerance = 1e-6)

  # For the uniform distribution H_q = log(n) regardless of q
  expect_equal(renyi_entropy(p4u, 0.5), log(4), tolerance = 1e-10)
  expect_equal(renyi_entropy(p4u, 2), log(4), tolerance = 1e-10)
})

test_that("renyi_entropy: input validation", {
  expect_error(renyi_entropy("a", 2), "'p' must be a numeric vector")
  expect_error(renyi_entropy(p3, "a"), "'q' must be a single numeric value")
  expect_error(renyi_entropy(p3, NA), "'q' must not be NA")
  expect_error(renyi_entropy(p3, -1), "'q' must be strictly positive")
  expect_error(renyi_entropy(p3, 0), "'q' must be strictly positive")
  expect_error(renyi_entropy(c(0.2, 0.3, 0.1), 2), "must sum to 1")
})

## ---- extropy -----------------------------------------------------------------

test_that("extropy: correct values", {
  # Definition: J(p) = -sum((1 - p_i) log(1 - p_i))
  expect_equal(extropy(p3), -sum((1 - p3) * log(1 - p3)))
  expect_equal(extropy(p4u), -sum((1 - p4u) * log(1 - p4u)))
  # Degenerate distribution: every term vanishes, so J = 0
  expect_equal(extropy(p_degen), 0)
})

test_that("extropy: input validation", {
  expect_error(extropy("a"), "'p' must be a numeric vector")
  expect_error(extropy(0.5), "'p' must have at least 2 elements")
  expect_error(extropy(c(NA, 0.5)), "'p' must not contain NA")
  expect_error(extropy(c(-0.1, 1.1)), "must be in \\[0, 1\\]")
  expect_error(extropy(c(0.2, 0.3, 0.1)), "must sum to 1")
})

## ---- renyi_extropy -----------------------------------------------------------

test_that("renyi_extropy: correct values", {
  # Closed form at q = 2 from the (n - 1)-scaled definition
  n <- length(p3)
  q2_expected <- (-(n - 1) * log(n - 1) +
                    (n - 1) * log(sum((1 - p3)^2))) / (1 - 2)
  expect_equal(renyi_extropy(p3, 2), q2_expected)

  # Limit q -> 1 recovers the classical extropy
  expect_equal(renyi_extropy(p3, 1), extropy(p3), tolerance = 1e-8)
  expect_equal(renyi_extropy(p3, 1 + 1e-9), extropy(p3), tolerance = 1e-6)

  # With two outcomes, Renyi extropy coincides with Renyi entropy
  expect_equal(renyi_extropy(p2, 2), renyi_entropy(p2, 2), tolerance = 1e-10)
})

test_that("renyi_extropy: input validation", {
  expect_error(renyi_extropy("a", 2), "'p' must be a numeric vector")
  expect_error(renyi_extropy(p3, NaN), "'q' must not be NA")
  expect_error(renyi_extropy(p3, NA), "'q' must not be NA")
  expect_error(renyi_extropy(p3, -1), "'q' must be strictly positive")
  expect_error(renyi_extropy(c(0.2, 0.3, 0.1), 2), "must sum to 1")
})

## ---- tsallis_entropy ---------------------------------------------------------

test_that("tsallis_entropy: correct values", {
  # Closed form at q = 2: (1 - sum(p^q)) / (q - 1)
  q2_expected <- (1 - sum(p3^2)) / (2 - 1)
  expect_equal(tsallis_entropy(p3, 2), q2_expected)

  # Limit q -> 1 recovers Shannon entropy
  expect_equal(tsallis_entropy(p3, 1), shannon_entropy(p3), tolerance = 1e-8)
  expect_equal(tsallis_entropy(p3, 1 + 1e-9), shannon_entropy(p3),
               tolerance = 1e-6)

  # Two-outcome case at q = 2
  expect_equal(tsallis_entropy(p2, 2), (1 - sum(p2^2)) / 1)
})

test_that("tsallis_entropy: input validation", {
  expect_error(tsallis_entropy("a", 2), "'p' must be a numeric vector")
  expect_error(tsallis_entropy(p3, "a"), "'q' must be a single numeric value")
  expect_error(tsallis_entropy(p3, NA), "'q' must not be NA")
  expect_error(tsallis_entropy(p3, -2), "'q' must be strictly positive")
  expect_error(tsallis_entropy(c(0.2, 0.8, 0.1), 2), "must sum to 1")
})

## ---- shannon_extropy ---------------------------------------------------------

test_that("shannon_extropy: correct values and alias", {
  # Alias check: shannon_extropy() must agree with extropy() everywhere
  expect_equal(shannon_extropy(p3), extropy(p3))
  expect_equal(shannon_extropy(p4u), extropy(p4u))
  expect_equal(shannon_extropy(p2), extropy(p2))
})

test_that("shannon_extropy: input validation", {
  expect_error(shannon_extropy("a"), "'p' must be a numeric vector")
  expect_error(shannon_extropy(0.5), "'p' must have at least 2 elements")
  expect_error(shannon_extropy(c(NA, 0.5)), "'p' must not contain NA")
  expect_error(shannon_extropy(c(-0.1, 1.1)), "must be in \\[0, 1\\]")
  expect_error(shannon_extropy(c(0.2, 0.3, 0.1)), "must sum to 1")
})

## ---- joint_entropy -----------------------------------------------------------

test_that("joint_entropy: correct values", {
  Pxy <- matrix(c(0.2, 0.3, 0.1, 0.4), nrow = 2, byrow = TRUE)
  p_vec <- as.vector(Pxy)
  # Joint entropy is the Shannon entropy of the flattened joint distribution
  expect_equal(joint_entropy(Pxy),
               -sum(p_vec[p_vec > 0] * log(p_vec[p_vec > 0])))

  # Additivity under independence: H(X,Y) = H(X) + H(Y)
  px <- c(0.4, 0.6)
  py <- c(0.3, 0.7)
  Pxy_indep <- outer(px, py)
  expect_equal(joint_entropy(Pxy_indep),
               shannon_entropy(px) + shannon_entropy(py),
               tolerance = 1e-10)
})

test_that("joint_entropy: input validation", {
  expect_error(joint_entropy(c(0.2, 0.8)),
               "'joint_matrix' must be a numeric matrix")
  expect_error(joint_entropy(matrix(c(NA, 0.5, 0.3, 0.2), 2, 2)),
               "'joint_matrix' must not contain NA")
  expect_error(joint_entropy(matrix(c(-0.1, 0.5, 0.3, 0.3), 2, 2)),
               "must be non-negative")
  expect_error(joint_entropy(matrix(c(0.2, 0.3, 0.1, 0.1), 2, 2)),
               "must sum to 1")
})

## ---- conditional_entropy -----------------------------------------------------

test_that("conditional_entropy: correct values", {
  Pxy <- matrix(c(0.2, 0.3, 0.1, 0.4), nrow = 2, byrow = TRUE)
  px <- rowSums(Pxy)
  # Chain rule: H(Y|X) = H(X,Y) - H(X)
  expect_equal(conditional_entropy(Pxy),
               joint_entropy(Pxy) - shannon_entropy(px))

  # Under independence conditioning changes nothing: H(Y|X) = H(Y)
  px <- c(0.4, 0.6)
  py <- c(0.3, 0.7)
  Pxy_indep <- outer(px, py)
  expect_equal(conditional_entropy(Pxy_indep), shannon_entropy(py),
               tolerance = 1e-10)

  # Conditional entropy is never negative
  expect_gte(conditional_entropy(Pxy), 0)
})

test_that("conditional_entropy: input validation", {
  expect_error(conditional_entropy(c(0.5, 0.5)),
               "'joint_matrix' must be a numeric matrix")
  expect_error(
    conditional_entropy(matrix(c(NA, 0.5, 0.3, 0.2), 2, 2)),
    "'joint_matrix' must not contain NA"
  )
  expect_error(
    conditional_entropy(matrix(c(-0.1, 0.5, 0.3, 0.3), 2, 2)),
    "must be non-negative"
  )
})

## ---- kl_divergence -----------------------------------------------------------

test_that("kl_divergence: correct values", {
  q_dist <- c(0.3, 0.4, 0.3)
  # Definition: KL(P||Q) = sum(p_i log(p_i / q_i))
  expected <- sum(p3 * log(p3 / q_dist))
  expect_equal(kl_divergence(p3, q_dist), expected)

  # Identical distributions have zero divergence
  expect_equal(kl_divergence(p3, p3), 0, tolerance = 1e-10)

  # KL divergence is not symmetric in its arguments
  expect_false(isTRUE(all.equal(kl_divergence(p3, q_dist),
                                kl_divergence(q_dist, p3))))

  # Non-negativity (Gibbs' inequality)
  expect_gte(kl_divergence(p3, q_dist), 0)
})

test_that("kl_divergence: warns on q_i = 0 where p_i > 0", {
  p_w <- c(0.5, 0.5)
  q_w <- c(1.0, 0.0)
  expect_warning(kl_divergence(p_w, q_w), "q_i = 0")
})

test_that("kl_divergence: input validation", {
  expect_error(kl_divergence("a", p3), "'p' must be a numeric vector")
  expect_error(kl_divergence(p3, "a"), "'q' must be a numeric vector")
  expect_error(kl_divergence(p3, c(0.5, 0.5)), "same length")
  expect_error(kl_divergence(c(NA, 0.5), c(0.5, 0.5)),
               "'p' must not contain NA")
  expect_error(kl_divergence(c(0.2, 0.3, 0.1), p3), "must sum to 1")
})

## ---- cross_entropy -----------------------------------------------------------

test_that("cross_entropy: correct values", {
  q_dist <- c(0.3, 0.4, 0.3)
  # Definition: H(P, Q) = -sum(p_i log q_i)
  expected <- -sum(p3 * log(q_dist))
  expect_equal(cross_entropy(p3, q_dist), expected)

  # Cross-entropy of P with itself is its Shannon entropy
  expect_equal(cross_entropy(p3, p3), shannon_entropy(p3), tolerance = 1e-10)

  # Decomposition: H(P, Q) = H(P) + KL(P||Q)
  expect_equal(cross_entropy(p3, q_dist),
               shannon_entropy(p3) + kl_divergence(p3, q_dist),
               tolerance = 1e-10)
})

test_that("cross_entropy: warns on q_i = 0 where p_i > 0", {
  p_w <- c(0.5, 0.5)
  q_w <- c(1.0, 0.0)
  expect_warning(cross_entropy(p_w, q_w), "q_i = 0")
})

test_that("cross_entropy: input validation", {
  expect_error(cross_entropy("a", p3), "'p' must be a numeric vector")
  expect_error(cross_entropy(p3, "a"), "'q' must be a numeric vector")
  expect_error(cross_entropy(p3, c(0.5, 0.5)), "same length")
  expect_error(cross_entropy(c(0.2, 0.3, 0.1), p3), "must sum to 1")
})

## ---- js_divergence -----------------------------------------------------------

test_that("js_divergence: correct values", {
  q_dist <- c(0.3, 0.4, 0.3)
  # Definition via the mixture M = (P + Q) / 2
  m <- 0.5 * (p3 + q_dist)
  expected <- 0.5 * kl_divergence(p3, m) + 0.5 * kl_divergence(q_dist, m)
  expect_equal(js_divergence(p3, q_dist), expected)

  # Unlike KL, JS is symmetric
  expect_equal(js_divergence(p3, q_dist), js_divergence(q_dist, p3),
               tolerance = 1e-10)

  # Identical inputs give zero
  expect_equal(js_divergence(p3, p3), 0, tolerance = 1e-10)

  # Bounded above by log(2), attained for disjoint supports
  expect_lte(js_divergence(c(1, 0), c(0, 1)), log(2) + 1e-10)

  # Non-negative
  expect_gte(js_divergence(p3, q_dist), 0)
})

test_that("js_divergence: input validation", {
  expect_error(js_divergence("a", p3), "'p' must be a numeric vector")
  expect_error(js_divergence(p3, "a"), "'q' must be a numeric vector")
  expect_error(js_divergence(p3, c(0.5, 0.5)), "same length")
  expect_error(js_divergence(c(NA, 0.5), c(0.5, 0.5)),
               "'p' must not contain NA")
  expect_error(js_divergence(c(0.2, 0.3, 0.1), p3), "must sum to 1")
})

## ---- normalized_entropy ------------------------------------------------------

test_that("normalized_entropy: correct values", {
  # Uniform distribution normalizes to exactly 1
  expect_equal(normalized_entropy(p4u), 1, tolerance = 1e-10)
  # Degenerate distribution normalizes to exactly 0
  expect_equal(normalized_entropy(p_degen), 0, tolerance = 1e-10)

  # Any other distribution lands in [0, 1]
  val <- normalized_entropy(p3)
  expect_gte(val, 0)
  expect_lte(val, 1)

  # Matches the definition H(p) / log(n)
  expect_equal(normalized_entropy(p3),
               shannon_entropy(p3) / log(length(p3)),
               tolerance = 1e-10)
})

test_that("normalized_entropy: inherits validation from shannon_entropy", {
  expect_error(normalized_entropy("a"))
  expect_error(normalized_entropy(c(0.2, 0.3, 0.1)))
})

## ---- max_renyi_extropy -------------------------------------------------------

test_that("max_renyi_extropy: correct values", {
  # Closed form: (n - 1) * log(n / (n - 1))
  expect_equal(max_renyi_extropy(2), (2 - 1) * log(2 / 1), tolerance = 1e-10)
  expect_equal(max_renyi_extropy(3), 2 * log(3 / 2), tolerance = 1e-10)
  expect_equal(max_renyi_extropy(10), 9 * log(10 / 9), tolerance = 1e-10)

  # The maximum is strictly positive
  expect_gt(max_renyi_extropy(5), 0)

  # Cross-check: the uniform distribution attains the maximum
  n <- 4L
  expect_equal(max_renyi_extropy(n), renyi_extropy(rep(1 / n, n), q = 2),
               tolerance = 1e-10)
})

test_that("max_renyi_extropy: input validation", {
  expect_error(max_renyi_extropy("a"), "'n' must be a single numeric value")
  expect_error(max_renyi_extropy(NA), "'n' must not be NA")
  expect_error(max_renyi_extropy(1.5), "'n' must be an integer value")
  expect_error(max_renyi_extropy(1), "'n' must be at least 2")
  expect_error(max_renyi_extropy(-3), "'n' must be at least 2")
  expect_error(max_renyi_extropy(c(3, 4)), "'n' must be a single numeric value")
})

## ---- conditional_renyi_extropy -----------------------------------------------

test_that("conditional_renyi_extropy: correct values", {
  Pxy <- matrix(c(0.2, 0.3, 0.1, 0.4), nrow = 2, byrow = TRUE)

  # At q = 2 the result is a single finite numeric value
  result <- conditional_renyi_extropy(Pxy, q = 2)
  expect_true(is.numeric(result) && length(result) == 1L)

  # Limit q -> 1 should approach the conditional Shannon extropy,
  # i.e. joint extropy minus marginal extropy
  result_lim <- conditional_renyi_extropy(Pxy, q = 1 + 1e-9)
  Px <- rowSums(Pxy)
  cond_ext_expected <- extropy(as.vector(Pxy)) - extropy(Px)
  expect_equal(result_lim, cond_ext_expected, tolerance = 1e-5)

  # Also works for a 3x3 joint distribution
  Pxy3 <- matrix(c(0.1, 0.05, 0.15,
                   0.05, 0.2, 0.1,
                   0.1, 0.15, 0.1),
                 nrow = 3, byrow = TRUE)
  result3 <- conditional_renyi_extropy(Pxy3, q = 2)
  expect_true(is.numeric(result3) && length(result3) == 1L)
})

test_that("conditional_renyi_extropy: input validation", {
  Pxy <- matrix(c(0.2, 0.3, 0.1, 0.4), nrow = 2, byrow = TRUE)
  expect_error(conditional_renyi_extropy(c(0.5, 0.5), 2),
               "'Pxy' must be a numeric matrix")
  expect_error(conditional_renyi_extropy(Pxy, NA), "'q' must not be NA")
  expect_error(conditional_renyi_extropy(Pxy, -1),
               "'q' must be strictly positive")
  expect_error(
    conditional_renyi_extropy(matrix(c(NA, 0.5, 0.3, 0.2), 2, 2), 2),
    "'Pxy' must not contain NA"
  )
  expect_error(
    conditional_renyi_extropy(matrix(c(-0.1, 0.5, 0.3, 0.3), 2, 2), 2),
    "must be non-negative"
  )
  expect_error(
    conditional_renyi_extropy(matrix(c(0.1, 0.2, 0.1, 0.1), 2, 2), 2),
    "must sum to 1"
  )
})