# Example dataset with categorical, ordinal, and continuous variables
set.seed(123)
data_mix <- data.frame(
  cat_var = factor(sample(letters[1:3], 100, replace = TRUE)),      # Nominal categorical variable
  ord_var = factor(sample(c("low", "medium", "high"), 100, replace = TRUE),
                   levels = c("low", "medium", "high"),
                   ordered = TRUE),                                  # Ordinal variable
  cont_var1 = rnorm(100),                                            # Continuous variable 1
  cont_var2 = runif(100)                                             # Continuous variable 2
)
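# Optional base-R check (not part of the GIBmix interface): confirm the variable
# types before clustering, so factors, ordered factors, and numerics are treated
# as intended.
str(data_mix)  # cat_var: factor, ord_var: ordered factor, cont_var1/cont_var2: numeric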
# Perform Mixed-Type Fuzzy Clustering with Generalised IB
result_mix <- GIBmix(X = data_mix, ncl = 3, beta = 2, alpha = 0.5, nstart = 5)
# Print clustering results
print(result_mix$Cluster) # Cluster membership matrix
print(result_mix$Entropy) # Entropy of final clustering
print(result_mix$CondEntropy) # Conditional entropy of final clustering
print(result_mix$MutualInfo) # Mutual information between Y and T
# Summary of output
summary(result_mix)
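# Illustrative sketch, not part of the package output: harden the fuzzy
# memberships, assuming result_mix$Cluster is an n x ncl matrix of membership
# probabilities with rows summing to 1.
hard_mix <- apply(result_mix$Cluster, 1, which.max)  # most probable cluster per observation
table(hard_mix)                                      # hardened cluster sizes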
# Simulated categorical data example
set.seed(123)
data_cat <- data.frame(
  Var1 = as.factor(sample(letters[1:3], 200, replace = TRUE)),       # Nominal variable
  Var2 = as.factor(sample(letters[4:6], 200, replace = TRUE)),       # Nominal variable
  Var3 = factor(sample(c("low", "medium", "high"), 200, replace = TRUE),
                levels = c("low", "medium", "high"), ordered = TRUE) # Ordinal variable
)
# Perform Fuzzy Clustering on categorical data with Generalised IB
result_cat <- GIBmix(X = data_cat, ncl = 2, beta = 25, alpha = 0.75, lambda = -1, nstart = 5)
# Print clustering results
print(result_cat$Cluster) # Cluster membership matrix
print(result_cat$Entropy) # Entropy of final clustering
print(result_cat$CondEntropy) # Conditional entropy of final clustering
print(result_cat$MutualInfo) # Mutual information between Y and T
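# Illustrative sketch (assumes result_cat$Cluster is an n x ncl membership
# matrix): harden the memberships and cross-tabulate against the ordinal
# variable to see how the two clusters relate to its levels.
hard_cat <- apply(result_cat$Cluster, 1, which.max)
table(Cluster = hard_cat, Var3 = data_cat$Var3)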
# Simulated continuous data example
set.seed(123)
# Continuous data with 200 observations, 5 features
data_cont <- as.data.frame(matrix(rnorm(1000), ncol = 5))
# Perform Fuzzy Clustering on continuous data with Generalised IB
result_cont <- GIBmix(X = data_cont, ncl = 2, beta = 50, alpha = 0.75, s = -1, nstart = 5)
# Print clustering results
print(result_cont$Cluster) # Cluster membership matrix
print(result_cont$Entropy) # Entropy of final clustering
print(result_cont$CondEntropy) # Conditional entropy of final clustering
print(result_cont$MutualInfo) # Mutual information between Y and T
plot(result_cont, type = "sizes") # Bar plot of cluster sizes (hardened assignments)
plot(result_cont, type = "info") # Information-theoretic quantities plot