# Fit a linear regression of y on x1 and x2.
# data.frame() preserves each column's type; as.data.frame(cbind(...))
# first coerces everything to a single matrix type, silently converting
# factors/characters to numeric/character and corrupting the fit.
lm_model <- lm(y ~ x1 + x2, data = data.frame(y = y, x1 = x1, x2 = x2))
# Logistic regression (binary response y on x1, x2).
# data.frame() keeps column types; as.data.frame(cbind(...)) coerces all
# inputs to one matrix type, which breaks factor/character predictors.
glm_model <- glm(y ~ x1 + x2, family = binomial(link = "logit"),
                 data = data.frame(y = y, x1 = x1, x2 = x2))
# k-means clustering of X into m clusters.
# nstart > 1 runs multiple random initialisations and keeps the best
# (lowest within-cluster SS) solution; the default nstart = 1 often lands
# in a poor local optimum. Call set.seed() beforehand for reproducibility.
kmeans_model <- kmeans(x = X, centers = m, nstart = 25)
# k-nearest-neighbour classification (requires the 'class' package):
# label each row of X_test by majority vote of its K nearest training rows.
knn_model <- knn(test = X_test, train = X_train, k = K, cl = as.factor(labels))
# Naive Bayes classifier (requires the 'e1071' package).
# data.frame() preserves column types; as.data.frame(cbind(...)) coerces
# the response to numeric, while naiveBayes needs a factor response for
# classification. NOTE(review): factor(y) assumes y holds class labels —
# confirm against how y is built upstream.
naiveBayes_model <- naiveBayes(y ~ x1 + x2,
                               data = data.frame(y = factor(y), x1 = x1, x2 = x2))
# Classification tree (requires the 'rpart' package); method = "class"
# forces classification. data.frame() preserves column types, unlike
# as.data.frame(cbind(...)), which coerces everything to one matrix type.
cart_model <- rpart(y ~ x1 + x2, data = data.frame(y = y, x1 = x1, x2 = x2),
                    method = "class")
# Support vector machine with an RBF kernel (requires the 'e1071' package);
# as.factor(labels) makes svm() fit a classifier rather than a regressor.
svm_model <- svm(y = as.factor(labels), x = X, cost = C, kernel = "radial")
# Single-hidden-layer neural network (requires the 'neuralnet' package).
# data.frame() keeps each predictor as its own typed column instead of
# forcing all inputs through a single coerced matrix via cbind().
ann_model <- neuralnet(y ~ x1 + x2 + x3,
                       data = data.frame(y = y, x1 = x1, x2 = x2, x3 = x3),
                       hidden = 1)
Predictions can then be made with the following call:
# Score new data with the trained network. ann_model was fit on x1, x2
# AND x3, so all three predictors must be supplied; the original call
# omitted x3, making compute() fail on the missing column.
p <- compute(ann_model, data.frame(x1 = x1, x2 = x2, x3 = x3))
# Association-rule mining (requires the 'arules' package); the binary
# matrix is coerced to transactions. 'support' and 'confidence' are the
# full names of the abbreviated 'supp'/'conf' parameter slots.
apriori_model <- apriori(
  as.matrix(sampleDataset),
  parameter = list(support = 0.8, confidence = 0.9)
)
# AdaBoost classifier (requires the 'ada' package): boost weak learners on
# the feature matrix X against the class vector labels.
boost_model <- ada(y = labels, x = X)
For most of the models above, including the linear regression model, the following function can be used to make predictions:
# Column names in newdata must match the variable names used in the model
# formula (x1, x2). as.data.frame(cbind(x1_test, x2_test)) names the columns
# "x1_test"/"x2_test", so predict() cannot find x1/x2 and errors out (or
# silently falls back to the training data for some model classes).
predicted_values <- predict(some_model,
                            newdata = data.frame(x1 = x1_test, x2 = x2_test))
Last updated: 2nd May, 2024 The success of machine learning models often depends on the…
When working on a machine learning project, one of the key challenges faced by data…
Last updated: 1st May, 2024 The bias-variance trade-off is a fundamental concept in machine learning…
Last updated: 1st May, 2024 As a data scientist, understanding the nuances of various cost…
Last updated: 1st May, 2024 In this post, you will learn the concepts related to…
Last updated: 26th April, 2024 In this blog post, we will discuss the logistic regression…