
I am running a benchmark experiment on a task using a nested resampling strategy (https://mlr-org.github.io/mlr-tutorial/devel/html/nested_resampling/index.html). I create each learner with an inner resampling strategy for tuning. For example, here is a crude one for classif.C50:

### C50 ############################################################################################################################
classif_c50 = makeLearner("classif.C50", predict.type="prob")

##The wrappers are presented in reverse order of application
###One-Hot Encoding
classif_c50 = makeDummyFeaturesWrapper(classif_c50, method = "1-of-n")
###Missing Data Imputation
classif_c50 = makeImputeWrapper(classif_c50,
  classes = list(numeric = imputeConstant(-99999),
                 integer = imputeConstant(-99999),
                 factor = imputeConstant("==Missing==")),
  dummy.type = "numeric", dummy.classes = c("numeric", "integer"))

##### Tuning #####
inner_resamp = makeResampleDesc("CV", iters=3)
ctrl = makeTuneControlRandom(maxit=3L)
hypss = makeParamSet(
  makeIntegerParam("trials", lower = 1, upper = 30)
  ,makeNumericParam("CF", lower = 0, upper = 1)
)
classif_c50 = makeTuneWrapper(classif_c50, resampling = inner_resamp,
  par.set = hypss, control = ctrl,
  measures = list(auc, logloss, f1, ber, acc, bac, mmce, timetrain),
  show.info = TRUE)
### C50 ############################################################################################################################

I then create a benchmark experiment with an outer resampling strategy as follows (bench_data is my data.frame):

outer_resampling = makeFixedHoldoutInstance(train_indices, valid_indices, nrow(bench_data))
trainTask = makeClassifTask(id = training_task_name, data = bench_data,
  target = target_feature, positive = "1", fixup.data = "warn", check.data = TRUE)
res = benchmark(tasks = trainTask, learners = lrns, resampling = outer_resampling,
  measures = list(auc, logloss, f1, ber, acc, bac, mmce, timetrain),
  show.info = TRUE)

I cannot find a way, using the getBMR* functions, to extract the inner-resampling results. Is there a way to do this that I am missing?

EDIT: Reproducible Example

# Load required packages
library(mlr)
#library(dplyr)
library(parallelMap)
library(parallel)

# Algorithms
iterations = 10L
cv_iters = 2
### classif.gamboost ############################################################################################################################
classif_gamboost = makeLearner("classif.gamboost", predict.type="prob")

##The wrappers are presented in reverse order of application
###One-Hot Encoding
classif_gamboost = makeDummyFeaturesWrapper(classif_gamboost, method = "1-of-n")
###Missing Data Imputation
classif_gamboost = makeImputeWrapper(classif_gamboost,
  classes = list(numeric = imputeConstant(-99999),
                 integer = imputeConstant(-99999),
                 factor = imputeConstant("==Missing==")),
  dummy.type = "numeric", dummy.classes = c("numeric", "integer"))

##### Tuning #####
inner_resamp = makeResampleDesc("CV", iters=cv_iters)
ctrl = makeTuneControlRandom(maxit=iterations)
hypss = makeParamSet(
  makeDiscreteParam("baselearner", values=c("btree")), #,"bols","btree","bbs"
  makeIntegerParam("dfbase", lower = 1, upper = 5),
  makeDiscreteParam("family", values=c("Binomial")),
  makeDiscreteParam("mstop", values=c(10,50,100,250,500,1000))
)
classif_gamboost = makeTuneWrapper(classif_gamboost, resampling = inner_resamp,
  par.set = hypss, control = ctrl,
  measures = list(auc, logloss, f1, ber, acc, bac, mmce, timetrain),
  show.info = TRUE)
### classif.gamboost ############################################################################################################################

### Random Forest ############################################################################################################################
classif_rforest = makeLearner("classif.randomForestSRC", predict.type="prob")

##The wrappers are presented in reverse order of application
###One-Hot Encoding
classif_rforest = makeDummyFeaturesWrapper(classif_rforest, method = "1-of-n")
###Missing Data Imputation
classif_rforest = makeImputeWrapper(classif_rforest,
  classes = list(numeric = imputeConstant(-99999),
                 integer = imputeConstant(-99999),
                 factor = imputeConstant("==Missing==")),
  dummy.type = "numeric", dummy.classes = c("numeric", "integer"))

##### Tuning #####
inner_resamp = makeResampleDesc("CV", iters=cv_iters)
ctrl = makeTuneControlRandom(maxit=iterations)
hypss = makeParamSet(
  makeIntegerParam("mtry", lower = 1, upper = 30)
  ,makeIntegerParam("ntree", lower = 100, upper = 500)
  ,makeIntegerParam("nodesize", lower = 1, upper = 100)
)
classif_rforest = makeTuneWrapper(classif_rforest, resampling = inner_resamp,
  par.set = hypss, control = ctrl,
  measures = list(auc, logloss, f1, ber, acc, bac, mmce, timetrain),
  show.info = TRUE)
### Random Forest ############################################################################################################################

trainData = mtcars
target_feature = "am"
training_task_name = "trainingTask"
trainData[[target_feature]] = as.factor(trainData[[target_feature]])
trainTask = makeClassifTask(id = training_task_name, data = trainData,
  target = target_feature, positive = "1", fixup.data = "warn", check.data = TRUE)

train_indices = 1:25
valid_indices = 26:32
outer_resampling = makeFixedHoldoutInstance(train_indices, valid_indices, nrow(trainData))

no_of_cores = detectCores()
parallelStartSocket(no_of_cores, level = "mlr.tuneParams", logging = TRUE)
lrns = list(classif_gamboost, classif_rforest)
res = benchmark(tasks = trainTask, learners = lrns, resampling = outer_resampling,
  measures = list(logloss, auc, f1, ber, acc, bac, mmce, timetrain),
  show.info = TRUE)
parallelStop()

getBMRPerformances(res, as.df=TRUE)
Could you provide a reproducible example with an inbuilt data set? Did you try using resample with extract = getTuneResult and then getNestedTuneResultsOptPathDf? (See the sketch after these comments.) – missuse

Maybe this does the trick: models = getBMRModels(res, drop = TRUE), then tune.result = lapply(models, getTuneResult), and getting the tune path (with the inner resampling results) of the first outer iteration via as.data.frame(tune.result[[1]]$opt.path). – Giuseppe

@missuse Provided a reproducible example. – opening-the-black-box

@Giuseppe That throws an error: "Error in lapply(models, getTuneResult) : Assertion on 'object' failed: Must have class 'TuneModel', but has class 'list'." – opening-the-black-box
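
For reference, here is a rough sketch of the resample()-based route missuse mentions, run per tuned learner instead of through benchmark() (it assumes the objects from the reproducible example above):

# Run one tuned learner through the outer resampling directly;
# extract = getTuneResult keeps each outer iteration's TuneResult.
r = resample(classif_gamboost, trainTask, outer_resampling,
             extract = getTuneResult, show.info = FALSE)
# Flatten the inner-resampling optimization paths of all outer
# iterations into one data.frame.
getNestedTuneResultsOptPathDf(r)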

1 Answer


Here are two approaches to extracting the optimization paths from the benchmark object:

First, by getting the benchmark tune results:

z <- getBMRTuneResults(res)

and then going through the optimization paths of each tune result, extracting the hyperparameter effects with generateHyperParsEffectData:

lapply(z$trainingTask, function(x) generateHyperParsEffectData(x[[1]], partial.dep = TRUE))

or, to get just the data:

lapply(z$trainingTask, function(x) generateHyperParsEffectData(x[[1]], partial.dep = TRUE)$data)
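
If only the selected hyperparameters and their inner-resampling performance are needed, rather than the full path, each TuneResult exposes them directly (a sketch using the z from above):

# Best hyperparameter setting ($x) and its inner-CV performance ($y)
# per learner, for the first (here: only) outer iteration.
lapply(z$trainingTask, function(x) list(best = x[[1]]$x, perf = x[[1]]$y))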

Or, with a little modification of @Giuseppe's suggestion in the comments, by getting the benchmark models and then extracting the tune results per learner; applying getTuneResult to the first outer iteration of each model list avoids the error quoted above:

models <- getBMRModels(res, drop = TRUE)

# one TuneResult per learner (first outer iteration)
tune.result = lapply(models, function(x) getTuneResult(x[[1]]))

# inner-resampling optimization path of each learner
lapply(tune.result, function(x) as.data.frame(x$opt.path))
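
A small follow-up sketch (assuming the tune.result from above): the two learners are tuned over different hyperparameter sets, so it can help to tag each opt path with the learner it came from and keep them in a named list rather than binding them into one data.frame:

# Tag each optimization path with its learner ID; the learners have
# different hyperparameter columns, so keep the paths in a named list.
opt.paths = Map(function(tr, id) {
  df = as.data.frame(tr$opt.path)
  df$learner.id = id
  df
}, tune.result, names(tune.result))
str(opt.paths, max.level = 1)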