-
Notifications
You must be signed in to change notification settings - Fork 1
/
var_importance_Niter.R
159 lines (128 loc) · 5.48 KB
/
var_importance_Niter.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
##-------------------------------------------------------------------------------
## var_importance_Niter: test contribution of postnatal age (PNA) feature in
## gradient boosting algorithm assessing both relative
## influence and loss in performance from a permutation test
##
## Syntax: var_importance_Niter()
##
## Inputs:
## none
##
## Outputs:
## none
##
## REQUIRES:
## gbm (version 2.1.3)
## foreach (version 1.4.4)
## doParallel (version 1.0.11)
##
## and local functions:
## utils/load_feature_set.R
## utils/set_parameters_EMA.R
## John M. O' Toole, University College Cork
## Started: 23-02-2018
##
## last update: Time-stamp: <2019-01-10 16:20:41 (otoolej)>
##-------------------------------------------------------------------------------
var_importance_Niter <- function(){
    ## Assess the importance of the postnatal-age (EEG_PNA) feature by
    ## repeatedly training a gradient-boosting model and recording both its
    ## relative-influence rank and the loss in performance from a
    ## permutation test.
    ##
    ## Inputs:  none
    ## Outputs: none (summary statistics are printed to the console)

    ##-------------------------------------------------------------------
    ## 0. load packages and local functions
    ##-------------------------------------------------------------------
    data_dir <- './data/'
    utils_dir <- './utils/'
    library('gbm')
    library('foreach')
    library('doParallel')
    source(paste0(utils_dir, 'load_feature_set.R'))
    source(paste0(utils_dir, 'set_parameters_EMA.R'))

    ##-------------------------------------------------------------------
    ## 1. load the parameters
    ##    (see set_parameters_EMA.R for details)
    ##-------------------------------------------------------------------
    params <- set_parameters_EMA()

    ##-------------------------------------------------------------------
    ## 2. load the feature set
    ##-------------------------------------------------------------------
    fin_feat_set <- paste0(data_dir, 'subset_features_v3.csv')
    dfFeats <- load_feature_set(fin_feat_set)
    ## remove the subject code so it is never used as a predictor:
    dfFeats <- droplevels(dfFeats[, !(names(dfFeats) %in% "c_code")])

    ## speed up processing by using multiple CPU cores (max. cores - 1,
    ## but never fewer than 1 on a single-core machine):
    n_cores <- max(1L, detectCores() - 1L)
    cl <- makeCluster(n_cores)
    registerDoParallel(cl)
    ## release the cluster even if an iteration errors:
    on.exit(stopCluster(cl), add = TRUE)

    ## log-transform gestational age (GA), the regression target:
    LOG_TRANSFORM <- TRUE
    if (LOG_TRANSFORM) {
        dfFeats$GA <- log(dfFeats$GA)
    }

    ## set number of Monte-Carlo iterations:
    Niter <- 1000

    ##-------------------------------------------------------------------
    ## 3. Monte-Carlo type simulation, train and assess variable importance:
    ##-------------------------------------------------------------------
    r.all <- foreach(p = seq_len(Niter),
                     .combine = rbind,
                     .export = 'train_var_importance',
                     .packages = c('gbm')) %dopar% {
        train_var_importance(dfFeats, params, 1)
    }

    ##-------------------------------------------------------------------
    ## 4. show results:
    ##-------------------------------------------------------------------
    cat('\n __ RESULTS __\n')
    cat(sprintf(' + rank: mean (SD), (range) = %.2f (%.2f) (%.2f, %.2f)\n',
                mean(r.all[, 1]), sd(r.all[, 1]),
                min(r.all[, 1]), max(r.all[, 1])))
    cat(sprintf(' + reduction in performance: mean (SD), (range) = %.2f%% (%.2f)%% (%.2f, %.2f)%%\n',
                mean(r.all[, 3]), sd(r.all[, 3]),
                min(r.all[, 3]), max(r.all[, 3])))
    cat('\n')
}
train_var_importance <- function(dfData, params, DBverbose=0){
    ##-------------------------------------------------------------------------------
    ## train_var_importance: assess importance of the PNA (postnatal age) feature
    ##                       when building a gradient boosting model
    ##
    ## Syntax: performance_PNA <- train_var_importance(dfData, params, DBverbose)
    ##
    ## Inputs:
    ##     dfData    - data frame of features and gestational age (GA)
    ##     params    - parameter set for gradient boosting algorithm
    ##                 (see set_parameters_EMA.R)
    ##     DBverbose - yes/no (1/0) verbose (default=0)
    ##
    ## Outputs:
    ##     performance_PNA - numeric array:
    ##                       [1] = rank of PNA by relative influence
    ##                       [2] = total number of features
    ##                       [3] = % drop in performance when applying the
    ##                             permutation test to PNA
    ##-------------------------------------------------------------------------------
    ## a) train with the gradient boosting algorithm:
    gboost <- gbm(GA ~ ., data=dfData,
                  distribution=params$loss_fn,
                  n.trees=params$N_trees,
                  shrinkage=params$shrinkage,
                  interaction.depth=params$int_depth,
                  bag.fraction=params$bag_fraction)

    ## b) find relative influence and permutation loss for the PNA feature:
    rel_inf <- summary(gboost, method=relative.influence, plotit=FALSE)
    perm_inf <- summary(gboost, method=permutation.test.gbm, plotit=FALSE)

    pna_rel_ranking <- which(rel_inf[, 1] == "EEG_PNA")
    ## fail with a clear message (not a cryptic zero-length-replacement error)
    ## if the PNA feature is missing from the feature set:
    if (length(pna_rel_ranking) == 0) {
        stop("feature 'EEG_PNA' not found in the feature set", call. = FALSE)
    }
    pna_perm_loss <- perm_inf[perm_inf$var == "EEG_PNA", 2]

    performance_PNA <- numeric(3)
    performance_PNA[1] <- pna_rel_ranking
    performance_PNA[2] <- nrow(rel_inf)
    performance_PNA[3] <- pna_perm_loss

    if (DBverbose) {
        cat(sprintf('\n ** EEG_PNA ranking = %d/%d\n',
                    performance_PNA[1], performance_PNA[2]))
        cat(sprintf(' EEG_PNA permutation loss in performance = %.2f%%\n',
                    performance_PNA[3]))
    }
    return(performance_PNA)
}