-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathExp3_Main.R
168 lines (140 loc) · 5.12 KB
/
Exp3_Main.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
## Absolutely necessary to run initialization BEFORE this file
## (it defines Avg.Gabor, mode, rescale, bal_strat, label.selector,
## formula, ics, calcacc, avgacc).

## Import data ----
data <- read.csv("LIDC dataset with full annotations.csv", header = TRUE)
# Image features: selected annotation columns plus averaged Gabor features.
img_fs <- data.frame(data[, c(5:18, 43:69)], Avg.Gabor(data))

# Column names for the per-nodule results data frame built each trial.
col <- c(
  "Mode 1", "Mode 2", "Mode 3", "Max Mode", "Set",
  "I1 Label", "I1 Pred", "I1 Label Num", "I2 Label",
  "I2 Pred", "I2 Label Num", "I3 Label", "I3 Pred",
  "I3 Label Num", "I4 Label", "I4 Pred", "A1.Pred",
  "A2.Pred", "A3.Pred", "A4.Pred"
)

# Shared rpart control settings.
cont <- rpart.control(minsplit = 250, minbucket = 58, maxdepth = 5)

# Number of repeated trials and a console progress bar over them.
t <- 20
pb <- txtProgressBar(min = 0, max = t, style = 3)

# Preallocated per-trial accumulators.
allaccs <- vector("list", t)
allresults <- vector("list", t)
allmodels <- vector("list", t)
labelorder <- vector("list", t)
## Main experiment loop: one pass per trial. Each trial shuffles the four
## rater labels per nodule, runs 4 "selective" trees (labels deepen only for
## misclassified nodules) and 4 "non-selective" consensus trees, and records
## per-set accuracies.
for (k in 1:t){
# Trial-specific seed so the label shuffle and split are reproducible.
set.seed(k)
# 810 nodules x 20 result columns (modes, set membership, labels, preds).
results <- data.frame(data.frame(matrix(vector(), 810, 20, dimnames=list(c(), col))))
## Process labels
# Columns 70:73 hold the four raters' labels for each nodule;
# iterative labeling is applied to both trial and test sets.
labels <- data[,70:73]
# Shuffle each row's rater order so iteration r draws a random rater.
labels <- t(apply(labels,1,sample))
# Keep the rescaled shuffled order for post-analysis of the best trial.
labelorder[[k]] = apply(labels,c(1,2),rescale)
# NOTE: this may not be perfect, since the actual labels are rescaled
# AFTER the mode is taken below.
# Cumulative modes per row: first label alone, then mode of the first 2,
# first 3, and all 4 labels. (`mode` is a project helper, not base R.)
labels <- cbind(labels[,1],apply(labels[,1:2],1,mode),
apply(labels[,1:3],1,mode),apply(labels,1,mode))
labels <- apply(labels,c(1,2),rescale)
results[1:4] <- labels
## Label tracker: how many labels each nodule has consumed so far.
label.tracker <- rep(1,nrow(labels))
labelsum <- list()
# Separate training, testing and validation indices
# (bal_strat is a project helper; presumably a balanced stratified split).
index <- bal_strat(labels)
# Per-split containers for image features, labels and data frames.
train = NULL
test = NULL
valid = NULL
models = vector(mode="list",length=4)
train$img <- as.matrix(img_fs[index$train,])
test$img <- as.matrix(img_fs[index$test,])
valid$img <- as.matrix(img_fs[index$valid,])
results[index$train, "Set"] <- "train"
results[index$test, "Set"] <- "test"
results[index$valid, "Set"] <- "valid"
## Selective (iterative) iterations
for(r in 1:4)
{
set.seed(r)
# Per-nodule label for this iteration — presumably picks, for each row,
# the cumulative-mode column indicated by label.tracker; confirm against
# label.selector in the initialization file.
iterlabel <- label.selector(labels,label.tracker)
results[paste("I", r, ".Label", sep = "")] <- iterlabel
train$iterl <- iterlabel[index$train]
test$iterl <- iterlabel[index$test]
valid$iterl <- iterlabel[index$valid]
# Bind label + features into data frames rpart can consume.
train$data <- data.frame(cbind(train$iterl, train$img))
colnames(train$data)[1] <- "label"
test$data <- data.frame(cbind(test$iterl, test$img))
colnames(test$data)[1] <- "label"
valid$data <- data.frame(cbind(valid$iterl, valid$img))
colnames(valid$data)[1] <- "label"
# THIS IS WHERE CLASSIFICATION ACTUALLY HAPPENS
# (`formula` and the per-iteration controls `ics` come from initialization.)
model <- rpart(formula, method = "class", data = train$data, control = ics[r])
models[[r]] <- model
# Predict over ALL nodules so every set's predictions land in results.
results[paste("I", r, ".Pred", sep = "")] <-
as.integer(predict(model, img_fs, type="class"))
# Total labels consumed so far across the indices in use.
labelsum[[r]] = sum(label.tracker[c(index$train, index$test, index$valid)])
## Update the label tracker: nodules misclassified this iteration get
## one more label next iteration (the "selective" relabeling step).
if(r!=4)
{
miss.iter <- which(results[,paste("I", r, ".Pred", sep = "")]!=
results[,paste("I", r, ".Label", sep = "")])
label.tracker[miss.iter] <- label.tracker[miss.iter]+1
results[paste("I", r, ".Label.Num", sep = "")] <- label.tracker
}
}
## Comparison: non-selective consensus classification.
## Every nodule uses the same label depth a (no per-nodule selection).
for(a in 1:4){
conslabel <- label.selector(labels,rep(a, times = length(labels[,1])))
train$data <- data.frame(cbind(conslabel[index$train], train$img))
colnames(train$data)[1] <- "label"
model <- rpart(formula, method = "class", data = train$data, control = ics[a+4])
results[paste("A", a, ".Pred", sep = "")] <-
as.integer(predict(model, img_fs, type="class"))
models[[a+4]] = model
}
# Per-set accuracies for this trial (project helper; g=8 — presumably the
# number of models scored; confirm against calcacc's definition).
allaccs[[k]] = calcacc(results, index, g=8)
allmodels[[k]] = models
allresults[[k]] = results
setTxtProgressBar(pb, k)
}
## Pick the "most representative" trial: the one whose iteration-4 test
## accuracy is closest to the mean over all trials (latest trial on ties).
test4accs <- vapply(allaccs, function(x) as.numeric(x["Test", "I4"]), numeric(1))
# Absolute deviation from the mean test accuracy. Named dev_from_mean so
# it no longer shadows stats::deviance(), and vectorized instead of the
# previous list-of-scalars lapply chain.
dev_from_mean <- abs(test4accs - mean(test4accs))
# max(which(...)) preserves the original tie-break: last minimal-deviance trial.
best.trial <- max(which(dev_from_mean == min(dev_from_mean)))
View(allresults[[best.trial]])
# Persist the best trial's results and its shuffled-label order.
best <- allresults[[best.trial]]
save(best, file = "best.Rda")
bestorder <- labelorder[[best.trial]]
save(bestorder, file = "bestorder.Rda")
# Accuracies averaged across all trials (project helper).
agg <- avgacc(allaccs)
View(agg)
## Export each of the best trial's eight trees as a standalone PNG.
## Trees 1-4 are selective (iterative labels); 5-8 are non-selective.
for (idx in 1:8) {
  png(paste0("tree", idx, ".png"))
  plot_title <- if (idx >= 5) {
    paste("Non-Selective Tree", idx - 4)
  } else {
    paste("Selective Tree", idx)
  }
  fit <- allmodels[[best.trial]][[idx]]
  rpart.plot(
    fit,
    under = TRUE,
    extra = 1,
    box.col = c("palegreen3", "yellow", "indianred2")[fit$frame$yval],
    main = plot_title
  )
  dev.off()
}
## 2x2 grid of the four selective trees on a single PDF page.
pdf("selectiveTrees.pdf")
par(mfrow = c(2, 2))
# Text scale so four trees fit one page. Hoisted out of the loop and
# renamed: the original `c = .75` shadowed base::c() (it only kept working
# because R skips non-function objects when resolving a call).
cex_scale <- 0.75
for (i in 1:4) {
  tree <- allmodels[[best.trial]][[i]]
  rpart.plot(tree, under = TRUE, extra = 2,
             box.col = c("palegreen3", "yellow", "indianred2")[tree$frame$yval],
             main = paste("Selective Tree", i), cex = cex_scale)
}
dev.off()
## 2x2 grid of the four non-selective (consensus) trees on one PDF page.
pdf("nonselectiveTrees.pdf")
par(mfrow = c(2, 2))
# Hoisted out of the loop and renamed: the original `c = .75` shadowed
# base::c() inside the loop body.
cex_scale <- 0.75
for (i in 5:8) {
  tree <- allmodels[[best.trial]][[i]]
  rpart.plot(tree, under = TRUE, extra = 2,
             box.col = c("palegreen3", "yellow", "indianred2")[tree$frame$yval],
             main = paste("Non-Selective Tree", i - 4), cex = cex_scale)
}
dev.off()