## ksvm.r
                   as.double(nu),
                   as.double(epsilon),
                   as.double(sigma),
                   as.integer(degree),
                   as.double(offset),
                   as.integer(0),        ## weightlabel
                   as.double(0),
                   as.integer(0),
                   as.double(cache),
                   as.double(tol),
                   as.integer(shrinking),
                   .kernelin,
                   environment(.kernelin))

        alpha(ret)[p] <- list(resv[-(li+lj+1)]) ## alpha*y without the zeros (smaller kernel matrices)
        coeff(ret)[p] <- list(alpha(ret)[[p]][alpha(ret)[[p]] != 0])
        ## store the SV indexes of the current problem for later use in predict
        alphaindex(ret)[p] <- list(c(indexes[[i]], indexes[[j]])[which(resv[-(li+lj+1)] != 0)])
        ## alphaindex(ret)[p] <- list(c(which(alpha(ret)[[p]][1:li]!=0)+li*(i-1), which(alpha(ret)[[p]][-(1:li)]!=0)+li+lj*(j-2)))
        ## save the indexes of all SVs in one vector (use unique!)
        svindex <- c(svindex, alphaindex(ret)[[p]])
        ## store the betas in a vector
        b(ret) <- c(b(ret), resv[li+lj+1])
        ## used to reconstruct indexes for the patterns matrix x from "indexes"
        problem[p] <- list(c(i, j))
        param(ret)$nu <- nu
      }
    }
  }

  if(type(ret) == "spoc-classification") { ## native multi-class formulation, one optimization problem
    if(!is.null(class.weights))
      weightedC <- weightlabels * rep(C, nclass(ret))
    else
      weightedC <- rep(C, nclass(ret))
    yd <- sort(y, method = "quick", index.return = TRUE)
    x <- x[yd$ix, ]
    count <- 0
    resv <- .Call("tron_optim",
                  as.double(t(x)),
                  as.integer(nrow(x)),
                  as.integer(ncol(x)),
                  as.double(rep(yd$x - 1, 2)),
                  as.integer(nclass(ret)),
                  as.integer(count),
                  as.integer(ktype),
                  as.integer(2),
                  as.double(C),
                  as.double(epsilon),
                  as.double(sigma),
                  as.integer(degree),
                  as.double(offset),
                  as.double(C),
                  as.double(2),         ## Cstep
                  as.integer(0),        ## weightlabel
                  as.double(0),
                  as.integer(0),
                  as.double(weightedC),
                  as.double(cache),
                  as.double(tol),
                  as.integer(10),       ## qpsize
                  as.integer(shrinking),
                  .kernelin,
                  environment(.kernelin))
    alpha(ret) <- t(matrix(resv, nclass(ret)))
    coeff(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x] != 0])
    names(coeff(ret)) <- lev(ret)
    alphaindex(ret) <- lapply(1:nclass(ret), function(x) which(alpha(ret)[,x] != 0))
    names(alphaindex(ret)) <- lev(ret)
    svindex <- which(alpha(ret) != 0)
    b(ret) <- 0
    param(ret)$C <- C
  }

  if(type(ret) == "kbb-classification") { ## bound-constrained multi-class formulation
    if(!is.null(class.weights))
      weightedC <- weightlabels * rep(C, nclass(ret))
    else
      weightedC <- rep(C, nclass(ret))
    yd <- sort(y, method = "quick", index.return = TRUE)
    x <- x[yd$ix, ]
    count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x == c]))
    resv <- .Call("tron_optim",
                  as.double(t(x)),
                  as.integer(nrow(x)),
                  as.integer(ncol(x)),
                  as.double(yd$x - 1),
                  as.integer(nclass(ret)),
                  as.integer(count),
                  as.integer(ktype),
                  as.integer(1),
                  as.double(C),
                  as.double(epsilon),
                  as.double(sigma),
                  as.integer(degree),
                  as.double(offset),
                  as.double(C),
                  as.double(2),         ## Cstep
                  as.integer(0),        ## weightlabel
                  as.double(0),
                  as.integer(0),
                  as.double(weightedC),
                  as.double(cache),
                  as.double(tol),
                  as.integer(10),       ## qpsize
                  as.integer(shrinking),
                  .kernelin,
                  environment(.kernelin))
    start <- rep(0, nclass(ret))
    start[1] <- 0
    start2 <- rep(0, nclass(ret))
    alpha(ret) <- matrix(0, nrow(x), nclass(ret) - 1)
    start[2:nclass(ret)] <- cumsum(count)[1:(nclass(ret) - 1)]
    for (i in 2:nclass(ret))
      start2[i] <- start2[i-1] + nrow(x) - count[i]
    ## unpack the flat solution vector resv into an nrow(x) x (nclass-1) alpha
    ## matrix: start[i] indexes class i's rows in the class-sorted x, and
    ## start2[k] is the offset of class k's alpha block inside resv
    p <- 1
    se <- 1:nclass(ret)
    for(i in se) {
      for(j in (start[i]+1):(start[i]+count[i])) {
        for(k in se[se < i])
          alpha(ret)[p, k] <- resv[start2[k] + j - count[k]]
        for(k in se[se > i])
          alpha(ret)[p, k-1] <- resv[start2[k] + j]
        p <- p + 1
      }
    }
    coeff(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x] != 0])
    alphaindex(ret) <- lapply(1:(nclass(ret)-1), function(x) which(alpha(ret)[,x] != 0))
    svindex <- which(resv != 0) ## have to figure out what to do with this...!
    b(ret) <- 0
  }
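  ## One-class novelty detection: the solver is handed the constant label +1
  ## for every sample (rep(1, m) below); the integer argument after ktype
  ## selects the problem type inside the optimizer (2 here, 3 and 4 for the
  ## eps- and nu-regression branches further down), and nu bounds the
  ## fraction of training points treated as outliers.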
  if(type(ret) == "one-classification") {
    resv <- .Call("smo_optim",
                  as.double(t(x)),
                  as.integer(nrow(x)),
                  as.integer(ncol(x)),
                  as.double(matrix(rep(1, m))),
                  as.double(matrix(rep(-1, m))),
                  as.integer(ktype),
                  as.integer(2),
                  as.double(C),
                  as.double(nu),
                  as.double(epsilon),
                  as.double(sigma),
                  as.integer(degree),
                  as.double(offset),
                  as.integer(0),        ## weightlabel
                  as.double(0),
                  as.integer(0),
                  as.double(cache),
                  as.double(tol),
                  as.integer(shrinking),
                  .kernelin,
                  environment(.kernelin))
    alpha(ret) <- resv[-(m+1)]
    coeff(ret) <- alpha(ret)[alpha(ret) != 0]
    alphaindex(ret) <- which(alpha(ret) != 0)
    ## here, as in the regression branches, svindex is the same as alphaindex
    svindex <- which(alpha(ret) != 0)
    b(ret) <- resv[m+1]
    param(ret)$nu <- nu
  }

  if(type(ret) == "eps-regression") {
    resv <- .Call("smo_optim",
                  as.double(t(x)),
                  as.integer(nrow(x)),
                  as.integer(ncol(x)),
                  as.double(y),
                  as.double(matrix(rep(-1, m))),
                  as.integer(ktype),
                  as.integer(3),
                  as.double(C),
                  as.double(nu),
                  as.double(epsilon),
                  as.double(sigma),
                  as.integer(degree),
                  as.double(offset),
                  as.integer(0),        ## weightlabel
                  as.double(0),
                  as.integer(0),
                  as.double(cache),
                  as.double(tol),
                  as.integer(shrinking),
                  .kernelin,
                  environment(.kernelin))
    alpha(ret) <- resv[-(m+1)]
    coeff(ret) <- alpha(ret)[alpha(ret) != 0]
    alphaindex(ret) <- which(alpha(ret) != 0)
    svindex <- which(alpha(ret) != 0)
    b(ret) <- resv[m+1]
    param(ret)$epsilon <- epsilon
  }

  if(type(ret) == "nu-regression") {
    resv <- .Call("smo_optim",
                  as.double(t(x)),
                  as.integer(nrow(x)),
                  as.integer(ncol(x)),
                  as.double(y),
                  as.double(matrix(rep(-1, m))),
                  as.integer(ktype),
                  as.integer(4),
                  as.double(C),
                  as.double(nu),
                  as.double(epsilon),
                  as.double(sigma),
                  as.integer(degree),
                  as.double(offset),
                  as.integer(0),        ## weightlabel
                  as.double(0),
                  as.integer(0),
                  as.double(cache),
                  as.double(tol),
                  as.integer(shrinking),
                  .kernelin,
                  environment(.kernelin))
    alpha(ret) <- resv[-(m+1)]
    coeff(ret) <- alpha(ret)[alpha(ret) != 0]
    alphaindex(ret) <- which(alpha(ret) != 0)
    svindex <- which(alpha(ret) != 0)
    b(ret) <- resv[m+1]
    param(ret)$epsilon <- epsilon
    param(ret)$nu <- nu
  }

  kcall(ret) <- match.call()
  kernelf(ret) <- kernel
  ## param(ret) <- list(C = C, nu = nu, epsilon = epsilon)
  xmatrix(ret) <- x
  ymatrix(ret) <- y
  SVindex(ret) <- unique(svindex)
  nSV(ret) <- length(unique(svindex))
  if(nSV(ret) == 0)
    stop("No Support Vectors found.")

  fit(ret) <- if (fit) predict(ret, unscaledx) else NA
  if (fit) {
    if(type(ret) == "C-classification" || type(ret) == "nu-classification" ||
       type(ret) == "spoc-classification" || type(ret) == "kbb-classification")
      error(ret) <- 1 - .classAgreement(table(y, as.integer(fit(ret))))
    if(type(ret) == "eps-regression" || type(ret) == "nu-regression")
      error(ret) <- drop(crossprod(fit(ret) - y)/m)
  }
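  ## k-fold cross-validation: the row indices 1:m are shuffled and split into
  ## "cross" folds; each fold is held out once, a model is refit on the
  ## remaining folds with fit = FALSE, and the held-out errors are averaged
  ## into cross(ret).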
  cross(ret) <- -1
  if(cross == 1)
    cat("\n", "cross should be >1; no cross-validation done!", "\n", "\n")
  else if(cross != 0) {
    cerror <- 0
    suppressWarnings(vgr <- split(sample(1:m, m), 1:cross))
    for(i in 1:cross) {
      cind <- unsplit(vgr[-i], 1:(m - length(vgr[[i]])))
      if(type(ret) == "C-classification" || type(ret) == "nu-classification" ||
         type(ret) == "spoc-classification" || type(ret) == "kbb-classification") {
        cret <- ksvm(x[cind, ], factor(lev(ret)[y[cind]], levels = lev(ret)),
                     type = type(ret), kernel = kernel, C = C, nu = nu, tol = tol,
                     scaled = FALSE, cross = 0, fit = FALSE,
                     class.weights = class.weights, cache = cache)
        cres <- predict(cret, x[vgr[[i]], ])
        cerror <- (1 - .classAgreement(table(y[vgr[[i]]], as.integer(cres))))/cross + cerror
      }
      if(type(ret) == "eps-regression" || type(ret) == "nu-regression") {
        cret <- ksvm(x[cind, ], y[cind], type = type(ret), kernel = kernel,
                     C = C, nu = nu, epsilon = epsilon, tol = tol,
                     scaled = FALSE, cross = 0, fit = FALSE, cache = cache)
        cres <- predict(cret, x[vgr[[i]], ])
        ## mean squared error on the held-out fold, averaged over the folds
        cerror <- drop(crossprod(cres - y[vgr[[i]]])/length(vgr[[i]]))/cross + cerror
      }
    }
    cross(ret) <- cerror
  }

  xmatrix(ret) <- x
  ## loss(ret) <- sum((1 - y * fitted(ret))[(1 - y * fitted(ret)) > 0]/m)
  return(ret)
})

.classAgreement <- function(tab) {
  n <- sum(tab)
  if (!is.null(dimnames(tab))) {
    ## named table: match rows and columns by label before taking the diagonal
    lev <- intersect(colnames(tab), rownames(tab))
    p0 <- sum(diag(tab[lev, lev])) / n
  } else {
    ## unnamed table: use the leading square block
    m <- min(dim(tab))
    p0 <- sum(diag(tab[1:m, 1:m])) / n
  }
  return(p0)
}
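## Illustrative example for .classAgreement: with a named confusion table it
## returns the proportion of diagonal (correctly classified) entries, i.e.
## the accuracy.
##
##   tab <- table(true = c("a", "a", "b", "b"),
##                pred = c("a", "b", "b", "b"))
##   .classAgreement(tab)   ## 3 matches out of 4 -> 0.75
##
## A minimal usage sketch for the ksvm() method above, assuming a
## kernlab-style interface; the data set and kernel choice are hypothetical,
## only type, C, and cross mirror the arguments used in this file:
##
##   model <- ksvm(as.matrix(iris[, 1:4]), iris[, 5],
##                 type = "spoc-classification",
##                 kernel = rbfdot(sigma = 0.1), C = 10, cross = 3)
##   cross(model)   ## cross-validation error filled in by the loop above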