haifengl/smile · Validation.java
java logo
/**
 * Leave-one-out cross validation of a regression model, scored by the
 * supplied measure over the full prediction vector.
 *
 * @param trainer the trainer used to fit a model on each training split.
 * @param x the instances.
 * @param y the response values.
 * @param measure the regression measure applied to (y, predictions).
 * @return the measure evaluated on the out-of-sample predictions.
 */
public static <T> double loocv(RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        int size = x.length;
        double[] yhat = new double[size];

        LOOCV split = new LOOCV(size);
        for (int round = 0; round < size; round++) {
            // Fit on everything except the held-out sample of this round.
            Regression<T> fitted = trainer.train(Math.slice(x, split.train[round]), Math.slice(y, split.train[round]));

            int held = split.test[round];
            yhat[held] = fitted.predict(x[held]);
        }

        return measure.measure(y, yhat);
    }
Similar code snippets
1.
haifengl/smile · Validation.java
Match rating: 81.24% · See similar code snippets
java logo
/**
 * Leave-one-out cross validation of a regression model, returning the
 * root mean squared error of the out-of-sample predictions.
 *
 * @param trainer the trainer used to fit a model on each training split.
 * @param x the instances.
 * @param y the response values.
 * @return the RMSE over all n held-out predictions.
 */
public static <T> double loocv(RegressionTrainer<T> trainer, T[] x, double[] y) {
        int size = x.length;
        LOOCV split = new LOOCV(size);

        // Accumulate the sum of squared residuals, one held-out sample per round.
        double sse = 0.0;
        for (int round = 0; round < size; round++) {
            Regression<T> fitted = trainer.train(Math.slice(x, split.train[round]), Math.slice(y, split.train[round]));

            int held = split.test[round];
            sse += Math.sqr(fitted.predict(x[held]) - y[held]);
        }

        return Math.sqrt(sse / size);
    }
2.
haifengl/smile · Validation.java
Match rating: 77.11% · See similar code snippets
java logo
/**
 * Leave-one-out cross validation of a classifier, returning accuracy.
 *
 * @param trainer the trainer used to fit a classifier on each training split.
 * @param x the instances.
 * @param y the class labels.
 * @return the fraction of held-out samples predicted correctly.
 */
public static <T> double loocv(ClassifierTrainer<T> trainer, T[] x, int[] y) {
        int size = x.length;
        LOOCV split = new LOOCV(size);

        // Count correct predictions on the single held-out sample of each round.
        int hits = 0;
        for (int round = 0; round < size; round++) {
            Classifier<T> fitted = trainer.train(Math.slice(x, split.train[round]), Math.slice(y, split.train[round]));

            int held = split.test[round];
            if (fitted.predict(x[held]) == y[held]) {
                hits++;
            }
        }

        return ((double) hits) / size;
    }
3.
haifengl/smile · Validation.java
Match rating: 70.96% · See similar code snippets
java logo
/**
 * K-fold cross validation of a regression model, scored by the supplied
 * measure over the full prediction vector.
 *
 * @param k the number of folds; must be at least 2.
 * @param trainer the trainer used to fit a model on each fold's training split.
 * @param x the instances.
 * @param y the response values.
 * @param measure the regression measure applied to (y, predictions).
 * @return the measure evaluated on the out-of-fold predictions.
 * @throws IllegalArgumentException if k &lt; 2.
 */
public static <T> double cv(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold cross validation: " + k);
        }

        int size = x.length;
        double[] yhat = new double[size];

        CrossValidation split = new CrossValidation(size, k);
        for (int fold = 0; fold < k; fold++) {
            Regression<T> fitted = trainer.train(Math.slice(x, split.train[fold]), Math.slice(y, split.train[fold]));

            // Each sample appears in exactly one test fold, so yhat is filled once.
            for (int held : split.test[fold]) {
                yhat[held] = fitted.predict(x[held]);
            }
        }

        return measure.measure(y, yhat);
    }
4.
haifengl/smile · Validation.java
Match rating: 66.12% · See similar code snippets
java logo
/**
 * K-fold cross validation of a classifier, scored by the supplied measure
 * over the full prediction vector.
 *
 * @param k the number of folds; must be at least 2.
 * @param trainer the trainer used to fit a classifier on each fold's training split.
 * @param x the instances.
 * @param y the class labels.
 * @param measure the classification measure applied to (y, predictions).
 * @return the measure evaluated on the out-of-fold predictions.
 * @throws IllegalArgumentException if k &lt; 2.
 */
public static <T> double cv(int k, ClassifierTrainer<T> trainer, T[] x, int[] y, ClassificationMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold cross validation: " + k);
        }

        int size = x.length;
        int[] yhat = new int[size];

        CrossValidation split = new CrossValidation(size, k);
        for (int fold = 0; fold < k; fold++) {
            Classifier<T> fitted = trainer.train(Math.slice(x, split.train[fold]), Math.slice(y, split.train[fold]));

            // Each sample appears in exactly one test fold, so yhat is filled once.
            for (int held : split.test[fold]) {
                yhat[held] = fitted.predict(x[held]);
            }
        }

        return measure.measure(y, yhat);
    }
5.
haifengl/smile · Validation.java
Match rating: 63.2% · See similar code snippets
java logo
/**
 * Bootstrap validation of a regression model: k resampling rounds, each
 * scored independently by the supplied measure on its out-of-bag samples.
 *
 * @param k the number of bootstrap rounds; must be at least 2.
 * @param trainer the trainer used to fit a model on each resample.
 * @param x the instances.
 * @param y the response values.
 * @param measure the regression measure applied per round.
 * @return an array of k per-round measure values.
 * @throws IllegalArgumentException if k &lt; 2.
 */
public static <T> double[] bootstrap(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold bootstrap: " + k);
        }

        int size = x.length;
        double[] scores = new double[k];

        Bootstrap resampler = new Bootstrap(size, k);
        for (int round = 0; round < k; round++) {
            Regression<T> fitted = trainer.train(Math.slice(x, resampler.train[round]), Math.slice(y, resampler.train[round]));

            // Score this round on its out-of-bag indices only.
            int[] holdout = resampler.test[round];
            double[] actual = new double[holdout.length];
            double[] yhat = new double[holdout.length];
            for (int j = 0; j < holdout.length; j++) {
                actual[j] = y[holdout[j]];
                yhat[j] = fitted.predict(x[holdout[j]]);
            }

            scores[round] = measure.measure(actual, yhat);
        }

        return scores;
    }
6.
deeplearning4j/deeplearning4j · MultiLayerNetwork.java
Match rating: 60.76% · See similar code snippets
java logo
/**
 * Convenience overload: converts the {@code TrainingMode} enum to the
 * boolean flag expected by {@code output(INDArray, boolean)} and delegates.
 *
 * @param input the input activations.
 * @param train the training/inference mode.
 * @return whatever the boolean-flag overload returns for this input.
 */
public INDArray output(INDArray input, TrainingMode train) {
        boolean isTraining = (train == TrainingMode.TRAIN);
        return output(input, isTraining);
    }
7.
haifengl/smile · Validation.java
Match rating: 60.31% · See similar code snippets
java logo
/**
 * Bootstrap validation of a classifier: k resampling rounds, each scored
 * independently by accuracy on its out-of-bag samples.
 *
 * @param k the number of bootstrap rounds; must be at least 2.
 * @param trainer the trainer used to fit a classifier on each resample.
 * @param x the instances.
 * @param y the class labels.
 * @return an array of k per-round accuracies.
 * @throws IllegalArgumentException if k &lt; 2.
 */
public static <T> double[] bootstrap(int k, ClassifierTrainer<T> trainer, T[] x, int[] y) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold bootstrap: " + k);
        }

        int size = x.length;
        double[] scores = new double[k];
        Accuracy measure = new Accuracy();

        Bootstrap resampler = new Bootstrap(size, k);
        for (int round = 0; round < k; round++) {
            Classifier<T> fitted = trainer.train(Math.slice(x, resampler.train[round]), Math.slice(y, resampler.train[round]));

            // Score this round on its out-of-bag indices only.
            int[] holdout = resampler.test[round];
            int[] actual = new int[holdout.length];
            int[] yhat = new int[holdout.length];
            for (int j = 0; j < holdout.length; j++) {
                actual[j] = y[holdout[j]];
                yhat[j] = fitted.predict(x[holdout[j]]);
            }

            scores[round] = measure.measure(actual, yhat);
        }

        return scores;
    }
8.
Match rating: 59.76% · See similar code snippets
java logo
/**
 * Trains a perceptron model on the given data by repeated online updates.
 * Each iteration shuffles the data, performs one online update per sample,
 * then counts remaining misclassifications; training stops when there are
 * no errors or MAX_ITERATION is reached.
 *
 * NOTE(review): Collections.shuffle mutates the caller's trainingData list
 * in place — confirm callers do not depend on its order.
 *
 * @param trainingData the labeled samples to fit; reordered as a side effect.
 * @return this classifier, now holding the trained model.
 */
@Override
    public IClassifier train(List<Tuple> trainingData) {
        // Fresh model each call; configuration comes from the instance's properties.
        this.model = new PerceptronModel();
        setParameter(properties);
        this.model.init(trainingData, initWeightRandom);

        int errCount;
        int iter = 0;
        do {
            LOG.debug("Iteration " + (++iter));
            Collections.shuffle(trainingData);

            // One online perceptron update per (feature vector, label index) pair.
            for (Tuple entry : trainingData) {
                onlineTrain(entry.vector.getVector(), model.labelIndexer.getIndex(entry.label)); // for Xi
            }

            // Re-score the whole set: count samples whose argmax prediction differs from the gold label.
            errCount = (int) trainingData.stream().filter(entry -> predictMax(entry.vector.getVector()).getLeft() != model.labelIndexer.getIndex(entry.label)).count();
        } while (errCount != 0 && iter < MAX_ITERATION);

        LOG.debug("Err size: " + errCount);
        return this;
    }
9.
deeplearning4j/deeplearning4j · DataSet.java
Match rating: 59.56% · See similar code snippets
java logo
/**
 * Splits this data set into train/test parts by fraction, delegating to the
 * count-based overload. The training partition always contains at least one
 * example, even when the fraction rounds down to zero.
 *
 * @param fractionTrain fraction of examples assigned to training; must be
 *                      strictly between 0.0 and 1.0.
 * @return the split produced by {@code splitTestAndTrain(int)}.
 */
@Override
    public SplitTestAndTrain splitTestAndTrain(double fractionTrain) {
        Preconditions.checkArgument(fractionTrain > 0.0 && fractionTrain < 1.0,
                "Train fraction must be > 0.0 and < 1.0 - got %s", fractionTrain);
        // Truncate toward zero, then clamp to a minimum of one training example.
        int numTrain = Math.max(1, (int) (fractionTrain * numExamples()));
        return splitTestAndTrain(numTrain);
    }
10.
FudanNLP/fnlp · PATrainer.java
Match rating: 59.19% · See similar code snippets
java logo
/**
 * Trains a linear classifier with the passive-aggressive (PA) algorithm,
 * optionally using a label hierarchy (tree) for hierarchical updates.
 * Loops up to maxIter times over shuffled data, updating weights whenever
 * the hinge-style loss is positive, and stops early when the variance of
 * recent per-loop accuracies falls below eps.
 *
 * NOTE(review): trainingList.shuffle() reorders the caller's data in place;
 * hisErrRate actually stores accuracies despite its name — confirm intent.
 *
 * @param trainingList the training instances; shuffled as a side effect.
 * @param eval optional per-loop evaluation hook; may be null.
 * @return the trained Linear classifier (also stored in this.classifier).
 */
public Linear train(InstanceSet trainingList, Evaluation eval) {
		System.out.println("Sample Size: " + trainingList.size());
		LabelAlphabet labels = trainingList.getAlphabetFactory().DefaultLabelAlphabet();

		System.out.println("Class Size: " + labels.size());

		if(!incremental){
			// Initialize the weight vectors to the class centers (means).
			weights = Mean.mean(trainingList, tree);
			msolver.setWeight(weights);
		}
		
		
		// Ring buffer of the last historyNum per-loop accuracies, used for the convergence test.
		float[] hisErrRate = new float[historyNum];
		int numSamples = trainingList.size();
		int frac = numSamples / 10;

		// Main training loop.
		System.out.println("Begin Training...");
		long beginTime = System.currentTimeMillis();
		int loops = 0; // loop counter
		while (loops++ < maxIter) {
			System.out.print("Loop: " + loops);
			float totalerror = 0;
			trainingList.shuffle();
			long beginTimeInner = System.currentTimeMillis();
			for (int ii = 0; ii < numSamples; ii++) {

				Instance inst = trainingList.getInstance(ii);
				int maxC = (Integer) inst.getTarget();
				
//				HashSet<Integer> t = new HashSet<Integer>();
//				t.add(maxC);
				Predict pred = (Predict) msolver.getBest(inst, 1);
				
				// Retrieve the cached score of the gold label from the instance's temp data, then clear it.
				Predict oracle = (Predict) inst.getTempData();
				inst.deleteTempData();
				
				int maxE = pred.getLabel(0);
				// Misclassification cost: 0/1 without a hierarchy, tree distance with one.
				int error;
				if (tree == null) {
					error = (pred.getLabel(0) == maxC) ? 0 : 1;
				} else {
					error = tree.dist(maxE, maxC);
				}
				// PA-style loss: cost minus the margin between gold and predicted scores.
				float loss = error- (oracle.getScore(0) - pred.getScore(0));

				if (loss > 0) {// prediction was wrong (positive loss): update the weights

					totalerror += 1;
					// Step size: loss normalized by the squared feature norm times the cost,
					// capped at the aggressiveness parameter c.
					float phi = featureGen.getVector(inst).l2Norm2();
					float alpha = (float) Math.min(c, loss / (phi * error));
					if (tree != null) {
						// Hierarchical update: promote every node on the gold label's path,
						// demote every node on the predicted label's path.
						int[] anc = tree.getPath(maxC);
						for (int j = 0; j < anc.length; j++) {
							weights[anc[j]].plus(featureGen.getVector(inst), alpha);
						}
						anc = tree.getPath(maxE);
						for (int j = 0; j < anc.length; j++) {
							weights[anc[j]].plus(featureGen.getVector(inst), -alpha);
						}
					} else {
						weights[maxC].plus(featureGen.getVector(inst), alpha);
						weights[maxE].plus(featureGen.getVector(inst), -alpha);
					}

				}
				if (frac==0||ii % frac == 0) {// progress indicator (one dot per ~10% of samples)
					System.out.print('.');
				}
			}
			float acc = 1 - totalerror / numSamples;
			System.out.print("\t Accuracy:" + acc);
			System.out.println("\t Time(s):"
					+ (System.currentTimeMillis() - beginTimeInner) / 1000);
			
			// In the first two loops, optionally trim near-zero weights to shrink the feature set.
			if(optim&&loops<=2){
				int oldnum = 0;
				int newnum = 0;
				for(int i = 0;i<weights.length;i++){
					oldnum += weights[i].size();
					MyHashSparseArrays.trim(weights[i],0.99f);
					newnum += weights[i].size();
				}
				System.out.println("优化:\t原特征数:"+oldnum + "\t新特征数:"+newnum);					
			}
			

			// Optionally checkpoint an interim model to disk each loop.
			if (interim) {
				Linear  p = new Linear(weights, msolver, featureGen, trainingList.getPipes(), trainingList.getAlphabetFactory());
				try {
					p.saveTo("./tmp/model.gz");
				} catch (IOException e) {
					System.err.println("write model error!");
				}
				msolver.isUseTarget(true);
			}

			// Optionally evaluate on held-out data each loop.
			if (eval != null) {
				System.out.print("Test:\t");
				Linear classifier = new Linear(weights, msolver);
				eval.eval(classifier,2);
				msolver.isUseTarget(true);
			}
			// Convergence: stop when recent accuracies have variance below eps.
			hisErrRate[loops%historyNum] = acc;
			if(MyArrays.viarance(hisErrRate) < eps){
				System.out.println("convergence!");
				break;	
			}
		}
		System.out.println("Training End");
		System.out.println("Training Time(s):"
				+ (System.currentTimeMillis() - beginTime) / 1000);		

		classifier = new Linear(weights, msolver, featureGen, trainingList.getPipes(), trainingList.getAlphabetFactory());
		return classifier;
	}