haifengl/smile · Validation.java
java logo
public static <T> double cv(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold cross validation: " + k);
        }

        // One out-of-fold prediction per sample; every index is filled exactly once.
        double[] predictions = new double[x.length];

        CrossValidation cv = new CrossValidation(x.length, k);
        for (int fold = 0; fold < k; fold++) {
            // Fit a fresh model on everything outside the current fold.
            Regression<T> model = trainer.train(Math.slice(x, cv.train[fold]), Math.slice(y, cv.train[fold]));

            // Score the samples held out in this fold.
            for (int idx : cv.test[fold]) {
                predictions[idx] = model.predict(x[idx]);
            }
        }

        // Aggregate measure over the full out-of-fold prediction vector.
        return measure.measure(y, predictions);
    }
Similar code snippets
1.
haifengl/smile · Validation.java
Match rating: 83.98% · See similar code snippets
java logo
public static <T> double cv(int k, ClassifierTrainer<T> trainer, T[] x, int[] y, ClassificationMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold cross validation: " + k);
        }

        // Each sample receives exactly one out-of-fold class prediction.
        int[] predictions = new int[x.length];

        CrossValidation cv = new CrossValidation(x.length, k);
        for (int fold = 0; fold < k; fold++) {
            // Train on all samples outside the current fold.
            Classifier<T> model = trainer.train(Math.slice(x, cv.train[fold]), Math.slice(y, cv.train[fold]));

            // Predict the samples held out in this fold.
            for (int idx : cv.test[fold]) {
                predictions[idx] = model.predict(x[idx]);
            }
        }

        // Aggregate measure over the full out-of-fold prediction vector.
        return measure.measure(y, predictions);
    }
2.
haifengl/smile · Validation.java
Match rating: 70.96% · See similar code snippets
java logo
public static <T> double loocv(RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        int n = x.length;
        // Out-of-sample prediction for every observation.
        double[] predictions = new double[n];

        LOOCV loocv = new LOOCV(n);
        for (int round = 0; round < n; round++) {
            // Train on all but one sample.
            Regression<T> model = trainer.train(Math.slice(x, loocv.train[round]), Math.slice(y, loocv.train[round]));

            // Predict the single held-out sample.
            int holdout = loocv.test[round];
            predictions[holdout] = model.predict(x[holdout]);
        }

        return measure.measure(y, predictions);
    }
3.
haifengl/smile · Validation.java
Match rating: 67.11% · See similar code snippets
java logo
public static <T> double[] bootstrap(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold bootstrap: " + k);
        }

        // One measure value per bootstrap round.
        double[] results = new double[k];

        Bootstrap bootstrap = new Bootstrap(x.length, k);
        for (int round = 0; round < k; round++) {
            // Fit on this round's bootstrap sample.
            Regression<T> model = trainer.train(Math.slice(x, bootstrap.train[round]),
                    Math.slice(y, bootstrap.train[round]));

            // Evaluate on the out-of-bag samples of this round.
            int[] oob = bootstrap.test[round];
            double[] truth = new double[oob.length];
            double[] predictions = new double[oob.length];
            for (int j = 0; j < oob.length; j++) {
                truth[j] = y[oob[j]];
                predictions[j] = model.predict(x[oob[j]]);
            }

            results[round] = measure.measure(truth, predictions);
        }

        return results;
    }
4.
haifengl/smile · Validation.java
Match rating: 64.77% · See similar code snippets
java logo
public static <T> double[] bootstrap(int k, ClassifierTrainer<T> trainer, T[] x, int[] y) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold bootstrap: " + k);
        }

        // One accuracy value per bootstrap round.
        double[] results = new double[k];
        Accuracy measure = new Accuracy();

        Bootstrap bootstrap = new Bootstrap(x.length, k);
        for (int round = 0; round < k; round++) {
            // Fit on this round's bootstrap sample.
            Classifier<T> model = trainer.train(Math.slice(x, bootstrap.train[round]),
                    Math.slice(y, bootstrap.train[round]));

            // Evaluate on the out-of-bag samples of this round.
            int[] oob = bootstrap.test[round];
            int[] truth = new int[oob.length];
            int[] predictions = new int[oob.length];
            for (int j = 0; j < oob.length; j++) {
                truth[j] = y[oob[j]];
                predictions[j] = model.predict(x[oob[j]]);
            }

            results[round] = measure.measure(truth, predictions);
        }

        return results;
    }
5.
haifengl/smile · Validation.java
Match rating: 63.16% · See similar code snippets
java logo
public static <T> double loocv(RegressionTrainer<T> trainer, T[] x, double[] y) {
        int n = x.length;
        double sse = 0.0;  // accumulated squared error over all held-out samples
        LOOCV loocv = new LOOCV(n);
        for (int round = 0; round < n; round++) {
            // Train on every sample except the held-out one.
            Regression<T> model = trainer.train(Math.slice(x, loocv.train[round]), Math.slice(y, loocv.train[round]));

            int holdout = loocv.test[round];
            sse += Math.sqr(model.predict(x[holdout]) - y[holdout]);
        }

        // Root mean squared error across the n leave-one-out rounds.
        return Math.sqrt(sse / n);
    }
6.
haifengl/smile · Validation.java
Match rating: 61.73% · See similar code snippets
java logo
public static <T> double loocv(ClassifierTrainer<T> trainer, T[] x, int[] y) {
        int n = x.length;
        int correct = 0;  // held-out samples predicted correctly

        LOOCV loocv = new LOOCV(n);
        for (int round = 0; round < n; round++) {
            // Train on every sample except the held-out one.
            Classifier<T> model = trainer.train(Math.slice(x, loocv.train[round]), Math.slice(y, loocv.train[round]));

            int holdout = loocv.test[round];
            if (model.predict(x[holdout]) == y[holdout]) {
                correct++;
            }
        }

        // Leave-one-out accuracy.
        return (double) correct / n;
    }
7.
Match rating: 61.24% · See similar code snippets
java logo
public static Trial testTrainSplit(InstanceList instances) {

        // Randomly split 90% train / 10% test (the third slice is unused).
        InstanceList[] parts = instances.split(new Randoms(),
                new double[] { 0.9, 0.1, 0.0 });

        @SuppressWarnings("rawtypes")
        ClassifierTrainer trainer = new MaxEntTrainer();

        // Train a maximum-entropy classifier and evaluate it on the held-out part.
        return new Trial(trainer.train(parts[TRAINING]), parts[TESTING]);
    }
8.
h2oai/h2o-2 · SpeeDRF.java
Match rating: 57.34% · See similar code snippets
java logo
/**
 * Builds the random forest end to end: prepares the train/test frames,
 * launches the distributed tree-building task on all nodes, scores the
 * trees, and optionally computes variable importance and tree statistics.
 * The model is write-locked during the build and is always unlocked (and
 * its training timer stopped) in the finally block, even if a step throws.
 */
private void buildForest() {
    logStart();
    SpeeDRFModel model = null;
    try {
      Frame train = setTrain();
      Frame test  = setTest();
      // Classification needs a categorical response column; regression uses none.
      Vec resp = regression ? null : train.lastVec().toEnum();
      if (resp != null) gtrash(resp);  // NOTE(review): presumably registers the temp enum vec for cleanup — confirm
      float[] priorDist = setPriorDist(train);
      train = setStrat(train, test, resp);  // apply stratified sampling, if configured
      model = initModel(train, test, priorDist);
      model.start_training(null);
      model.write_lock(self());  // held until the finally block releases it
      drfParams = DRFParams.create(train.find(resp), model.N, model.max_depth, (int) train.numRows(), model.nbins,
              model.statType, use_seed, model.weights, mtries, model.sampling_strategy, (float) sample_rate, model.strata_samples, model.verbose ? 100 : 1, _exclusiveSplitLimit, true, regression);

      DRFTask tsk = new DRFTask(self(), train, drfParams, model._key, model.src_key);
      tsk.validateInputData(train);
      tsk.invokeOnAllNodes();  // distributed tree building across the cluster
      Log.info("Tree building complete. Scoring...");
      // Re-fetch the model: remote nodes updated it in the K/V store.
      model = UKV.get(dest());
      // Score on the test frame when provided, otherwise on the training frame.
      model.scoreAllTrees(test == null ? train : test, resp);
      // Launch a Variable Importance Task
      if (importance && !regression) {
        Log.info("Scoring complete. Performing Variable Importance Calculations.");
        model.current_status = "Performing Variable Importance Calculation.";
        Timer VITimer = new Timer();
        model.variableImportanceCalc(train, resp);
        Log.info("Variable Importance on "+(train.numCols()-1)+" variables and "+ ntrees +" trees done in " + VITimer);
      }
      Log.info("Generating Tree Stats");
      JsonObject trees = new JsonObject();
      trees.addProperty(Constants.TREE_COUNT, model.size());
      if( model.size() > 0 ) {
        trees.add(Constants.TREE_DEPTH, model.depth().toJson());
        trees.add(Constants.TREE_LEAVES, model.leaves().toJson());
      }
      model.generateHTMLTreeStats(new StringBuilder(), trees);
      model.current_status = "Model Complete";
    } finally {
      // Always release the write lock and stop the training clock, even on failure.
      if (model != null) {
        model.unlock(self());
        model.stop_training();
      }
    }
  }
9.
lessthanoptimal/BoofCV · LearnSceneFromFiles.java
Match rating: 56.44% · See similar code snippets
java logo
public void loadThenSplit( File directory ) {
		// Gather all image paths on disk, grouped by scene label.
		Map<String,List<String>> imagesByScene = findImages(directory);
		train = new HashMap<>();
		if( fractionCross != 0 )
			cross = new HashMap<>();
		test = new HashMap<>();

		Set<String> sceneNames = imagesByScene.keySet();

		for( String scene : sceneNames ) {
			List<String> images = imagesByScene.get(scene);

			// randomize the ordering to remove bias
			Collections.shuffle(images,rand);

			// Partition sizes, honoring the configured minimums.
			int trainCount = Math.max(minimumTrain, (int)(images.size()*fractionTrain));
			int crossCount = Math.max(minimumCross, (int)(images.size()*fractionCross));
			int testCount = images.size()-trainCount-crossCount;

			if( testCount < minimumTest )
				throw new RuntimeException("Not enough images to create test set. "+scene+" total = "+images.size());

			// Slices: [0,trainCount) -> train, next crossCount -> cross, remainder -> test.
			createSubSet(scene, images, train, 0, trainCount);
			if( cross != null ) {
				createSubSet(scene, images, cross , trainCount, crossCount+trainCount);
			}
			createSubSet(scene, images, test, crossCount+trainCount, images.size());
		}

		scenes.addAll(sceneNames);
	}
10.
Waikato/moa · OzaBagAdwinML.java
Match rating: 56.38% · See similar code snippets
java logo
@Override
    public void trainOnInstanceImpl(Instance inst) {
		// Train the underlying bagged ensemble on this instance first.
		try {
			super.trainOnInstanceImpl(inst);
		} catch(NullPointerException e) {
			// Deliberate best-effort: swallow NPEs from the base learner so the
			// stream keeps flowing; only a warning is printed.
			System.err.println("[Warning] NullPointer on train.");
			//e.printStackTrace();
		}

		// Per-member drift detection: feed each member's 0/1 loss on this
		// instance into its ADWIN detector and reset the member on drift.
		for (int i = 0; i < this.ensemble.length; i++) {

			// get prediction
			double prediction[] = this.ensemble[i].getVotesForInstance(inst);
			if (prediction == null) {
				prediction = new double[]{};  // treat "no votes" as an empty prediction
			}

			// get true value (one label per predicted output)
			double actual[] = new double[prediction.length];
			for (int j = 0; j < prediction.length; j++) {
				actual[j] = (double)inst.classValue(j);
			}

			// compute loss — zero/one loss after thresholding both vectors at 0.5
			double loss = Metrics.L_ZeroOne(A.toIntArray(actual,0.5), A.toIntArray(prediction,0.5));
			//System.err.println("loss["+i+"] = "+loss);

			// adwin stuff: snapshot the error estimate BEFORE feeding the new loss;
			// a detected change is acted on only when the estimate got worse.
			double ErrEstim = this.ADError[i].getEstimation();
			if (this.ADError[i].setInput(loss)) {
				if (this.ADError[i].getEstimation() > ErrEstim) {
					System.err.println("Change model "+i+"!");
					// Drift with rising error: reset this member, retrain it on the
					// current instance, and give it a fresh ADWIN detector.
					this.ensemble[i].resetLearning();
					this.ensemble[i].trainOnInstance(inst);
					this.ADError[i] = new ADWIN();
				}
			}
		}
	}