
OpenCV3 Java Machine Learning Usage Summary


Original article: OpenCV3 Java Machine Learning Usage Summary

Preface

In principle, an XML model file trained with the C++ version of OpenCV can be used from Java without modification. Pay attention to the OpenCV version, however: the library changed substantially between 2.4 and 3.x, and the on-disk XML format changed with it, so model files are not interchangeable across those versions.
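As a hedged illustration (not part of the original article), the sketch below shows what the save/load round trip looks like from the Java side in OpenCV 3.x. The path "svm_model.xml" is a placeholder, and the static SVM.load method is assumed to be exposed in your particular 3.x Java build; an XML file written by the C++ API of the same 3.x version should load the same way, whereas a 2.4-era file will not.

// Hedged sketch: persisting and reloading an OpenCV 3.x ml model from Java.
// "svm_model.xml" is a placeholder path, chosen only for this example.
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.ml.Ml;
import org.opencv.ml.SVM;

public class ModelIoSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Tiny height/weight training set, same layout as the main example below.
        Mat samples = new Mat(4, 2, CvType.CV_32FC1);
        samples.put(0, 0, new float[] { 186, 80, 185, 81, 160, 50, 161, 48 });
        Mat labels = new Mat(4, 1, CvType.CV_32SC1);
        labels.put(0, 0, new int[] { 0, 0, 1, 1 });

        SVM svm = SVM.create();
        svm.setKernel(SVM.LINEAR);
        svm.setType(SVM.C_SVC);
        svm.train(samples, Ml.ROW_SAMPLE, labels);

        svm.save("svm_model.xml"); // write the trained model to XML

        // Reload it later (or in another process) and predict.
        // Assumption: the static load() is available in your 3.x Java bindings.
        SVM restored = SVM.load("svm_model.xml");
        Mat test = new Mat(1, 2, CvType.CV_32FC1);
        test.put(0, 0, new float[] { 184, 79 });
        Mat result = new Mat();
        restored.predict(test, result, 0);
        System.out.println("Predicted label: " + result.dump());
    }
}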


OpenCV ships a rich set of machine learning algorithms. This post groups them and exercises each one in turn, as a starting point for further study. The algorithms covered include artificial neural networks (ANN_MLP), Boost, decision trees, k-nearest neighbours, logistic regression, normal Bayes, random forests, SVM, and SVM trained with SGD, among others.

The machine learning workflow is the same in every case: (1) collect the sample data (sampleData); (2) train a classifier (model); (3) run predictions on the test data (testData). The example used here, borrowed from elsewhere, predicts whether a person is male or female from height and weight: a classifier is trained on a handful of simple records and then asked to classify the test samples. A minimal sketch of these three steps follows; the full listing covering every algorithm comes after it.
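Before the full listing, here is a minimal, hedged sketch of just these three steps using k-nearest neighbours on the same kind of height/weight data. It is a distilled illustration under the same assumptions as the main example (OpenCV 3.x Java bindings on the classpath, native library loadable), not the author's original code.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.ml.KNearest;
import org.opencv.ml.Ml;

public class ThreeStepSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 1. Collect the sample data: each row is one person (height, weight).
        Mat sampleData = new Mat(4, 2, CvType.CV_32FC1);
        sampleData.put(0, 0, new float[] { 186, 80, 185, 81, 160, 50, 161, 48 });
        Mat responses = new Mat(4, 1, CvType.CV_32FC1);
        responses.put(0, 0, new float[] { 0, 0, 1, 1 }); // 0 = male, 1 = female

        // 2. Train the classifier.
        KNearest knn = KNearest.create();
        knn.train(sampleData, Ml.ROW_SAMPLE, responses);

        // 3. Predict on the test data (expected: first row male, second row female).
        Mat testData = new Mat(2, 2, CvType.CV_32FC1);
        testData.put(0, 0, new float[] { 184, 79, 159, 50 });
        Mat results = new Mat();
        knn.findNearest(testData, 2, results);
        System.out.println("Predicted labels:\n" + results.dump());
    }
}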

Example code:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.TermCriteria;
import org.opencv.ml.ANN_MLP;
import org.opencv.ml.Boost;
import org.opencv.ml.DTrees;
import org.opencv.ml.KNearest;
import org.opencv.ml.LogisticRegression;
import org.opencv.ml.Ml;
import org.opencv.ml.NormalBayesClassifier;
import org.opencv.ml.RTrees;
import org.opencv.ml.SVM;
import org.opencv.ml.SVMSGD;
import org.opencv.ml.TrainData;

public class ML {

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Training data: two features per sample, height and weight
        float[] trainingData = { 186, 80, 185, 81, 160, 50, 161, 48 };
        // Labels: the first two samples are male (0), the last two female (1).
        // Because the algorithms expect different response layouts, three label Mats are prepared:
        // a 4x2 float Mat for the ANN's two output nodes, a 4x1 int Mat, and a 4x1 float Mat.
        float[] labels = { 0f, 0f, 0f, 0f, 1f, 1f, 1f, 1f };
        int[] labels2 = { 0, 0, 1, 1 };
        float[] labels3 = { 0, 0, 1, 1 };
        // Test data: first sample male, second female
        float[] test = { 184, 79, 159, 50 };
        Mat trainingDataMat = new Mat(4, 2, CvType.CV_32FC1);
        trainingDataMat.put(0, 0, trainingData);
        Mat labelsMat = new Mat(4, 2, CvType.CV_32FC1);
        labelsMat.put(0, 0, labels);
        Mat labelsMat2 = new Mat(4, 1, CvType.CV_32SC1);
        labelsMat2.put(0, 0, labels2);
        Mat labelsMat3 = new Mat(4, 1, CvType.CV_32FC1);
        labelsMat3.put(0, 0, labels3);
        Mat sampleMat = new Mat(2, 2, CvType.CV_32FC1);
        sampleMat.put(0, 0, test);

        MyAnn(trainingDataMat, labelsMat, sampleMat);
        MyBoost(trainingDataMat, labelsMat2, sampleMat);
        MyDtrees(trainingDataMat, labelsMat2, sampleMat);
        MyKnn(trainingDataMat, labelsMat3, sampleMat);
        MyLogisticRegression(trainingDataMat, labelsMat3, sampleMat);
        MyNormalBayes(trainingDataMat, labelsMat2, sampleMat);
        MyRTrees(trainingDataMat, labelsMat2, sampleMat);
        MySvm(trainingDataMat, labelsMat2, sampleMat);
        MySvmsgd(trainingDataMat, labelsMat2, sampleMat);
    }

    // Artificial neural network (multi-layer perceptron)
    public static Mat MyAnn(Mat trainingData, Mat labels, Mat testData) {
        // Train the data using the ANN
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        Mat layerSizes = new Mat(1, 4, CvType.CV_32FC1);
        // Topology with two hidden layers: 2 input nodes, 2 nodes per hidden layer, 2 output nodes
        layerSizes.put(0, 0, new float[] { 2, 2, 2, 2 });
        ANN_MLP ann = ANN_MLP.create();
        ann.setLayerSizes(layerSizes);
        ann.setTrainMethod(ANN_MLP.BACKPROP);
        ann.setBackpropWeightScale(0.1);
        ann.setBackpropMomentumScale(0.1);
        ann.setActivationFunction(ANN_MLP.SIGMOID_SYM, 1, 1);
        ann.setTermCriteria(new TermCriteria(TermCriteria.MAX_ITER + TermCriteria.EPS, 300, 0.0));
        boolean success = ann.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("Ann training result: " + success);
        // ann.save("D:/bp.xml"); // save the model
        // ann.load("D:/bp.xml"); // load the model
        // Predict on the test data
        Mat responseMat = new Mat();
        ann.predict(testData, responseMat, 0);
        System.out.println("Ann responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.size().height; i++) {
            // Sum of the two output nodes; a sum >= 1 is read as class 1 (female)
            if (responseMat.get(i, 0)[0] + responseMat.get(i, 1)[0] >= 1)
                System.out.println("Girl\n");
            if (responseMat.get(i, 0)[0] + responseMat.get(i, 1)[0] < 1)
                System.out.println("Boy\n");
        }
        return responseMat;
    }

    // Boost
    public static Mat MyBoost(Mat trainingData, Mat labels, Mat testData) {
        Boost boost = Boost.create();
        // boost.setBoostType(Boost.DISCRETE);
        boost.setBoostType(Boost.GENTLE);
        boost.setWeakCount(2);
        boost.setWeightTrimRate(0.95);
        boost.setMaxDepth(2);
        boost.setUseSurrogates(false);
        boost.setPriors(new Mat());
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = boost.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("Boost training result: " + success);
        // boost.save("D:/bp.xml"); // save the model
        Mat responseMat = new Mat();
        float response = boost.predict(testData, responseMat, 0);
        System.out.println("Boost responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // Decision tree
    public static Mat MyDtrees(Mat trainingData, Mat labels, Mat testData) {
        DTrees dtree = DTrees.create(); // create the classifier
        dtree.setMaxDepth(8); // maximum tree depth
        dtree.setMinSampleCount(2);
        dtree.setUseSurrogates(false);
        dtree.setCVFolds(0); // cross-validation folds (0 = no CV pruning)
        dtree.setUse1SERule(false);
        dtree.setTruncatePrunedTree(false);
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = dtree.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("Dtrees training result: " + success);
        // dtree.save("D:/bp.xml"); // save the model
        Mat responseMat = new Mat();
        float response = dtree.predict(testData, responseMat, 0);
        System.out.println("Dtrees responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // k-nearest neighbours
    public static Mat MyKnn(Mat trainingData, Mat labels, Mat testData) {
        final int K = 2;
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        KNearest knn = KNearest.create();
        boolean success = knn.train(trainingData, Ml.ROW_SAMPLE, labels);
        System.out.println("Knn training result: " + success);
        // knn.save("D:/bp.xml"); // save the model
        // Find the nearest neighbours of the test data
        Mat results = new Mat();
        Mat neighborResponses = new Mat();
        Mat dists = new Mat();
        knn.findNearest(testData, K, results, neighborResponses, dists);
        System.out.println("results:\n" + results.dump());
        System.out.println("Knn neighborResponses:\n" + neighborResponses.dump());
        System.out.println("dists:\n" + dists.dump());
        for (int i = 0; i < results.height(); i++) {
            if (results.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (results.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return results;
    }

    // Logistic regression
    public static Mat MyLogisticRegression(Mat trainingData, Mat labels, Mat testData) {
        LogisticRegression lr = LogisticRegression.create();
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = lr.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("LogisticRegression training result: " + success);
        // lr.save("D:/bp.xml"); // save the model
        Mat responseMat = new Mat();
        float response = lr.predict(testData, responseMat, 0);
        System.out.println("LogisticRegression responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // Normal Bayes classifier
    public static Mat MyNormalBayes(Mat trainingData, Mat labels, Mat testData) {
        NormalBayesClassifier nb = NormalBayesClassifier.create();
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = nb.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("NormalBayes training result: " + success);
        // nb.save("D:/bp.xml"); // save the model
        Mat responseMat = new Mat();
        float response = nb.predict(testData, responseMat, 0);
        System.out.println("NormalBayes responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // Random forest
    public static Mat MyRTrees(Mat trainingData, Mat labels, Mat testData) {
        RTrees rtrees = RTrees.create();
        rtrees.setMaxDepth(4);
        rtrees.setMinSampleCount(2);
        rtrees.setRegressionAccuracy(0.f);
        rtrees.setUseSurrogates(false);
        rtrees.setMaxCategories(16);
        rtrees.setPriors(new Mat());
        rtrees.setCalculateVarImportance(false);
        rtrees.setActiveVarCount(1);
        rtrees.setTermCriteria(new TermCriteria(TermCriteria.MAX_ITER, 5, 0));
        TrainData tData = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = rtrees.train(tData.getSamples(), Ml.ROW_SAMPLE, tData.getResponses());
        System.out.println("Rtrees training result: " + success);
        // rtrees.save("D:/bp.xml"); // save the model
        Mat responseMat = new Mat();
        rtrees.predict(testData, responseMat, 0);
        System.out.println("Rtrees responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // Support vector machine
    public static Mat MySvm(Mat trainingData, Mat labels, Mat testData) {
        SVM svm = SVM.create();
        svm.setKernel(SVM.LINEAR);
        svm.setType(SVM.C_SVC);
        TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 1000, 0);
        svm.setTermCriteria(criteria);
        svm.setGamma(0.5);
        svm.setNu(0.5);
        svm.setC(1);
        TrainData td = TrainData.create(trainingData, Ml.ROW_SAMPLE, labels);
        boolean success = svm.train(td.getSamples(), Ml.ROW_SAMPLE, td.getResponses());
        System.out.println("Svm training result: " + success);
        // svm.save("D:/bp.xml"); // save the model
        // svm.load("D:/bp.xml"); // load the model
        Mat responseMat = new Mat();
        svm.predict(testData, responseMat, 0);
        System.out.println("SVM responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }

    // SVM trained with stochastic gradient descent
    public static Mat MySvmsgd(Mat trainingData, Mat labels, Mat testData) {
        SVMSGD Svmsgd = SVMSGD.create();
        TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 1000, 0);
        Svmsgd.setTermCriteria(criteria);
        Svmsgd.setInitialStepSize(2);
        Svmsgd.setSvmsgdType(SVMSGD.SGD);
        Svmsgd.setMarginRegularization(0.5f);
        boolean success = Svmsgd.train(trainingData, Ml.ROW_SAMPLE, labels);
        System.out.println("SVMSGD training result: " + success);
        // Svmsgd.save("D:/bp.xml"); // save the model
        // Svmsgd.load("D:/bp.xml"); // load the model
        Mat responseMat = new Mat();
        Svmsgd.predict(testData, responseMat, 0);
        System.out.println("SVMSGD responseMat:\n" + responseMat.dump());
        for (int i = 0; i < responseMat.height(); i++) {
            if (responseMat.get(i, 0)[0] == 0)
                System.out.println("Boy\n");
            if (responseMat.get(i, 0)[0] == 1)
                System.out.println("Girl\n");
        }
        return responseMat;
    }
}

Note: the author's code runs as-is and can be tested directly.




Summary

This concludes the OpenCV3 Java Machine Learning Usage Summary; hopefully it helps you solve the problems you run into.
