Hey guys, I have already tried every search I can think of, so apparently Google is not helping me ^^ I am sure the OpenCV library is installed, because it worked for an earlier prototype. All I did was change the main method, so please pay close attention to that. What the program does: it streams from the webcam, takes the centre of the head and draws a path that follows your head.
Error: Exception in thread "main" java.lang.UnsatisfiedLinkError: org.opencv.objdetect.CascadeClassifier.CascadeClassifier_1(Ljava/lang/String;)J
at org.opencv.objdetect.CascadeClassifier.CascadeClassifier_1(Native Method)
at org.opencv.objdetect.CascadeClassifier.<init>(CascadeClassifier.java:58)
at at.htlklu.tremoranalyzer.FaceDetector.<init>(FaceDetector.java:17)
at at.htlklu.tremoranalyzer.Main.initComponents(Main.java:110)
at at.htlklu.tremoranalyzer.Main.<init>(Main.java:40)
at at.htlklu.tremoranalyzer.Main.main(Main.java:198)
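From what I could find, this exception points at the native side of the bindings (the CascadeClassifier_1 native method) rather than at the jar on the classpath. For reference only, here is a minimal sketch of the load-before-use pattern the desktop Java bindings expect; the class name LoadOrderSketch is made up for the example and is not part of my project:
import org.opencv.core.Core;
import org.opencv.objdetect.CascadeClassifier;
public class LoadOrderSketch {
    static {
        // Sketch (assumption): the opencv_java native library must be on
        // java.library.path and loaded before any native-backed class such as
        // CascadeClassifier is instantiated.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    }
    public static void main(String[] args) {
        CascadeClassifier classifier = new CascadeClassifier("src/lbpcascade_frontalface.xml");
        System.out.println("cascade empty: " + classifier.empty());
    }
}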
// The Main class (the changed main code)
package at.htlklu.tremoranalyzer;
// In this intermediate step of the TremorAnalyzer, drawing in the panel
// works: the lines, the yellow dot and the rectangle
// around the face.
// Screenshot is in the Matura folder "Screenshots".
// Date: 4 Nov 2014
//------------------------
// Update: 3 buttons that work
// Date: 7 Nov 2014
import java.awt.*;
import javax.swing.*;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.highgui.VideoCapture;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JFrame;
import javax.swing.LayoutStyle.ComponentPlacement;
public class Main {
private FaceDetector faceDetector;
private Frame frame;
private static FacePanelX facePanel;
public static boolean state = true;
public Main(){
initComponents();
initCamera();
initProcessing();
}
private void initProcessing() {
VideoCapture webCam = new VideoCapture(0);
Mat webcam_image = new Mat();
if (webCam.isOpened()) {
try {
Thread.sleep(500);
} catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
while (state) {
webCam.read(webcam_image);
if (!webcam_image.empty()) {
try {
Thread.sleep(200);
} catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
frame.setSize(webcam_image.width() + 40, webcam_image.height() + 80);
webcam_image = faceDetector.detect(webcam_image);
// Display the image
facePanel.matToBufferedImage(webcam_image);
Point center = faceDetector.getCenter();
if (center != null) {
facePanel.setFaceCenter(center);
facePanel.repaint();
facePanel.invalidate();
}
} else {
System.out.println(" --(!) No captured frame from webcam !");
webCam.release();
break;
}
}
}
webCam.release();
}
private void initCamera() {
}
private void initComponents() {
frame = new JFrame("WebCam Capture - Face detection");
((JFrame) frame).setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
faceDetector = new FaceDetector();
frame.setSize(400, 400); // give the frame some size
frame.setBackground(Color.BLUE);
JButton btnExit = new JButton("Exit");
btnExit.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
System.exit(0);
}
});
JButton btnStopRecord = new JButton("Stop Recording");
btnStopRecord.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
state = false;
}
});
JButton btndrawLine = new JButton("Draw Path");
btndrawLine.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
facePanel.getArray().clear();
drawLine();
}
});
GroupLayout groupLayout = new GroupLayout(((JFrame) frame).getContentPane());
groupLayout.setHorizontalGroup(
groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addContainerGap()
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
.addComponent(facePanel, GroupLayout.DEFAULT_SIZE, 388, Short.MAX_VALUE)
.addGroup(groupLayout.createSequentialGroup()
.addComponent(btnExit, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGap(37)
.addComponent(btnStopRecord, GroupLayout.DEFAULT_SIZE, 139, Short.MAX_VALUE)
.addGap(30)
.addComponent(btndrawLine, GroupLayout.DEFAULT_SIZE, 107, Short.MAX_VALUE)))
.addContainerGap())
);
groupLayout.setVerticalGroup(
groupLayout.createParallelGroup(Alignment.TRAILING)
.addGroup(groupLayout.createSequentialGroup()
.addContainerGap()
.addComponent(facePanel, GroupLayout.DEFAULT_SIZE, 331, Short.MAX_VALUE)
.addPreferredGap(ComponentPlacement.RELATED)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(btnExit, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(btndrawLine, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(btnStopRecord, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
((JFrame) frame).getContentPane().setLayout(groupLayout);
frame.setVisible(true);
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (Throwable e) {
e.printStackTrace();
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
facePanel = new FacePanelX();
}
public static void main(String arg[]) throws InterruptedException {
Main m = new Main();
}
private static void drawLine(){
facePanel.setDrawLineActive(!facePanel.isDrawLineActive());
}
}
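Side note on how the loop above is driven: initProcessing() runs its while loop directly in the constructor, i.e. on the same thread that builds the GUI. A rough sketch (hypothetical helper class, not from my project) of pushing one grab/detect/repaint pass onto a background thread instead, so the Swing side stays responsive:
package at.htlklu.tremoranalyzer;
public class ProcessingSketch {
    // Sketch only: runs the capture work on its own thread so the GUI thread
    // is not blocked by the while loop. oneIteration stands for one pass of
    // the existing loop body (read frame, detect, repaint the panel).
    public static void startLoop(final Runnable oneIteration) {
        Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                while (Main.state) {
                    oneIteration.run();
                }
            }
        }, "capture-loop");
        worker.setDaemon(true);
        worker.start();
    }
}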
// FacePanelX, used for the drawing
package at.htlklu.tremoranalyzer;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import javax.imageio.ImageIO;
import javax.swing.JPanel;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.Point;
import org.opencv.highgui.Highgui;
public class FacePanelX extends JPanel {
private BufferedImage image;
private boolean drawLineActive = false;
private ArrayList<Point> array = new ArrayList<Point>();
private Point faceCenter = new Point();
//---------------------------------------------------------------------------------------------------------------
// getters, setters
public ArrayList<Point> getArray() {
return array;
}
public void setArray(ArrayList<Point> array) {
this.array = array;
}
public Point getFaceCenter() {
return faceCenter;
}
public void setFaceCenter(Point point) {
this.faceCenter = point;
array.add(point);
}
// \getters, setters
//---------------------------------------------------------------------------------------------------------------
public FacePanelX() {
super();
}
//---------------------------------------------------------------------------------------------------------------
public boolean matToBufferedImage(Mat matrix) {
MatOfByte mb = new MatOfByte();
Highgui.imencode(".jpg", matrix, mb);
try {
this.image = ImageIO.read(new ByteArrayInputStream(mb.toArray()));
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
// \ matToBufferedImage
//---------------------------------------------------------------------------------------------------------------
@Override
public void paintComponent(Graphics g) {
super.paintComponent(g);
Graphics2D g2d = (Graphics2D) g.create();
if (this.image == null)
return;
g2d.drawImage(this.image, 10, 10, this.image.getWidth(),
this.image.getHeight(), null);
drawFaceRect(g2d);
drawMiddle(g2d);
if(drawLineActive)
drawLine(g2d);
g2d.dispose();
}
// \ paintComponent
//---------------------------------------------------------------------------------------------------------------
public void drawFaceRect(Graphics2D g) {
g.setColor(Color.blue);
g.drawRect(((int) faceCenter.x - 31), (int) faceCenter.y - 31, 62, 62);
}
public void drawMiddle(Graphics2D g) {
g.setColor(Color.ORANGE);
g.fillOval(1280 / 2, 720 / 2, 10, 10);
}
// \ drawRect, drawMiddle
//---------------------------------------------------------------------------------------------------------------
public void drawLine(Graphics2D g) {
// state = false;
// if (!((int) faceCenter.x > 350 && (int) faceCenter.y > 280)
// || ((int) faceCenter.x < 280 && (int) faceCenter.y < 190)) {
// state = true;
// }
Point bufP = new Point(-1, -1);
g.setColor(Color.blue);
if (array.size() > 1) {
for (Point p : array) {
System.out.println("p: " + p.x + " " + p.y);
g.fillOval((int) p.x, (int) p.y, 2, 2);
if (!(bufP.x == -1 && bufP.y == -1)) {
g.drawLine((int) bufP.x, (int) bufP.y, (int) p.x, (int) p.y);
}
bufP.x = p.x;
bufP.y = p.y;
}
}
}
// \ methode drawLine
//---------------------------------------------------------------------------------------------------------------
public boolean isDrawLineActive() {
return drawLineActive;
}
public void setDrawLineActive(boolean drawLineActive) {
this.drawLineActive = drawLineActive;
}
}
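About the conversion in FacePanelX: matToBufferedImage encodes every frame to JPEG and decodes it again with ImageIO. As far as I understand, a direct byte copy also works; this is only a sketch, it assumes the frame is an 8-bit BGR Mat (CV_8UC3, the usual VideoCapture output), and the class name is made up:
package at.htlklu.tremoranalyzer;
import java.awt.image.BufferedImage;
import org.opencv.core.Mat;
public class MatConversionSketch {
    // Sketch (assumption): matrix.type() == CvType.CV_8UC3, so the raw bytes can
    // go straight into a TYPE_3BYTE_BGR image without a JPEG round-trip.
    public static BufferedImage matToImageDirect(Mat matrix) {
        int w = matrix.cols();
        int h = matrix.rows();
        byte[] data = new byte[w * h * (int) matrix.elemSize()];
        matrix.get(0, 0, data);                             // raw pixel bytes out of the Mat
        BufferedImage img = new BufferedImage(w, h, BufferedImage.TYPE_3BYTE_BGR);
        img.getRaster().setDataElements(0, 0, w, h, data);  // BGR byte order matches TYPE_3BYTE_BGR
        return img;
    }
}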
// FaceDetector with the CascadeClassifier
package at.htlklu.tremoranalyzer;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
public class FaceDetector {
private CascadeClassifier faceCascade;
private Point center;
/** Loads the LBP frontal-face cascade from src/lbpcascade_frontalface.xml. */
public FaceDetector() {
faceCascade = new CascadeClassifier("src/lbpcascade_frontalface.xml");
if (faceCascade.empty()) {
System.out.println("--(!)Error loading cascade classifier\n");
return;
} else {
System.out.println("Face classifier loaded up");
}
}
/**
* Does some basic preprocessing
*
* @param inputframe The frame to be processed
* @return The processed matrix
*/
public Mat detect(Mat inputframe) {
Mat mRgba = new Mat();
Mat mGrey = new Mat();
MatOfRect faces = new MatOfRect();
inputframe.copyTo(mRgba);
inputframe.copyTo(mGrey);
Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
Imgproc.equalizeHist(mGrey, mGrey);
faceCascade.detectMultiScale(mGrey, faces);
for (Rect rect : faces.toArray()) {
center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height
* 0.5);
}
return mRgba;
}
// ---------------------------------------------------------------------------------------------------------------
public Point getCenter() {
return center;
}
}
// \class FaceDetector
// ---------------------------------------------------------------------------------------------------------------
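One more detail in FaceDetector.detect(): the loop keeps whichever rectangle happens to come last in faces.toArray(). Here is a small sketch of taking the centre from the largest detection instead; it is meant as a drop-in method next to detect() and only relies on the existing faceCascade field:
// Sketch only: pick the biggest hit instead of the last one; returns null
// when nothing was detected in the frame.
public Point centerOfLargestFace(Mat grey) {
    MatOfRect faces = new MatOfRect();
    faceCascade.detectMultiScale(grey, faces);
    Rect best = null;
    for (Rect rect : faces.toArray()) {
        if (best == null || rect.area() > best.area()) {
            best = rect;                       // keep the largest detection
        }
    }
    if (best == null) {
        return null;
    }
    return new Point(best.x + best.width * 0.5, best.y + best.height * 0.5);
}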