OpenCV4Android: How to detect head movement

Date: 2014-09-09 06:38:27

Tags: opencv image-processing motion-detection opencv4android

I am using the Viola-Jones method to detect faces, and whenever the face is tilted the algorithm is likely to fail.

I want to detect those head movements even when the face itself cannot be detected.

Can I use motion detection to achieve this, or is there some other way to find the movement?

Thanks in advance,

1 Answer:

Answer 0: (score: 1)

Yes, you can do this by capturing the detected face positions and comparing them across frames.

The code below will help you detect the face; you can then compare the x and y positions of successive detections (a small sketch of that comparison follows the code).

import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.ActionListener;
import java.awt.event.KeyListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

class DetectNose extends JPanel implements KeyListener, ActionListener {
private static final long serialVersionUID = 1L;
private JFrame frame; // not static: it is assigned per instance in the constructor
private BufferedImage image;
private CascadeClassifier face_cascade;
private Point center;
private JLabel label;
private Image scalledItemImage;
private double customY = 0;
private double customX = 0;
private Iterator<InputStream> iterator;
private ArrayList<BufferedImage> listOfCachedImages;
private int imageIndex = 1;
private int customZ = 0;
private Size size;
private Image scalledItemImageBackup;
private Point center1;
private int imgSize = 35;
private boolean isLocked;


public DetectNose(JFrame frame, List<Long> listOfOrnaments) {
    super();
    this.frame = frame;
    this.frame.setFocusable(true);
    this.frame.requestFocusInWindow();
    this.frame.addKeyListener(this);

    File f = null;
    try {
        System.out.println(System.getProperty("os.name"));
        if (System.getProperty("os.name").contains("Windows")) {
            f = new File("res/opencv_lib_win/opencv_java249.dll");
            System.load(f.getAbsolutePath());
            System.out.println("Loaded :" + f.getAbsolutePath());
        } else {
            f = new File("res/opencv_lib/libopencv_java246.so");
            System.load(f.getAbsolutePath());
            System.out.println("Loaded :" + f.getAbsolutePath());
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }

    List<InputStream> ornaments = DatabaseHandler
            .getOrnamentsImagesByListOfOrnaments(listOfOrnaments);

    iterator = ornaments.iterator();

    listOfCachedImages = new ArrayList<BufferedImage>();

    try {
        while (iterator.hasNext()) {
            InputStream inputStream = iterator.next();
            listOfCachedImages.add(ImageIO.read(inputStream));
        }

        setFirstOrnament();

    } catch (IOException e) {
        e.printStackTrace();
    }
    label = new JLabel(new ImageIcon(scalledItemImage));
    add(label);
    face_cascade = new CascadeClassifier(
            "res/cascades/haarcascade_frontalface_alt_tree.xml");
    if (face_cascade.empty()) {
        System.out.println("--(!)Error loading A\n");
        return;
    } else {
        System.out.println("Face classifier loaded up");
    }
}

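// Shows the first cached ornament image, scaled down for the preview label.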
private void setFirstOrnament() {
    scalledItemImage = listOfCachedImages.get(imageIndex - 1);
    scalledItemImageBackup = scalledItemImage.getScaledInstance(700, 700,
            Image.SCALE_DEFAULT); // getScaledInstance() expects a scaling hint here, not an image type
    scalledItemImage = scalledItemImage.getScaledInstance(imgSize, imgSize,
            Image.SCALE_DEFAULT);
    repaint();
    System.out.println("imageIndex = " + imageIndex);
}

private void setPrevOrnament() {
    if (imageIndex > 1) {
        imageIndex--;
        scalledItemImage = listOfCachedImages.get(imageIndex - 1);
        scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
                700, Image.SCALE_DEFAULT);
        scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
                imgSize, Image.SCALE_DEFAULT);
        GoLiveIntermediator.nextButton.setEnabled(true);
        repaint();
        revalidate();
        System.out.println("imageIndex = " + imageIndex);
    } else {
        GoLiveIntermediator.prevButton.setEnabled(false);
    }
}

private void setNextOrnament() {
    if (listOfCachedImages.size() > imageIndex) {
        imageIndex++;
        scalledItemImage = listOfCachedImages.get(imageIndex - 1);
        scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
                700, Image.SCALE_DEFAULT);
        scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
                imgSize, Image.SCALE_DEFAULT);
        GoLiveIntermediator.prevButton.setEnabled(true);
        repaint();
        revalidate();
        System.out.println("imageIndex = " + imageIndex);
    } else {
        GoLiveIntermediator.nextButton.setEnabled(false);
    }
}

private BufferedImage getimage() {
    return image;
}

public void setimage(BufferedImage newimage) {
    image = newimage;
    return;
}

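// Converts a 1-channel (grayscale) or 3-channel (BGR) Mat into a BufferedImage
// that Swing can display, swapping BGR to RGB in the 3-channel case.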
public BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }
    BufferedImage image2 = new BufferedImage(cols, rows, type);
    image2.getRaster().setDataElements(0, 0, cols, rows, data);
    return image2;
}

public void paintComponent(Graphics g) {
    super.paintComponent(g); // let the JPanel clear its background before drawing the frame
    try {
        this.frame.requestFocusInWindow();
        BufferedImage temp = getimage();
        g.drawImage(temp, 0, 0, temp.getWidth(), temp.getHeight() + 50,
                this);
    } catch (Exception ex) {
        System.out.print("Trying to load images...");
    }
}

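// Detects faces in the given frame, draws an ellipse around each one and stores
// its center in the "center" field so successive positions can be compared.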
public Mat detect(Mat inputframe) {
    Mat mRgba = new Mat();
    Mat mGrey = new Mat();
    MatOfRect faces = new MatOfRect();
    inputframe.copyTo(mRgba);
    inputframe.copyTo(mGrey);
    Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.equalizeHist(mGrey, mGrey);
    try {
        face_cascade.detectMultiScale(mGrey, faces);
    } catch (Exception e) {
        System.out.print(".");
    }
    frame.setLocationRelativeTo(null);
    frame.setResizable(false);

    for (Rect rect : faces.toArray()) {
        center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height
                * 0.5); // You can use this to point out as first detection and last detection
        size = new Size(rect.width * 0.5, rect.height * 0.5);
        Core.ellipse(mRgba, center, size, 0, 0, 360, new Scalar(255, 0,
                255), 1, 8, 0);

        repaint();
    }
    return mRgba;
}

Here center is the first detection point, and you can find the last detection point from the image in the same way; comparing the x and y values of the two tells you which way the head has moved.
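Below is a minimal sketch of that comparison, assuming you keep the center of the previous detection around. It is not part of the original answer; the class name HeadMovementEstimator, the lastCenter field and the 10-pixel threshold are illustrative choices only.

import org.opencv.core.Point;

class HeadMovementEstimator {
    private Point lastCenter;        // center of the previous detection, null until a face is seen
    private double threshold = 10.0; // minimum shift in pixels that counts as movement (tune as needed)

    // Compares the current face center with the previous one and returns a rough direction.
    public String update(Point currentCenter) {
        if (currentCenter == null) {
            return "NO_FACE";            // nothing detected in this frame
        }
        if (lastCenter == null) {
            lastCenter = currentCenter;  // first detection, nothing to compare against yet
            return "FIRST_DETECTION";
        }
        double dx = currentCenter.x - lastCenter.x;
        double dy = currentCenter.y - lastCenter.y;
        lastCenter = currentCenter;

        if (Math.abs(dx) < threshold && Math.abs(dy) < threshold) {
            return "STILL";
        }
        // Report only the dominant axis; diagonals could be handled separately if needed.
        if (Math.abs(dx) >= Math.abs(dy)) {
            return dx > 0 ? "RIGHT" : "LEFT";
        }
        return dy > 0 ? "DOWN" : "UP";
    }
}

Calling something like estimator.update(center) at the end of the loop in detect(), and estimator.update(null) when no face is found, would then report the dominant direction of the head movement between frames.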