Capturing from the camera with a voice command

Time: 2014-12-30 15:44:25

Tags: android android-camera speech-recognition cmusphinx pocketsphinx-android

I am developing a simple voice-controlled camera app and I am using the CMU Sphinx (PocketSphinx) library for speech recognition.

The app has two ways to capture a picture: clicking a button and issuing a voice command. When the button is clicked, the image is captured and saved correctly. When the voice command is issued, however, the following exception is thrown:

12-30 20:48:09.457  19826-19826/edu.cmu.pocketsphinx.demo E/AndroidRuntime﹕ FATAL EXCEPTION: main
Process: edu.cmu.pocketsphinx.demo, PID: 19826
java.lang.RuntimeException: takePicture failed
        at android.hardware.Camera.native_takePicture(Native Method)
        at android.hardware.Camera.takePicture(Camera.java:1436)
        at android.hardware.Camera.takePicture(Camera.java:1381)
        at saycheese.triangles.com.myapplication.CameraActivity.capture(CameraActivity.java:112)
        at saycheese.triangles.com.myapplication.CheeseSpeechRecognizor.capturePicture(CheeseSpeechRecognizor.java:162)
        at saycheese.triangles.com.myapplication.CheeseSpeechRecognizor.onPartialResult(CheeseSpeechRecognizor.java:87)
        at edu.cmu.pocketsphinx.SpeechRecognizer$ResultEvent.execute(Unknown Source)
        at edu.cmu.pocketsphinx.SpeechRecognizer$RecognitionEvent.run(Unknown Source)
        at android.os.Handler.handleCallback(Handler.java:739)
        at android.os.Handler.dispatchMessage(Handler.java:95)
        at android.os.Looper.loop(Looper.java:135)
        at android.app.ActivityThread.main(ActivityThread.java:5221)
        at java.lang.reflect.Method.invoke(Native Method)
        at java.lang.reflect.Method.invoke(Method.java:372)
        at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:899)
        at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:694)

The permissions are granted correctly:

    <uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.VIBRATE" />

<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.hardware.camera.flash" />
<uses-permission android:name="android.hardware.camera.any" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera.autofocus" />

Here is my activity code:

package saycheese.triangles.com.myapplication;


import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.Toast;

import java.io.IOException;


public class CameraActivity extends Activity implements OnClickListener
{

    public static final int MEDIA_TYPE_IMAGE = 1;
    Context context = this;
    Camera camera;
    private CameraPreview mPreview;
    private Button shutterButtun;
    private FrameLayout preview;

    CheeseSpeechRecognizor speechRecognizor;

    public static final String TAG = "tag";

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        requestWindowFeature(getWindow().FEATURE_NO_TITLE);
        setContentView(R.layout.activity_camera);
        // Combine the UI visibility flags in one call; a second call replaces the first.
        getWindow().getDecorView().setSystemUiVisibility(
                View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_IMMERSIVE);

        if (!checkCameraHardware(this))
        {
            Log.i(TAG, "no camera present");
        }
        camera = getCameraInstance();
        if (camera == null)
        {
            Log.i(TAG, "biscuit");
        }
        speechRecognizor = new CheeseSpeechRecognizor(this);
        // Create our Preview view and set it as the content of our
        // activity.
        mPreview = new CameraPreview(this, camera);
        preview = (FrameLayout) findViewById(R.id.camera_preview);
        preview.addView(mPreview);

        shutterButtun = (Button) findViewById(R.id.button_capture);
        speechRecognizor = new CheeseSpeechRecognizor(this);
        shutterButtun.setOnClickListener(this);
    }

    public static Camera getCameraInstance()
    {
        Log.i(TAG, "get camera instance");
        Camera c = null;
        try
        {
            c = Camera.open(); // attempt to get a Camera instance
            Log.i(TAG, "camera opened");

        } catch (Exception e)
        {
            Log.i(TAG, "camera unaavailable");
        }
        return c; // returns null if camera is unavailable

    }

    public static boolean isCameraAvailable()
    {
        Camera c = null;
        try
        {
            c = Camera.open(); // attempt to get a Camera instance
            Log.i(TAG, "camera opened");
            return true;
        } catch (Exception e)
        {
            Log.i(TAG, "camera unaavailable");
            Log.i(TAG, e.getMessage());
            e.printStackTrace();
            return false;
            // Camera is not available (in use or does not exist)
        }
    }


    @Override
    public void onClick(View v)
    {
        capture();
        //speechRecognizor.capturePicture();
    }

    public void capture()
    {
        Toast.makeText(this, "capture.", Toast.LENGTH_SHORT).show();
        camera.takePicture(null, null, mPicture);
    }

    @Override
    protected void onStop()
    {
        Log.i(TAG, "onstop()");
        preview.removeView(mPreview);
        camera.release();
        Log.i(TAG, "camera released");
        speechRecognizor.stopListening();
        super.onStop();

    }

    @Override
    protected void onRestart()
    {

        camera = getCameraInstance();
        Log.i(TAG, "preview started");
        mPreview = new CameraPreview(this, camera);
        preview = (FrameLayout) findViewById(R.id.camera_preview);
        preview.addView(mPreview);
        camera.startPreview();
        speechRecognizor.startListening();
        //recognizer.startListening()
        super.onRestart();

    }


    private PictureCallback mPicture = new PictureCallback()
    {
        @Override
        public void onPictureTaken(byte[] data, Camera camera)
        {
            Log.d(TAG, "on picture taken");
            PictureWritingThread pictureWriter = new PictureWritingThread(data.clone());
            Log.d(TAG, "main thread");

            if (isCameraAvailable())
            {
                camera = getCameraInstance();
            }
            camera.startPreview();
            Log.d(TAG, "preview started");
        }


    };

    @Override
    protected void onStart()
    {
        Log.i(TAG, "onStart()");
        if (camera == null)
        {
            camera = getCameraInstance();
        }
        if (camera != null)
        {
            Log.i(TAG, "preview started");
            camera.startPreview();
        }
        super.onStart();

    }

    @Override
    protected void onDestroy()
    {
        Log.i(TAG, "onDestroy()");
        //camera.release();
        super.onDestroy();

    }

    /**
     * Check if this device has a camera
     */
    private boolean checkCameraHardware(Context context)
    {
        if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))
        {
            // this device has a camera
            return true;
        } else
        {
            // no camera on this device
            return false;
        }
    }

    public void dummyCapture()
    {
        //Toast.makeText(this, "capture.", Toast.LENGTH_SHORT).show();
        //camera.release();
        try
        {
            camera.reconnect();
        } catch (IOException e)
        {
            e.printStackTrace();
        }
        //Log.i(TAG," camera "+);
        if (!isCameraAvailable())
        {
            Toast.makeText(this, "camera not available", Toast.LENGTH_SHORT).show();
        } else
        {
            Toast.makeText(this, "camera available", Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu)
    {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_camera, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item)
    {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings)
        {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus)
    {
        super.onWindowFocusChanged(hasFocus);
        if (hasFocus)
        {
            getWindow().getDecorView().setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                            | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                            | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                            | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
                            | View.SYSTEM_UI_FLAG_FULLSCREEN
                            | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
        }
    }


}

CheeseSpeechRecognizor.java:

package saycheese.triangles.com.myapplication;
import android.os.AsyncTask;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import edu.cmu.pocketsphinx.Assets;
import edu.cmu.pocketsphinx.Hypothesis;
import edu.cmu.pocketsphinx.RecognitionListener;
import edu.cmu.pocketsphinx.SpeechRecognizer;
import static edu.cmu.pocketsphinx.SpeechRecognizerSetup.defaultSetup;
public class CheeseSpeechRecognizor implements RecognitionListener
{
        private static final String KWS_SEARCH = "wakeup";
    private static final String KEYPHRASE = "cheese";
    private static final String STOP_PHRASE = "stop";
    private SpeechRecognizer recognizer;
    CameraActivity activity;
    public static final String TAG = "tag";


    public CheeseSpeechRecognizor(CameraActivity activity)
    {
        this.activity = activity;
         prepareAsyncTask();
    }

    private void prepareAsyncTask()
    {
        new AsyncTask<Void, Void, Exception>()
        {

            @Override
            protected Exception doInBackground(Void... params)
            {
                Log.d(TAG, "preparing asynctask");
                try
                {

                    Assets assets = new Assets(CheeseSpeechRecognizor.this.activity);
                    File assetDir = assets.syncAssets();
                    setupRecognizer(assetDir);
                } catch (IOException e)
                {
                    return e;
                }
                return null;
            }

            @Override
            protected void onPostExecute(Exception result)
            {
                Log.d(TAG, "on post execute");
                if (result != null)
                {
                    //((TextView) findViewById(R.id.caption_text)).setText("Failed to init recognizer " + result);
                } else
                {
                    recognizer.stop();
                    recognizer.startListening(KWS_SEARCH);
                }
            }
        }.execute();
    }

    @Override
    public void onPartialResult(Hypothesis hypothesis)
    {
        //Log.d(TAG, "on partial result");
        if (hypothesis != null)
        {

            String text = hypothesis.getHypstr();
            Log.d(TAG, "on result" + text);
            //Toast.makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
            if (text.equals(KEYPHRASE))
            {
                Log.d(TAG, "cheese");
                capturePicture();
            }
            if (text.equals(STOP_PHRASE))
            {
                Log.d(TAG, "stop");
                recognizer.stop();
            }
        }
    }


    @Override
    public void onResult(Hypothesis hypothesis)
    {
        //Log.d(TAG, "on result" + );
        //((TextView) findViewById(R.id.result_text)).setText("");

        if (hypothesis != null)
        {

            String text = hypothesis.getHypstr();
            Log.d(TAG, "on result" + text);
            //Toast.makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
            if (text.equals(KEYPHRASE))
            {
                Log.d(TAG, "cheese");
                capturePicture();
            }
            if (text.equals(STOP_PHRASE))
            {
                Log.d(TAG, "stop");
                recognizer.stop();
            }

        }
    }

    @Override
    public void onEndOfSpeech()
    {
        Log.d(TAG, "on endof speech");
        /*
        if (DIGITS_SEARCH.equals(recognizer.getSearchName())
        {

        }*/

    }


    private void setupRecognizer(File assetsDir)
    {
        Log.d(TAG, "setting up the recognizor");
        File modelsDir = new File(assetsDir, "models");
        recognizer = defaultSetup()
                .setAcousticModel(new File(modelsDir, "hmm/en-us-semi"))
                .setDictionary(new File(modelsDir, "dict/cmu07a.dic"))
                .setRawLogDir(assetsDir).setKeywordThreshold(1e-20f)
                .getRecognizer();
        recognizer.addListener(this);

        // Create keyword-activation search.
        recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);

    }

    @Override
    public void onBeginningOfSpeech()
    {
        Log.d(TAG, "on beginning of the speech");
    }

    public void capturePicture()
    {
        //activity.dummyCapture();
        activity.capture();
    }

    public void stopListening()
    {
        recognizer.stop();
    }

    public void startListening()
    {
        recognizer.startListening(KWS_SEARCH);
    }
}

I can share my project code if you want to test it.

UPDATE

Please find my project below:

GitHub: https://github.com/mittu-spidey/SayCheese

Google Drive: https://drive.google.com/file/d/0B_L5g7q6wyKOQklTSWFWeGpUWVk/view?usp=sharing

1 Answer:

Answer 0 (score: 1):

You need to react to the keyword properly in onPartialResult, by stopping and restarting the search:

        if (text.equals(KEYPHRASE))
        {
            Log.d(TAG, "cheese");
            capturePicture();
            recognizer.cancel();
            recognizer.startListening(KWS_SEARCH);
        }

Otherwise you will try to capture the picture twice. You do not need to do anything in onResult; in fact, it will never be called.
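For reference, a minimal sketch of the full onPartialResult callback with this fix applied could look like the following. It reuses the names from the question's CheeseSpeechRecognizor (KEYPHRASE, STOP_PHRASE, KWS_SEARCH, recognizer, capturePicture); treat it as an illustration of the answer, not the author's final code:

    @Override
    public void onPartialResult(Hypothesis hypothesis)
    {
        if (hypothesis == null)
        {
            return;
        }

        String text = hypothesis.getHypstr();
        Log.d(TAG, "on partial result: " + text);

        if (text.equals(KEYPHRASE))
        {
            Log.d(TAG, "cheese");
            capturePicture();
            // Cancel the current utterance and restart the keyword search so the
            // same "cheese" hypothesis cannot trigger a second takePicture() call.
            recognizer.cancel();
            recognizer.startListening(KWS_SEARCH);
        } else if (text.equals(STOP_PHRASE))
        {
            Log.d(TAG, "stop");
            recognizer.stop();
        }
    }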
