将主菜单链接到我的Android应用程序中的屏幕选项按钮

时间:2013-12-09 11:35:20

标签: android opencv

Hello Guys我在Android应用程序中遇到问题。 我制作了一个Android应用程序,然后在LG optimus 2x上用ICS安卓版测试了它 但是当我在SONY xperia C上测试它时,它给了我一些问题

第一个也是最重要的问题是:我在应用程序中创建了一个菜单,在 LG Optimus 2x 上按下硬件选项键即可打开它;但在搭载 JB 安卓系统的 Sony Xperia C 上,该硬件键已被映射到其他功能,因此无法打开我的选项菜单。

现在我想做的是:制作一个屏幕上的选项按钮,用它来打开菜单。

我已经制作了一个按钮,但不知道如何把这个按钮和应用程序的菜单关联起来。

下面是我的 layout.xml 文件中的代码:

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical" >

    <!-- Diagnostics text shown above the camera preview. -->
    <LinearLayout
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:orientation="vertical" >

        <TextView
            android:id="@+id/colortxt"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="Result will be displayed here.."
            android:textColor="#000000"
            android:textSize="12sp" />
    </LinearLayout>

    <!-- On-screen replacement for the hardware menu key.  The value of
         android:onClick must match a public void Buttonbtn1(View) method
         declared on the hosting Activity. -->
    <Button
        android:id="@+id/Button"
        android:textColorLink="#3334"
        android:shadowColor="#4445"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:onClick="Buttonbtn1"
        android:text="Options" />

    <!-- OpenCV camera preview fills the remaining screen space. -->
    <org.opencv.android.JavaCameraView
        android:id="@+id/color_blob_detection_activity_surface_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</LinearLayout>

这是我的.java文件中的代码

    package imtech.aglab;

import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;



import com.InjQuant.R;

import android.os.Bundle;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.text.style.SuperscriptSpan;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnTouchListener;
import android.widget.TextView;
import android.widget.Toast;
import android.view.View.OnClickListener;

public class MainActivity<Button> extends Activity implements OnTouchListener, CvCameraViewListener2 {
    private static final String  TAG = "OCVSample::Activity";

    private boolean              mIsColorSelected = false;
    private Mat                  mRgba;
    private Scalar               mBlobColorRgba;
    private Scalar               mBlobColorHsv;
    private BlobDetector          mDetector;
    private Mat                  mSpectrum;
    private Size                 SPECTRUM_SIZE;
    private Scalar               CONTOUR_COLOR;
    TextView tf;

    private CameraBridgeViewBase mOpenCvCameraView;

    private BaseLoaderCallback  mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    Log.i(TAG, "OpenCV loaded successfully");
                    mOpenCvCameraView.enableView();
                    mOpenCvCameraView.setOnTouchListener(MainActivity.this);
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };
    private Object[] data;

    public MainActivity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume()
    {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        mDetector = new BlobDetector();
        mSpectrum = new Mat();
        mBlobColorRgba = new Scalar(255);
        mBlobColorHsv = new Scalar(255);
        SPECTRUM_SIZE = new Size(200, 64);
        CONTOUR_COLOR = new Scalar(255,0,0,255);
    }

    public void onCameraViewStopped() {
        mRgba.release();
    }

    public boolean onTouch(View v, MotionEvent event) {

        tf=(TextView)findViewById(R.id.colortxt);
        int cols = mRgba.cols();
        int rows = mRgba.rows();

        int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
        int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;

        int x = (int)event.getX() - xOffset;
        int y = (int)event.getY() - yOffset;

        Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
        tf.setText("Touch image coordinates: (" + x + ", " + y + ")");

        if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;

        Rect touchedRect = new Rect();

        touchedRect.x = (x>4) ? x-4 : 0;
        touchedRect.y = (y>4) ? y-4 : 0;

        touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
        touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;

        Mat touchedRegionRgba = mRgba.submat(touchedRect);

        Mat touchedRegionHsv = new Mat();
        Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

        // Calculate average color of touched region
        mBlobColorHsv = Core.sumElems(touchedRegionHsv);
        int pointCount = touchedRect.width*touchedRect.height;
        for (int i = 0; i < mBlobColorHsv.val.length; i++)
            mBlobColorHsv.val[i] /= pointCount;

        mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);

        Log.i("Color Check" + TAG , "Touched hsv color: (" + mBlobColorHsv.val[0] + ", " + mBlobColorHsv.val[1] +
                ", " + mBlobColorHsv.val[2] + ")");
     tf.setText(tf.getText()+ "\n" + "Touched hsv color: (" + mBlobColorHsv.val[0] + ", " + mBlobColorHsv.val[1] +
                ", " + mBlobColorHsv.val[2] + ")");

        Log.i("Color Check" + TAG , "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
                ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
      tf.setText(tf.getText() + "\n" +"Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
              ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
        mDetector.setHsvColor(mBlobColorHsv);

        Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);

        mIsColorSelected = true;

        touchedRegionRgba.release();
        touchedRegionHsv.release();

        return false; // don't need subsequent touch events
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();

        if (mIsColorSelected) {
            mDetector.process(mRgba);
            List<MatOfPoint> contours = mDetector.getContours();
            Log.e(TAG, "Contours count: " + contours.size());
          ;
            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);

            Mat colorLabel = mRgba.submat(4, 68, 4, 68);
            colorLabel.setTo(mBlobColorRgba);

            Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
            mSpectrum.copyTo(spectrumLabel);
        }

        return mRgba;
    }

    private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
        Mat pointMatRgba = new Mat();
        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
        Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);

        return new Scalar(pointMatRgba.get(0, 0));
    }

    /* (non-Javadoc)
     * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu)
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu)
    {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    public boolean onOptionsItemSelected(MenuItem item) {
        //respond to menu item selection

        int itemId = item.getItemId();

        if (itemId == R.id.aboutapp) {
            // Toast.makeText(getBaseContext(), "Color Blob Detection Application.", Toast.LENGTH_SHORT).show();
            startActivity(new Intent(this, Aboutapp.class));
            return true;

        } else if (itemId == R.id.developers) {
            //          Toast.makeText(getBaseContext(), "IMTECH Lab.", Toast.LENGTH_SHORT).show();
            startActivity(new Intent(this, developers.class));
            return true;

        } else if (itemId == R.id.credits) {
            //          Toast.makeText(getBaseContext(), "IMTECH Lab.", Toast.LENGTH_SHORT).show();
            startActivity(new Intent(this, cridits.class));
            return true;

        } else if (itemId == R.id.save) {
            Toast.makeText(getBaseContext(), "Save Results in the Database.", Toast.LENGTH_SHORT).show();
            //          startActivity(new Intent(this, Save.class));
            return true;

        } else if (itemId == R.id.help) {
            // Toast.makeText(getBaseContext(), "How to use this application?", Toast.LENGTH_SHORT).show();
            startActivity(new Intent(this, help.class));
            return true;

        } else if (itemId == R.id.action_settings) {
            Toast.makeText(getBaseContext(), "Settings", Toast.LENGTH_SHORT).show();
            //          startActivity(new Intent(this, Setting.class));
            return true;
        } else {
            return super.onOptionsItemSelected(item);
        }

}
     public class MyActivity extends Activity {
         private View Buttonbtn1;


        protected void onCreate (Bundle icicle) {
             super.onCreate(icicle);
             setContentView(R.layout.activity_main);
             final Button button = (Button) findViewById (R.id.Button);
             Buttonbtn1.setOnClickListener(new View.OnClickListener() {
                 public void onClick(View view) {
                     // put the method hear about what the button should do
             }
         });
     }


    public void onBackPressed() 
    {
        AlertDialog.Builder b = new AlertDialog.Builder(MainActivity.this);
        b.setTitle("Exit");
        b.setMessage("Do you want to exit?");
        b.setPositiveButton("Yes", new DialogInterface.OnClickListener() 
        {

            @Override
            public void onClick(DialogInterface dialog, int which) 
            {
                MainActivity.this.finish();
            }
        });

        b.setNeutralButton("No",null);
        b.show();

        //super.onBackPressed();

    }
     }
}

第二个问题是:我的应用程序会调用相机功能,但在 Sony Xperia C 上打开相机时,它没有全屏显示相机画面,只使用了屏幕的一小部分;而在旧的 LG Optimus 2x 上运行时,这个应用程序是全屏使用相机的。

  

伙计们,请帮我看看应该在哪里添加什么代码,才能达到预期的效果。

0 个答案:

没有答案