I am working on an Android Activity and I want to feed the result of the camera Intent into an InputStream so that the Activity can process it further; the goal is to let the user take a picture with the camera and then pass that picture, as an InputStream, on to an API.
I am not sure whether this is the best way to "convert" a camera image into an InputStream, so I am open to any suggestions and hints; here is my code so far:
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;
import com.google.api.client.extensions.android.json.AndroidJsonFactory;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.services.vision.v1.Vision;
import com.google.api.services.vision.v1.VisionRequestInitializer;
import com.google.api.services.vision.v1.model.AnnotateImageRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesResponse;
import com.google.api.services.vision.v1.model.FaceAnnotation;
import com.google.api.services.vision.v1.model.Feature;
import com.google.api.services.vision.v1.model.Image;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
public class VisionAPIActivity extends AppCompatActivity {
ImageView imgFavorite;
public final static int CAMERA_REQUEST = 1888;
public void TakePicture() {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(takePictureIntent, CAMERA_REQUEST);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
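// The camera is launched immediately when the Activity is created, before the layout is inflated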
TakePicture();
setContentView(R.layout.activity_vision_api);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
FloatingActionButton fab = findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show();
}
});
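// Build the Cloud Vision client; requests are authorised with the API key passed below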
Vision.Builder visionBuilder = new Vision.Builder(
new NetHttpTransport(),
new AndroidJsonFactory(),
null);
visionBuilder.setVisionRequestInitializer(
new VisionRequestInitializer("AIzaSyCnPwvnEQakkUXpkFaj2TcwJs_E3DPqjm0"));
final Vision vision = visionBuilder.build();
Log.i("log-", "passed VisionBuilder Initialisation");
// Create new thread
AsyncTask.execute(new Runnable() {
@Override
public void run() {
// Convert photo to byte array (a placeholder raw resource is used here instead of the camera picture)
final InputStream inputStream =
getResources().openRawResource(R.raw.skate);
byte[] photoData = new byte[0];
Log.i("log-", "Content of Photo Data" + photoData);
try {
photoData = IOUtils.toByteArray(inputStream);
} catch (IOException e) {
e.printStackTrace();
}
try {
inputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
Image inputImage = new Image();
inputImage.encodeContent(photoData);
Feature desiredFeature = new Feature();
desiredFeature.setType("FACE_DETECTION");
AnnotateImageRequest request = new AnnotateImageRequest();
request.setImage(inputImage);
Log.i("log-", "Content of inputImage" + inputImage);
request.setFeatures(Arrays.asList(desiredFeature));
BatchAnnotateImagesRequest batchRequest =
new BatchAnnotateImagesRequest();
batchRequest.setRequests(Arrays.asList(request));
BatchAnnotateImagesResponse batchResponse =
null;
try {
batchResponse = vision.images().annotate(batchRequest).execute();
List<FaceAnnotation> faces = batchResponse.getResponses()
.get(0).getFaceAnnotations();
// Count faces
int numberOfFaces = faces.size();
Log.i("log-", "number Of Faces" + numberOfFaces);
runOnUiThread(new Runnable() {
@Override
public void run() {
ImageView mImageView;
mImageView = findViewById(R.id.imageViewId);
InputStream is = getResources().openRawResource(R.raw.skate);
mImageView.setImageBitmap(BitmapFactory.decodeStream(is));
}
});
// Get joy likelihood for each face
String likelihoods = "";
for (int i = 0; i < numberOfFaces; i++) {
likelihoods += "\n It is " +
faces.get(i).getJoyLikelihood() +
" that face " + i + " is happy";
}
// Concatenate everything
final String message =
"This photo has " + numberOfFaces + " faces" + likelihoods;
// Display toast on UI thread
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(),
message, Toast.LENGTH_LONG).show();
}
});
}
catch (IOException e) {
e.printStackTrace();
}
}
});
}
public void imageClick(View view){
imgFavorite = findViewById(R.id.imageView1);
open();
}
public void open(){
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); //IMAGE CAPTURE CODE
startActivityForResult(intent, 0);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode,resultCode,data);
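// Note: the "data" extra only holds a small thumbnail Bitmap, not the full-resolution photo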
Bitmap bitmap=(Bitmap)data.getExtras().get("data");
imgFavorite.setImageBitmap(bitmap);
}
}
Answer 0 (score: 1)
You can launch the camera Intent and tell it where to save the picture; afterwards you can open that file as an InputStream. To do this, you need to pass a file Uri like this:
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
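The snippet above leaves fileUri undefined. One common way to build it (a minimal sketch, not part of the original answer) is to create a file in the app's private pictures directory and wrap it in a content Uri via FileProvider, since plain file:// Uris are rejected on API 24+. The ".fileprovider" authority and the corresponding provider/file_paths.xml entries in the manifest are assumptions here:

// Sketch only: assumes a FileProvider with authority "<applicationId>.fileprovider"
// is declared in AndroidManifest.xml together with a matching file_paths.xml.
// Requires: android.net.Uri, android.os.Environment,
//           android.support.v4.content.FileProvider, java.io.File
private Uri photoUri;

private void takePictureToFile() throws IOException {
    // App-private pictures directory; no storage permission needed for this location
    File photoFile = File.createTempFile("camera_photo", ".jpg",
            getExternalFilesDir(Environment.DIRECTORY_PICTURES));
    // Wrap the file in a content:// Uri (file:// Uris trigger FileUriExposedException on API 24+)
    Uri fileUri = FileProvider.getUriForFile(this,
            getPackageName() + ".fileprovider", photoFile);
    photoUri = fileUri;
    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
    startActivityForResult(intent, CAMERA_REQUEST);
}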
If you do not pass a file Uri, you will only receive a thumbnail instead of the full-size photo. For more details, have a look at the documentation:
https://developer.android.com/training/camera/photobasics#TaskPath
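Once the camera app has written the full-size picture to that Uri, the saved photo can be opened as an InputStream in onActivityResult and fed to the Vision request exactly like the raw resource in the question. A minimal sketch (not the original answer's code), assuming the hypothetical photoUri field from the snippet above:

// Sketch only: photoUri is a hypothetical field holding the Uri passed via EXTRA_OUTPUT.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == CAMERA_REQUEST && resultCode == RESULT_OK) {
        try {
            // Open the full-size photo the camera app wrote to photoUri
            InputStream inputStream = getContentResolver().openInputStream(photoUri);
            byte[] photoData = IOUtils.toByteArray(inputStream);
            inputStream.close();
            // photoData can now be passed to Image.encodeContent(...) as in the question
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}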