Passing a Camera Intent to Android Input Stream

I'm working on an Android Activity and I want to pass the camera intent result into an input stream so it can be processed further by the Activity; the goal is that the user takes a picture with the camera, and the picture is then handled as an input stream and passed on to an API.

I'm not sure whether this is the best way to "convert" the camera image into an input stream, so I'm open to any suggestions and hints; here is my code so far:

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;

import com.google.api.client.extensions.android.json.AndroidJsonFactory;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.services.vision.v1.Vision;
import com.google.api.services.vision.v1.VisionRequestInitializer;
import com.google.api.services.vision.v1.model.AnnotateImageRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesResponse;
import com.google.api.services.vision.v1.model.FaceAnnotation;
import com.google.api.services.vision.v1.model.Feature;
import com.google.api.services.vision.v1.model.Image;

import org.apache.commons.io.IOUtils;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;

public class VisionAPIActivity extends AppCompatActivity {

    ImageView imgFavorite;
    public final static int CAMERA_REQUEST = 1888;

    public void TakePicture() {
        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        startActivityForResult(takePictureIntent, CAMERA_REQUEST);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        TakePicture();
        setContentView(R.layout.activity_vision_api);
        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        FloatingActionButton fab = findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
                        .setAction("Action", null).show();
            }
        });

        Vision.Builder visionBuilder = new Vision.Builder(
                new NetHttpTransport(),
                new AndroidJsonFactory(),
                null);

        visionBuilder.setVisionRequestInitializer(
                new VisionRequestInitializer("AIzaSyCnPwvnEQakkUXpkFaj2TcwJs_E3DPqjm0"));
        final Vision vision = visionBuilder.build();

        Log.i("log-", "passed VisionBuilder Initialisation");

        // Create new thread
        AsyncTask.execute(new Runnable() {
            @Override
            public void run() {
                // Convert photo to byte array.
                // NOTE: this currently reads a bundled raw resource (R.raw.skate),
                // not the picture taken with the camera intent above.
                final InputStream inputStream =
                        getResources().openRawResource(R.raw.skate);
                byte[] photoData = new byte[0];
                Log.i("log-", "Content of Photo Data" + photoData);

                try {
                    photoData = IOUtils.toByteArray(inputStream);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                try {
                    inputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }

                Image inputImage = new Image();
                inputImage.encodeContent(photoData);
                Feature desiredFeature = new Feature();
                desiredFeature.setType("FACE_DETECTION");
                AnnotateImageRequest request = new AnnotateImageRequest();
                request.setImage(inputImage);
                Log.i("log-", "Content of inputImage" + inputImage);
                request.setFeatures(Arrays.asList(desiredFeature));
                BatchAnnotateImagesRequest batchRequest =
                        new BatchAnnotateImagesRequest();

                batchRequest.setRequests(Arrays.asList(request));
                BatchAnnotateImagesResponse batchResponse =
                        null;
                try {
                    batchResponse = vision.images().annotate(batchRequest).execute();
                    List<FaceAnnotation> faces = batchResponse.getResponses()
                            .get(0).getFaceAnnotations();

                    // Count faces
                    int numberOfFaces = faces.size();
                    Log.i("log-", "number Of Faces" + numberOfFaces);

                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            ImageView mImageView;
                            mImageView = findViewById(R.id.imageViewId);
                            InputStream is = getResources().openRawResource(R.raw.skate);
                            mImageView.setImageBitmap(BitmapFactory.decodeStream(is));
                        }
                    });


                    // Get joy likelihood for each face
                    String likelihoods = "";
                    for (int i = 0; i < numberOfFaces; i++) {
                        likelihoods += "\n It is " +
                                faces.get(i).getJoyLikelihood() +
                                " that face " + i + " is happy";
                    }

                    // Concatenate everything
                    final String message =
                            "This photo has " + numberOfFaces + " faces" + likelihoods;

                    // Display toast on UI thread
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            Toast.makeText(getApplicationContext(),
                                    message, Toast.LENGTH_LONG).show();
                        }
                    });
                } catch (IOException e) {
                    e.printStackTrace();
                }

            }
        });
    }
    public void imageClick(View view){
        imgFavorite = findViewById(R.id.imageView1);
        open();
    }
    public void open(){
        Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); //IMAGE CAPTURE CODE
        startActivityForResult(intent, 0);
    }
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Without EXTRA_OUTPUT the camera app only returns a small thumbnail
        // Bitmap in the "data" extra.
        Bitmap bitmap = (Bitmap) data.getExtras().get("data");
        imgFavorite.setImageBitmap(bitmap);
    }
}

You can launch the camera intent and tell it where to save the picture; afterwards you can open that file as an InputStream. To do that, you need to pass a file Uri like this:

Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
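
A minimal sketch of how that fileUri could be created, assuming a FileProvider is declared in the manifest; the file name, the authority string "com.example.fileprovider" and the helper method name are placeholders, not part of the question's code:

import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v4.content.FileProvider;

import java.io.File;

// ...inside the Activity:
private Uri fileUri;

private void takeFullSizePicture() {
    // Write the capture to the app-specific external pictures directory.
    File photoFile = new File(
            getExternalFilesDir(Environment.DIRECTORY_PICTURES),
            "camera_capture.jpg");

    // The authority must match the <provider> entry in AndroidManifest.xml.
    fileUri = FileProvider.getUriForFile(
            this, "com.example.fileprovider", photoFile);

    Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
    startActivityForResult(takePictureIntent, CAMERA_REQUEST);
}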

If you don't pass a file URI, you will only receive a thumbnail instead of the full-size photo. For more details, have a look at the documentation:

https://developer.android.com/training/camera/photobasics#TaskPath
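
Putting the pieces together, onActivityResult could then read the saved file back as an InputStream and hand the bytes to the Vision request the same way the raw resource is handled above. This is only a sketch that replaces the thumbnail-based onActivityResult from the question and assumes the fileUri field from the previous snippet; error handling is kept minimal:

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == CAMERA_REQUEST && resultCode == RESULT_OK) {
        try {
            // Open the full-size photo the camera app wrote to fileUri.
            InputStream inputStream = getContentResolver().openInputStream(fileUri);
            byte[] photoData = IOUtils.toByteArray(inputStream);
            inputStream.close();

            // Feed the bytes into the Vision request exactly as with R.raw.skate.
            Image inputImage = new Image();
            inputImage.encodeContent(photoData);
            // ...build the AnnotateImageRequest / BatchAnnotateImagesRequest as above...
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}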