Android Speech to Text from a WAV File using the Microsoft Cognitive Services Speech SDK

I am implementing speech to text in Android using the Microsoft Cognitive Services Speech API. It works when we use microphone input, but it does not work with a WAV file; it crashes. I am attaching the Android code.

package com.devstop.speechtotext;

import android.content.Intent;

import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.database.Cursor;
import android.net.Uri;
import android.provider.MediaStore;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;

import com.microsoft.cognitiveservices.speech.CancellationReason;
import com.microsoft.cognitiveservices.speech.ResultReason;
import com.microsoft.cognitiveservices.speech.SpeechConfig;
import com.microsoft.cognitiveservices.speech.SpeechRecognitionResult;
import com.microsoft.cognitiveservices.speech.SpeechRecognizer;
import com.microsoft.cognitiveservices.speech.audio.AudioConfig;
import com.microsoft.cognitiveservices.speech.audio.AudioInputStream;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Scanner;
import java.util.concurrent.ExecutionException;

public class MainActivity extends AppCompatActivity {
    private static final int VERIFY_PERMISSION_REQUEST = 200;
    public String path;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        if (!checkPermissionArray(Permissions.READ_EXTERNAL_STORAGE)) {
            verifyPermission(Permissions.READ_EXTERNAL_STORAGE);
        }
    }

    public void Gallery(View view) {
        Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
        intent.setType("audio/*"); // specify "audio/mp3" to filter only mp3 files
        startActivityForResult(intent, 1);
    }
    @Override
    protected void onActivityResult(int requestCode,int resultCode,Intent data){

        if(requestCode == 1){

            if(resultCode == RESULT_OK){

                //the selected audio.
                Uri uri = data.getData();

                String path = getPath(uri);
                try {
                    speech(path);
                } catch (ExecutionException e) {
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }

            }
        }

    }

    public  void speech(String path) throws ExecutionException, InterruptedException {
        // Creates an instance of a speech config with the specified subscription key
        // and service region. Replace with your own subscription key and service
        // region (e.g., "westus").
        SpeechConfig config = SpeechConfig.fromSubscription("Ijusthideit", "westus");

        AudioConfig audioInput = AudioConfig.fromWavFileInput(path);
        SpeechRecognizer recognizer = new SpeechRecognizer(config, audioInput);
        {
            // Subscribes to events.
            recognizer.recognizing.addEventListener((s, e) -> {
                System.out.println("RECOGNIZING: Text=" + e.getResult().getText());
            });

            recognizer.recognized.addEventListener((s, e) -> {
                if (e.getResult().getReason() == ResultReason.RecognizedSpeech) {
                    System.out.println("RECOGNIZED: Text=" + e.getResult().getText());
                }
                else if (e.getResult().getReason() == ResultReason.NoMatch) {
                    System.out.println("NOMATCH: Speech could not be recognized.");
                }
            });

            recognizer.canceled.addEventListener((s, e) -> {
                System.out.println("CANCELED: Reason=" + e.getReason());

                if (e.getReason() == CancellationReason.Error) {
                    System.out.println("CANCELED: ErrorDetails=" + e.getErrorDetails());
                    System.out.println("CANCELED: Did you update the subscription info?");
                }
            });

            recognizer.sessionStarted.addEventListener((s, e) -> {
                System.out.println("\n    Session started event.");
            });

            recognizer.sessionStopped.addEventListener((s, e) -> {
                System.out.println("\n    Session stopped event.");
            });

            // Starts continuous recognition. Uses StopContinuousRecognitionAsync() to stop recognition.
            System.out.println("Say something...");
            recognizer.startContinuousRecognitionAsync().get();

            System.out.println("Press any key to stop");
            new Scanner(System.in).nextLine();

            recognizer.stopContinuousRecognitionAsync().get();
        }
    }

    private void verifyPermission(String[] permissions) {
        ActivityCompat.requestPermissions(this,permissions,VERIFY_PERMISSION_REQUEST);
    }

    private boolean checkPermissionArray(String[] permissions) {
        for (String check : permissions) {
            if (!checkPermissions(check)) {
                return false;
            }
        }
        return true;
    }

    private boolean checkPermissions(String permission) {
        int permissionRequest = ActivityCompat.checkSelfPermission(getApplicationContext(), permission);
        return permissionRequest == PackageManager.PERMISSION_GRANTED;
    }

    private String getPath( Uri uri ) {
        String result = null;
        String[] proj = {MediaStore.Images.Media.DATA};
        Cursor cursor = getApplicationContext().getContentResolver().query(uri, proj, null, null, null);
        if (cursor != null) {
            if (cursor.moveToFirst()) {
                int column_index = cursor.getColumnIndexOrThrow(proj[0]);
                result = cursor.getString(column_index);
            }
            cursor.close();
        }
        if (result == null) {
            result = "nill";
        }
        return result;
    }
}

When I select the WAV file, it crashes and logcat gives this:

2018-10-16 13:17:47.965 495-495/com.devstop.speechtotext E/AndroidRuntime: FATAL EXCEPTION: main
    Process: com.devstop.speechtotext, PID: 495
    java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=1, result=-1, data=Intent { dat=content://com.android.providers.media.documents/document/audio:44395 flg=0x1 }} to activity {com.devstop.speechtotext/com.devstop.speechtotext.MainActivity}: java.lang.RuntimeException: Exception with an error code: 0x8 (SPXERR_FILE_OPEN_FAILED)
    [CALL STACK]
    # 0: 0x7e407f42f8 _ZN9Microsoft17CognitiveServices6Speech4Impl17CSpxWavFileReader4OpenEPKw
    # 1: 0x7e407f6924 _ZN9Microsoft17CognitiveServices6Speech4Impl15CSpxWavFilePump10EnsureFileEPKw
    # 2: 0x7e407f702c _ZThn64_N9Microsoft17CognitiveServices6Speech4Impl15CSpxWavFilePump4OpenEPKw
    # 3: 0x7e407db8c4 _ZN9Microsoft17CognitiveServices6Speech4Impl22CSpxAudioStreamSession12InitFromFileEPKw
    # 4: 0x7e407b2b7c _ZN9Microsoft17CognitiveServices6Speech4Impl20CSpxSpeechApiFactory31InitSessionFromAudioInputConfigENSt6__ndk110shared_ptrINS2_11ISpxSessionEEENS5_INS2_15ISpxAudioConfigEEE
    # 5: 0x7e407b1450 _ZN9Microsoft17CognitiveServices6Speech4Impl20CSpxSpeechApiFactory34CreateRecognizerFromConfigInternalEPKcS5_S5_NS1_12OutputFormatENSt6__ndk110shared_ptrINS2_15ISpxAudioConfigEEE
    # 6: 0x7e407b1b80 _ZThn48_N9Microsoft17CognitiveServices6Speech4Impl20CSpxSpeechApiFactory32CreateSpeechRecognizerFromConfigEPKcNS1_12OutputFormatENSt6__ndk110shared_ptrINS2_15ISpxAudioConfigEEE
    # 7: 0x7e40792fd0 recognizer_create_speech_recognizer_from_config
    # 8: 0x7e40beae4c _ZN9Microsoft17CognitiveServices6Speech16SpeechRecognizer10FromConfigENSt6__ndk110shared_ptrINS1_12SpeechConfigEEENS4_INS1_5Audio11AudioConfigEEE
    # 9: 0x7e40bcf82c Java_com_microsoft_cognitiveservices_speech_internal_carbon_1javaJNI_SpeechRecognizer_1FromConfig_1_1SWIG_10
    #10: 0x7e622fc704 ???
    #11: 0x7e622f363c ???
    #12: 0x7e61ec20b8 _ZN3art9ArtMethod6InvokeEPNS_6ThreadEPjjPNS_6JValueEPKc
    #13: 0x7e6206c1e0 _ZN3art11interpreter34ArtInterpreterToCompiledCodeBridgeEPNS_6ThreadEPNS_9ArtMethodEPKNS_7DexFile8CodeItemEPNS_11ShadowFrameEPNS_6JValueE
    #14: 0x7e62067718 _ZN3art11interpreter6DoCallILb1ELb0EEEbPNS_9ArtMethodEPNS_6ThreadERNS_11ShadowFrameEPKNS_11InstructionEtPNS_6JValueE
    #15: 0x7e622dd9dc MterpInvokeStaticRange
    #16: 0x7e622e5198 ExecuteMterpImpl
    #17: 0x7e62047624 ???
    #18: 0x7e6204dd24 _ZN3art11interpreter33ArtInterpreterToInterpreterBridgeEPNS_6ThreadEPKNS_7DexFile8CodeItemEPNS_11ShadowFrameEPNS_6JValueE
    #19: 0x7e62066888 _ZN3art11interpreter6DoCallILb0ELb0EEEbPNS_9ArtMethodEPNS_6ThreadERNS_11ShadowFrameEPKNS_11InstructionEtPNS_6JValueE
    #20: 0x7e622dc134 MterpInvokeStatic
    #21: 0x7e622e4e98 ExecuteMterpImpl
    #22: 0x7e62047624 ???
    #23: 0x7e6204dd24 _ZN3art11interpreter33ArtInterpreterToInterpreterBridgeEPNS_6ThreadEPKNS_7DexFile8CodeItemEPNS_11ShadowFrameEPNS_6JValueE
    #24: 0x7e62066888 _ZN3art11interpreter6DoCallILb0ELb0EEEbPNS_9ArtMethodEPNS_6ThreadERNS_11ShadowFrameEPKNS_11InstructionEtPNS_6JValueE
    #25: 0x7e622dbe7c MterpInvokeDirect
    #26: 0x7e622e4e18 ExecuteMterpImpl
    #27: 0x7e62047624 ???
        at com.microsoft.cognitiveservices.speech.internal.carbon_javaJNI.SpeechRecognizer_FromConfig__SWIG_0(Native Method)
        at com.microsoft.cognitiveservices.speech.internal.SpeechRecognizer.FromConfig(SpeechRecognizer.java:41)
        at com.microsoft.cognitiveservices.speech.SpeechRecognizer.<init>(SpeechRecognizer.java:88)
        at com.devstop.speechtotext.MainActivity.speech(MainActivity.java:96)
        at com.devstop.speechtotext.MainActivity.onActivityResult(MainActivity.java:71)
        at android.app.Activity.dispatchActivityResult(Activity.java:7690)
        at android.app.ActivityThread.deliverResults(ActivityThread.java:4928)

I don't know what is happening here. The file path I am passing to the speech function is correct, but it still crashes. Any help would be appreciated. Thanks.

I found out what I was doing wrong. The path I was getting from the URI was incorrect. You can use the following method to get the correct path from a URI.

@SuppressLint("NewApi")
public String getFilePath(Context context, Uri uri) throws URISyntaxException {
    String selection = null;
    String[] selectionArgs = null;
    // The Uri format is different on KitKat (Android 4.4) and later; DocumentProvider
    // Uris need to be resolved before querying MediaStore.
    if (Build.VERSION.SDK_INT >= 19 && DocumentsContract.isDocumentUri(context.getApplicationContext(), uri)) {
        if (isExternalStorageDocument(uri)) {
            final String docId = DocumentsContract.getDocumentId(uri);
            final String[] split = docId.split(":");
            return Environment.getExternalStorageDirectory() + "/" + split[1];
        } else if (isDownloadsDocument(uri)) {
            final String id = DocumentsContract.getDocumentId(uri);
            uri = ContentUris.withAppendedId(
                    Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
        } else if (isMediaDocument(uri)) {
            final String docId = DocumentsContract.getDocumentId(uri);
            final String[] split = docId.split(":");
            final String type = split[0];
            if ("image".equals(type)) {
                uri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
            } else if ("video".equals(type)) {
                uri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
            } else if ("audio".equals(type)) {
                uri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
            }
            selection = "_id=?";
            selectionArgs = new String[]{
                    split[1]
            };
        }
    }
    if ("content".equalsIgnoreCase(uri.getScheme())) {
        String[] projection = {
                MediaStore.Images.Media.DATA
        };
        Cursor cursor = null;
        try {
            cursor = context.getContentResolver()
                    .query(uri, projection, selection, selectionArgs, null);
            int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
            if (cursor.moveToFirst()) {
                return cursor.getString(column_index);
            }
        } catch (Exception e) {
            // Ignore and fall through to return null.
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    } else if ("file".equalsIgnoreCase(uri.getScheme())) {
        return uri.getPath();
    }
    return null;
}

public static boolean isExternalStorageDocument(Uri uri) {
    return "com.android.externalstorage.documents".equals(uri.getAuthority());
}

public static boolean isDownloadsDocument(Uri uri) {
    return "com.android.providers.downloads.documents".equals(uri.getAuthority());
}

public static boolean isMediaDocument(Uri uri) {
    return "com.android.providers.media.documents".equals(uri.getAuthority());
}
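
For reference, here is a minimal sketch (assuming the rest of the MainActivity from the question) of how onActivityResult could call this helper instead of the original getPath:

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 1 && resultCode == RESULT_OK) {
        // The selected audio comes back as a content:// Uri; resolve it to a real
        // file system path before handing it to AudioConfig.fromWavFileInput.
        Uri uri = data.getData();
        try {
            String path = getFilePath(this, uri);
            if (path != null) {
                speech(path);
            }
        } catch (URISyntaxException | ExecutionException | InterruptedException e) {
            e.printStackTrace();
        }
    }
}

The null check simply guards against the case where no real path can be resolved, which would otherwise lead straight back to the SPXERR_FILE_OPEN_FAILED crash.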

This answer helped me.

Don't forget to upvote the original author.