
My app now has speech recognition and audio recording functionality. I want to collect the speech buffer data while the recognition process is running. As part of RecognitionListener, onBufferReceived should be triggered during recognition, but no log is printed while recognition is in progress. Stepping through in my debugger also suggests that onBufferReceived should be entered. My intention is to collect the buffer data during recognition and save it to a recording file. In short: onBufferReceived of RecognitionListener is never called while speech recognition is in progress.
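For reference, this is the kind of thing I am trying to do with the buffer, as a minimal sketch assuming the recognizer actually delivers audio through onBufferReceived (the class name BufferSavingListener and the FileOutputStream passed in are illustrative only, not part of my project):

import android.os.Bundle;
import android.speech.RecognitionListener;
import android.util.Log;

import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical listener that appends every buffer delivered via onBufferReceived
// to a file; only the buffer-related callback does any work here.
class BufferSavingListener implements RecognitionListener {
    private static final String TAG = "BufferSavingListener";
    private final FileOutputStream out;   // opened by the caller, e.g. in the cache dir

    BufferSavingListener(FileOutputStream out) {
        this.out = out;
    }

    @Override
    public void onBufferReceived(byte[] buffer) {
        Log.v(TAG, "onBufferReceived: " + buffer.length + " bytes");
        try {
            out.write(buffer);            // raw bytes exactly as the recognizer delivers them
        } catch (IOException e) {
            Log.e(TAG, "failed to write buffer", e);
        }
    }

    // Remaining RecognitionListener callbacks left empty for brevity.
    @Override public void onReadyForSpeech(Bundle params) {}
    @Override public void onBeginningOfSpeech() {}
    @Override public void onRmsChanged(float rmsdB) {}
    @Override public void onEndOfSpeech() {}
    @Override public void onError(int error) {}
    @Override public void onResults(Bundle results) {}
    @Override public void onPartialResults(Bundle partialResults) {}
    @Override public void onEvent(int eventType, Bundle params) {}
}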

MainActivity

package com.example.syoui.voicerecordtest; 

import android.Manifest; 
import android.content.Context; 
import android.content.Intent; 
import android.content.pm.PackageManager; 
import android.media.MediaPlayer; 
import android.media.MediaRecorder; 
import android.os.Bundle; 
import android.os.Handler; 
import android.speech.RecognitionListener; 
import android.speech.RecognizerIntent; 
import android.speech.SpeechRecognizer; 
import android.support.annotation.NonNull; 
import android.support.v4.app.ActivityCompat; 
import android.support.v7.app.AppCompatActivity; 
import android.util.Log; 
import android.view.View; 
import android.view.ViewGroup; 
import android.widget.Button; 
import android.widget.LinearLayout; 
import android.widget.TextView; 

import java.io.IOException; 
import java.util.ArrayList; 

public class MainActivity extends AppCompatActivity { 
    private static final String LOG_TAG = "AudioRecordTest"; 
    private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200; 
    private static String mFileName = null; 

    private RecordButton mRecordButton = null; 
    public MediaRecorder mRecorder = null; 

    private PlayButton mPlayButton = null; 
    private MediaPlayer mPlayer = null; 

    // Requesting permission to RECORD_AUDIO 
    private boolean permissionToRecordAccepted = false; 
    private String [] permissions = {Manifest.permission.RECORD_AUDIO}; 


    private static final String TAG = "SpeechRecognizerSampleActivity"; 
    private SpeechRecognizer recog; 
    private Runnable readyRecognizeSpeech; 
    public Handler handler = new Handler(); 

    @Override 
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { 
     super.onRequestPermissionsResult(requestCode, permissions, grantResults); 
     switch (requestCode){ 
      case REQUEST_RECORD_AUDIO_PERMISSION: 
       permissionToRecordAccepted = grantResults[0] == PackageManager.PERMISSION_GRANTED; 
       break; 
     } 
     if (!permissionToRecordAccepted) finish(); 

    } 

    private void onRecord(boolean start) { 
     if (start) { 
      startRecording(); 
     } else { 
      stopRecording(); 
     } 
    } 

    private void onPlay(boolean start) { 
     if (start) { 
      startPlaying(); 
     } else { 
      stopPlaying(); 
     } 
    } 

    private void startPlaying() { 
     mPlayer = new MediaPlayer(); 
     try { 
      mPlayer.setDataSource(mFileName); 
      mPlayer.prepare(); 
      mPlayer.start(); 
     } catch (IOException e) { 
      Log.e(LOG_TAG, "prepare() failed"); 
     } 
    } 

    private void stopPlaying() { 
     mPlayer.release(); 
     mPlayer = null; 
    } 

    private void startRecording() { 

     if(mRecorder != null){ 
      stopRecording(); 
     } 

     mRecorder = new MediaRecorder(); 
     mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); 
     mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); 
     mRecorder.setOutputFile(mFileName); 
     mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); 

     try { 
      mRecorder.prepare(); 
     } catch (IOException e) { 
      Log.e(LOG_TAG, "prepare() failed"); 
     } 

     mRecorder.start(); 
     startRecognizeSpeech(); 
    } 

    private void stopRecording() { 
     mRecorder.stop(); 
     mRecorder.release(); 
     mRecorder = null; 
    } 

    class RecordButton extends Button { 
     boolean mStartRecording = true; 

     OnClickListener clicker = new OnClickListener() { 
      public void onClick(View v) { 
       onRecord(mStartRecording); 
       if (mStartRecording) { 
        setText("Stop recording"); 
       } else { 
        setText("Start recording"); 
       } 
       mStartRecording = !mStartRecording; 
      } 
     }; 

     public RecordButton(Context ctx) { 
      super(ctx); 
      setText("Start recording"); 
      setOnClickListener(clicker); 
     } 
    } 

    class PlayButton extends Button { 
     boolean mStartPlaying = true; 

     OnClickListener clicker = new OnClickListener() { 
      public void onClick(View v) { 
       onPlay(mStartPlaying); 
       if (mStartPlaying) { 
        setText("Stop playing"); 
       } else { 
        setText("Start playing"); 
       } 
       mStartPlaying = !mStartPlaying; 
      } 
     }; 

     public PlayButton(Context ctx) { 
      super(ctx); 
      setText("Start playing"); 
      setOnClickListener(clicker); 
     } 
    } 

    @Override 
    public void onStop() { 
     super.onStop(); 
     if (mRecorder != null) { 
      mRecorder.release(); 
      mRecorder = null; 
     } 

     if (mPlayer != null) { 
      mPlayer.release(); 
      mPlayer = null; 
     } 
    } 


    @Override 
    public void onCreate(Bundle icicle) { 
     super.onCreate(icicle); 
     setContentView(R.layout.activity_main); 
     // Record to the external cache directory for visibility 

     mFileName = getExternalCacheDir().getAbsolutePath(); 
     //mFileName = "/sdcard"; 
     mFileName += "/audiorecordtest.3gp"; 

     Log.i("mFileName",mFileName); 

     ActivityCompat.requestPermissions(this, permissions, REQUEST_RECORD_AUDIO_PERMISSION); 

     LinearLayout ll = (LinearLayout) findViewById(R.id.recordButton); 
     mRecordButton = new RecordButton(this); 
     ll.addView(mRecordButton, 
       new LinearLayout.LayoutParams(
         ViewGroup.LayoutParams.WRAP_CONTENT, 
         ViewGroup.LayoutParams.WRAP_CONTENT, 
         0)); 
     mPlayButton = new PlayButton(this); 
     ll.addView(mPlayButton, 
       new LinearLayout.LayoutParams(
         ViewGroup.LayoutParams.WRAP_CONTENT, 
         ViewGroup.LayoutParams.WRAP_CONTENT, 
         0)); 

     /*******/ 

     recog = SpeechRecognizer.createSpeechRecognizer(this); 
     recog.setRecognitionListener(new RecogListener(this)); 



     // register the listener 
     Button b = (Button)findViewById(R.id.start_recognize); 
     b.setOnClickListener(new View.OnClickListener() { 
      @Override 
      public void onClick(View v) { 
       startRecognizeSpeech(); 
      } 
     }); 
     // startRecognizeSpeech(); 

    } 


    private void startRecognizeSpeech() { 
     //Intent intent = RecognizerIntent.getVoiceDetailsIntent(getApplicationContext()); 
     Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); 
     intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, 
       RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH); 
     intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now"); // prompt text shown by the recognizer UI (if any) 

     recog.startListening(intent); 


     ((TextView)findViewById(R.id.status)).setText(""); 
     ((TextView)findViewById(R.id.sub_status)).setText(""); 
     findViewById(R.id.start_recognize).setEnabled(false); 
    } 

    /***********/ 


    private static class RecogListener implements RecognitionListener { 
     private MainActivity caller; 
     private TextView status; 
     private TextView subStatus; 

     RecogListener(MainActivity a) { 
      caller = a; 
      status = (TextView)a.findViewById(R.id.status); 
      subStatus = (TextView)a.findViewById(R.id.sub_status); 
     } 

     // ready for speech recognition 
     @Override 
     public void onReadyForSpeech(Bundle params) { 
      status.setText("ready for speech"); 
      Log.v(TAG,"ready for speech"); 
     } 

     // beginning of speech input 
     @Override 
     public void onBeginningOfSpeech() { 
      status.setText("beginning of speech"); 
      Log.v(TAG,"beginning of speech"); 
     } 

     // for feedback of the recorded audio buffer 
     @Override 
     public void onBufferReceived(byte[] buffer) { 
      status.setText("onBufferReceived"); 
      Log.v(TAG,"onBufferReceived"); 

      //status.setText(buffer.toString()); 
     } 


     // the input sound level (dB) changed 
     @Override 
     public void onRmsChanged(float rmsdB) { 
      String s = String.format("recieve : % 2.2f[dB]", rmsdB); 
      subStatus.setText(s); 
      //Log.v(TAG,"recieve : " + rmsdB + "dB"); 
     } 

     // end of speech input 
     @Override 
     public void onEndOfSpeech() { 
      status.setText("end of speech"); 
      Log.v(TAG,"end of speech"); 
      caller.handler.postDelayed(caller.readyRecognizeSpeech, 500); 
     } 

     // network error or speech recognition error 
     @Override 
     public void onError(int error) { 
      status.setText("on error"); 
      caller.findViewById(R.id.start_recognize).setEnabled(true); 
      Log.v(TAG,"on error"); 
      switch (error) { 
       case SpeechRecognizer.ERROR_AUDIO: 
        // failed to record audio data 
        subStatus.setText("ERROR_AUDIO"); 
        break; 
       case SpeechRecognizer.ERROR_CLIENT: 
        // other client-side error on the Android device 
        subStatus.setText("ERROR_CLIENT"); 
        break; 
       case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS: 
        // insufficient permissions 
        subStatus.setText("ERROR_INSUFFICIENT_PERMISSIONS"); 
        break; 
       case SpeechRecognizer.ERROR_NETWORK: 
        // other network error 
        subStatus.setText("ERROR_NETWORK"); 
        break; 
       case SpeechRecognizer.ERROR_NETWORK_TIMEOUT: 
        // network timeout error 
        subStatus.setText("ERROR_NETWORK_TIMEOUT"); 
        break; 
       case SpeechRecognizer.ERROR_NO_MATCH: 
        // no recognition result 
        subStatus.setText("ERROR_NO_MATCH"); 
        caller.handler.postDelayed(caller.readyRecognizeSpeech,1000); 
        break; 
       case SpeechRecognizer.ERROR_RECOGNIZER_BUSY: 
        // RecognitionService is busy; the request could not be issued 
        subStatus.setText("ERROR_RECOGNIZER_BUSY"); 
        caller.handler.postDelayed(caller.readyRecognizeSpeech,1000); 
        break; 
       case SpeechRecognizer.ERROR_SERVER: 
        // error reported from the server 
        subStatus.setText("ERROR_SERVER"); 
        break; 
       case SpeechRecognizer.ERROR_SPEECH_TIMEOUT: 
        // no speech input 
        subStatus.setText("ERROR_SPEECH_TIMEOUT"); 
        caller.handler.postDelayed(caller.readyRecognizeSpeech,1000); 
        break; 
       default: 
      } 
     } 

     // called when an event occurs 
     @Override 
     public void onEvent(int eventType, Bundle params) { 
      status.setText("on event"); 
      Log.v(TAG,"on event"); 
     } 

     // called when partial recognition results are available 
     @Override 
     public void onPartialResults(Bundle partialResults) { 
      status.setText("on partial results"); 
      Log.v(TAG,"on results"); 
     } 

     // recognition results 
     @Override 
     public void onResults(Bundle data) { 
      status.setText("on results"); 
      Log.v(TAG,"on results"); 
      ArrayList<String> results = data.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION); 

      TextView t = (TextView)caller.findViewById(R.id.result); 
      t.setText(""); 
      for (String s : results) { 
       t.append(s + "\n"); 
      } 

//   boolean end=false; 
//   for (String s : results) { 
//    if (s.equals("終わり")) 
//     end=true; 
//    if (s.equals("おわり")) 
//     end=true; 
//    if (s.equals("キャンセル")) 
//     end=true; 
//   } 
//   if (end) 
//    caller.findViewById(R.id.start_recognize).setEnabled(true); 
//   else 
//    caller.startRecognizeSpeech(); 


      caller.findViewById(R.id.start_recognize).setEnabled(true); 

      //caller.startRecognizeSpeech(); 

     } 
    } 


} 

activity_main.xml

<?xml version="1.0" encoding="utf-8"?> 
<android.support.constraint.ConstraintLayout 
    xmlns:android="http://schemas.android.com/apk/res/android" 
    xmlns:app="http://schemas.android.com/apk/res-auto" 
    xmlns:tools="http://schemas.android.com/tools" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent" 
    tools:context="com.example.syoui.voicerecordtest.MainActivity"> 





       <LinearLayout 
           android:layout_width="fill_parent" 
           android:layout_height="wrap_content" 
           android:orientation="vertical" > 
          <Button 
           android:id="@+id/start_recognize" 
           android:layout_width="wrap_content" 
           android:layout_height="wrap_content" 
           android:text="@string/start_recognize" /> 
          <TextView 
           android:id="@+id/status" 
           android:layout_width="fill_parent" 
           android:layout_height="wrap_content" /> 
          <TextView 
           android:id="@+id/sub_status" 
           android:layout_width="fill_parent" 
           android:layout_height="wrap_content" /> 
          <ScrollView 
           android:layout_height="fill_parent" 
           android:layout_width="wrap_content"> 
           <TextView 
            android:id="@+id/result" 
            android:inputType="textMultiLine" 
            android:layout_height="fill_parent" 
            android:layout_width="fill_parent"/> 
          </ScrollView> 



        <LinearLayout 
         android:layout_width="fill_parent" 
         android:layout_height="130dp" 
         android:orientation="vertical" 
         android:id="@+id/recordButton"> 
        </LinearLayout> 
       </LinearLayout> 







</android.support.constraint.ConstraintLayout> 

strings.xml

<resources> 
    <string name="app_name">voiceRecordTest</string> 
    <string name="start_recognize">開始</string> 
</resources> 
