
I am trying to run a continuous voice recognition service in the background of an Android application, and I want the service to send the processed text back to the main thread for UI purposes.

When my thread's run function executes, the thread terminates immediately after binding the service. Could any of you point me in the right direction as to why this is happening?

Here is the service code, borrowed from here (it comes from a Google Glass project):

/* 
* Copyright (C) 2010 The Android Open Source Project 
* 
* Licensed under the Apache License, Version 2.0 (the "License"); you may not 
* use this file except in compliance with the License. You may obtain a copy of 
* the License at 
* 
* http://www.apache.org/licenses/LICENSE-2.0 
* 
* Unless required by applicable law or agreed to in writing, software 
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
* License for the specific language governing permissions and limitations under 
* the License. 
*/ 

package com.example.quadcontrol; 

//import android.annotation.SdkConstant; 
//import android.annotation.SdkConstant.SdkConstantType; 
import android.app.Service; 
import android.content.Intent; 
import android.content.pm.PackageManager; 
import android.os.Bundle; 
import android.os.Handler; 
import android.os.IBinder; 
import android.os.Message; 
import android.os.RemoteException; 
import android.speech.RecognitionListener; 
import android.speech.RecognitionService; 
import android.speech.SpeechRecognizer; 
import android.util.Log; 



public abstract class SimpleVoiceService extends Service 
{ 
    /** 
    * The {@link Intent} that must be declared as handled by the service. 
    */ 
    //@SdkConstant(SdkConstantType.SERVICE_ACTION) 
    public static final String SERVICE_INTERFACE = "android.speech.RecognitionService"; 

    /** 
    * Name under which a RecognitionService component publishes information about itself. 
    * This meta-data should reference an XML resource containing a 
    * <code>&lt;{@link android.R.styleable#RecognitionService recognition-service}&gt;</code> tag. 
    */ 
    public static final String SERVICE_META_DATA = "android.speech"; 

    /** Log messages identifier */ 
    private static final String TAG = "SimpleVoiceService"; 

    /** Debugging flag */ 
    private static final boolean DBG = false; 

    /** Binder of the recognition service */ 
    private RecognitionServiceBinder mBinder = new RecognitionServiceBinder(this); 

    /** 
    * The current callback of an application that invoked the 
    * {@link RecognitionService#onStartListening(Intent, Callback)} method 
    */ 
    private Callback mCurrentCallback = null; 

    private static final int MSG_START_LISTENING = 1; 

    private static final int MSG_STOP_LISTENING = 2; 

    private static final int MSG_CANCEL = 3; 

    private final Handler mHandler = new Handler() { 
     @Override 
     public void handleMessage(Message msg) { 
      switch (msg.what) { 
       case MSG_START_LISTENING: 
        StartListeningArgs args = (StartListeningArgs) msg.obj; 
        dispatchStartListening(args.mIntent, args.mListener); 
        break; 
       case MSG_STOP_LISTENING: 
        dispatchStopListening((RecognitionListener) msg.obj); 
        break; 
       case MSG_CANCEL: 
        dispatchCancel((RecognitionListener) msg.obj); 
      } 
     } 
    }; 

    private void dispatchStartListening(Intent intent, android.speech.RecognitionService.Callback mListener) { 
     if (mCurrentCallback == null) { 
      //if (DBG) Log.d(TAG, "created new mCurrentCallback, listener = " + mListener.asBinder()); 
      mCurrentCallback = new Callback(mListener); 
      SimpleVoiceService.this.onStartListening(intent, mCurrentCallback); 
     } else { 
      try { 
       mListener.error(SpeechRecognizer.ERROR_RECOGNIZER_BUSY); 
      } catch (RemoteException e) { 
       Log.d(TAG, "onError call from startListening failed"); 
      } 
      Log.i(TAG, "concurrent startListening received - ignoring this call"); 
     } 
    } 

    private void dispatchStopListening(RecognitionListener listener) { 
     if (mCurrentCallback == null) { 
      listener.onError(SpeechRecognizer.ERROR_CLIENT); 
      Log.w(TAG, "stopListening called with no preceding startListening - ignoring"); 
     } else if (mCurrentCallback.mListener != listener) { 
      listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY); 
      Log.w(TAG, "stopListening called by other caller than startListening - ignoring"); 
     } else { // the correct state 
      SimpleVoiceService.this.onStopListening(mCurrentCallback); 
     } 
    } 

    private void dispatchCancel(RecognitionListener listener) { 
     if (mCurrentCallback == null) { 
      if (DBG) Log.d(TAG, "cancel called with no preceding startListening - ignoring"); 
     } else if (mCurrentCallback.mListener != listener) { 
      Log.w(TAG, "cancel called by client who did not call startListening - ignoring"); 
     } else { // the correct state 
      SimpleVoiceService.this.onCancel(mCurrentCallback); 
      mCurrentCallback = null; 
      if (DBG) Log.d(TAG, "canceling - setting mCurrentCallback to null"); 
     } 
    } 

    private class StartListeningArgs { 
     public final Intent mIntent; 

     public final android.speech.RecognitionService.Callback mListener; 

     public StartListeningArgs(Intent intent, android.speech.RecognitionService.Callback listener) { 
      this.mIntent = intent; 
      this.mListener = listener; 
     } 
    } 

    /** 
    * Checks whether the caller has sufficient permissions 
    * 
    * @param listener to send the error message to in case of error 
    * @return {@code true} if the caller has enough permissions, {@code false} otherwise 
    */ 
    private boolean checkPermissions(android.speech.RecognitionService.Callback listener) { 
     if (DBG) Log.d(TAG, "checkPermissions"); 
     if (SimpleVoiceService.this.checkCallingOrSelfPermission(android.Manifest.permission. 
       RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) { 
      return true; 
     } 
     Log.e(TAG, "call for recognition service without RECORD_AUDIO permissions"); 
     try { 
      listener.error(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS); 
     } catch (RemoteException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } 
     return false; 
    } 

    /** 
    * Notifies the service that it should start listening for speech. 
    * 
    * @param recognizerIntent contains parameters for the recognition to be performed. The intent 
    *  may also contain optional extras, see {@link RecognizerIntent}. If these values are 
    *  not set explicitly, default values should be used by the recognizer. 
    * @param listener that will receive the service's callbacks 
    */ 
    protected abstract void onStartListening(Intent recognizerIntent, Callback listener); 

    /** 
    * Notifies the service that it should cancel the speech recognition. 
    */ 
    protected abstract void onCancel(Callback listener); 

    /** 
    * Notifies the service that it should stop listening for speech. Speech captured so far should 
    * be recognized as if the user had stopped speaking at this point. This method is only called 
    * if the application calls it explicitly. 
    */ 
    protected abstract void onStopListening(Callback listener); 

    @Override 
    public final IBinder onBind(final Intent intent) { 
     if (DBG) Log.d(TAG, "onBind, intent=" + intent); 
     return (IBinder) mBinder; 
    } 

    @Override 
    public void onDestroy() { 
     if (DBG) Log.d(TAG, "onDestroy"); 
     mCurrentCallback = null; 
     mBinder.clearReference(); 
     super.onDestroy(); 
    } 

    /** 
    * This class receives callbacks from the speech recognition service and forwards them to the 
    * user. An instance of this class is passed to the 
    * {@link RecognitionService#onStartListening(Intent, Callback)} method. Recognizers may call 
    * these methods on any thread. 
    */ 
    public class Callback { 
     private final android.speech.RecognitionService.Callback mListener; 

     private Callback(android.speech.RecognitionService.Callback mListener2) { 
      mListener = mListener2; 
     } 

     /** 
     * The service should call this method when the user has started to speak. 
     */ 
     public void beginningOfSpeech() throws RemoteException { 
      if (DBG) Log.d(TAG, "beginningOfSpeech"); 
      mListener.beginningOfSpeech(); 
     } 

     /** 
     * The service should call this method when sound has been received. The purpose of this 
     * function is to allow giving feedback to the user regarding the captured audio. 
     * 
     * @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a 
     *  single channel audio stream. The sample rate is implementation dependent. 
     */ 
     public void bufferReceived(byte[] buffer) throws RemoteException { 
      mListener.bufferReceived(buffer); 
     } 

     /** 
     * The service should call this method after the user stops speaking. 
     */ 
     public void endOfSpeech() throws RemoteException { 
      mListener.endOfSpeech(); 
     } 

     /** 
     * The service should call this method when a network or recognition error occurred. 
     * 
     * @param error code is defined in {@link SpeechRecognizer} 
     */ 
     public void error(int error) throws RemoteException { 
      mCurrentCallback = null; 
      mListener.error(error); 
     } 

     /** 
     * The service should call this method when partial recognition results are available. This 
     * method can be called at any time between {@link #beginningOfSpeech()} and 
     * {@link #results(Bundle)} when partial results are ready. This method may be called zero, 
     * one or multiple times for each call to {@link SpeechRecognizer#startListening(Intent)}, 
     * depending on the speech recognition service implementation. 
     * 
     * @param partialResults the returned results. To retrieve the results in 
     *  ArrayList&lt;String&gt; format use {@link Bundle#getStringArrayList(String)} with 
     *  {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter 
     */ 
     public void partialResults(Bundle partialResults) throws RemoteException { 
      mListener.partialResults(partialResults); 
     } 

     /** 
     * The service should call this method when the endpointer is ready for the user to start 
     * speaking. 
     * 
     * @param params parameters set by the recognition service. Reserved for future use. 
     */ 
     public void readyForSpeech(Bundle params) throws RemoteException { 
      mListener.readyForSpeech(params); 
     } 

     /** 
     * The service should call this method when recognition results are ready. 
     * 
     * @param results the recognition results. To retrieve the results in {@code 
     *  ArrayList&lt;String&gt;} format use {@link Bundle#getStringArrayList(String)} with 
     *  {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter 
     */ 
     public void results(Bundle results) throws RemoteException { 
      mCurrentCallback = null; 
      mListener.results(results); 
     } 

     /** 
     * The service should call this method when the sound level in the audio stream has changed. 
     * There is no guarantee that this method will be called. 
     * 
     * @param rmsdB the new RMS dB value 
     */ 
     public void rmsChanged(float rmsdB) throws RemoteException { 
      mListener.rmsChanged(rmsdB); 
     } 
    } 

    /** Binder of the recognition service */ 
    private static class RecognitionServiceBinder extends RecognitionService { 
     private SimpleVoiceService mInternalService; 

     public RecognitionServiceBinder(SimpleVoiceService simpleVoiceService) { 
      mInternalService = simpleVoiceService; 
     } 

     @Override 
     protected void onStartListening(Intent recognizerIntent, Callback listener) { 
      //if (DBG) Log.d(TAG, "startListening called by:" + listener.asBinder()); 
      if (mInternalService != null && mInternalService.checkPermissions(listener)) { 
       mInternalService.mHandler.sendMessage(Message.obtain(mInternalService.mHandler, 
         MSG_START_LISTENING, mInternalService.new StartListeningArgs(
           recognizerIntent, listener))); 
      } 
     } 

     @Override 
     protected void onStopListening(Callback listener) { 
      //if (DBG) Log.d(TAG, "stopListening called by:" + listener.asBinder()); 
      if (mInternalService != null && mInternalService.checkPermissions(listener)) { 
       mInternalService.mHandler.sendMessage(Message.obtain(mInternalService.mHandler, 
         MSG_STOP_LISTENING, listener)); 
      } 
     } 

     @Override 
     protected void onCancel(Callback listener) { 
      //if (DBG) Log.d(TAG, "cancel called by:" + listener.asBinder()); 
      if (mInternalService != null && mInternalService.checkPermissions(listener)) { 
       mInternalService.mHandler.sendMessage(Message.obtain(mInternalService.mHandler, 
         MSG_CANCEL, listener)); 
      } 
     } 

     public void clearReference() { 
      mInternalService = null; 
     }  
    } 
} 

And here is my manifest:

<?xml version="1.0" encoding="utf-8"?> 
<manifest xmlns:android="http://schemas.android.com/apk/res/android" 
    package="com.example.quadcontrol" 
    android:versionCode="1" 
    android:versionName="1.0" > 

    <uses-permission android:name="com.google.android.glass.permission.DEVELOPMENT" /> 
    <uses-permission android:name="android.permission.RECORD_AUDIO" /> 

    <uses-sdk 
     android:minSdkVersion="15" 
     android:targetSdkVersion="19" />  

    <application 
     android:allowBackup="true" 
     android:icon="@drawable/ic_launcher" 
     android:label="@string/app_name" 
     android:immersive="true" > 

     <activity 
      android:name=".MainActivity" 
      android:label="@string/app_name" 
      android:icon="@drawable/ic_launcher" >      

      <intent-filter>     
       <action android:name="android.intent.action.MAIN" /> 
       <category android:name="android.intent.category.LAUNCHER" />    
      </intent-filter> 

       <intent-filter> 
       <action android:name="android.speech.action.RECOGNIZE_SPEECH" /> 
       <category android:name="android.intent.category.DEFAULT" /> 
       </intent-filter> 

      <intent-filter> 
       <action android:name="com.google.android.glass.action.VOICE_TRIGGER" /> 
      </intent-filter> 
      <meta-data 
       android:name="com.google.angroid.glass.VoiceTrigger" 
       android:resource="@xml/voice_trigger" />  
     </activity> 

     <recognition-service 
      android:name="WordService" 
      android:icon="@drawable/ic_launcher" 
      android:label="@string/service_name"> 

      <intent-filter> 
       <action android:name="android.speech.RecognitionService" /> 
       <category android:name="android.intent.category.DEFAULT" /> 
      </intent-filter> 
     </recognition-service> 

    </application> 
</manifest> 

That is the manifest. And here is my MainActivity:

public class MainActivity extends Activity 
{ 
    private static final String TAG = "Activity"; 
    private TextView txtMsg; // TextView that shows the number 
    private Handler mHandler; 

    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     // TODO Auto-generated method stub 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.activity_main); 
     txtMsg = (TextView)findViewById(R.id.lbl_info); 
     final ServiceConnection serviceConnection = new ServiceConnection() 
     { 

      @Override 
      public void onServiceConnected(ComponentName name, IBinder service) 
      { 
       // TODO Auto-generated method stub 

      } 

      @Override 
      public void onServiceDisconnected(ComponentName name) 
      { 
       // TODO Auto-generated method stub 

      } 

     }; 

     Thread t = new Thread() 
     { 
      public void run() 
      { 
       //Start service 
       getApplicationContext().bindService(
          new Intent(getApplicationContext(), SimpleVoiceService.class), 
          serviceConnection, 
          Context.BIND_AUTO_CREATE); 

      } 
     }; 

     t.start(); 

     //Create handler for receiving messages 
     mHandler = new Handler(Looper.getMainLooper()) 
     { 
      @Override 
      public void handleMessage(Message inputMessage) 
      { 
       //switch(inputMessage.what) 
       //{ 
        //case 4: 
        //{ 
         txtMsg.setText((String)inputMessage.obj); 
         //break; 
        //}    
       //} 
      } 
     }; 

     //mHandler.sendMessage(Message.obtain(mHandler, 1)); 
     txtMsg.setText("Service Started"); 
    }
}



Is there an exception with a stack trace in the log? – janos


On second thought, the thread is going to die every time it binds the service, isn't it? If so, I'm asking the wrong question. :/ – myselfesteem


Is there a stack trace? Look at your adb logcat. – janos

Answer


When you say *"the thread dies immediately after binding the service"*, I would expect there to be an exception with a stack trace in your logs. That would probably shed more light on what is actually happening.
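
If it helps, a crash stack trace can usually be isolated with a logcat priority filter. A minimal example (the AndroidRuntime tag is where uncaught-exception traces normally land):

adb logcat AndroidRuntime:E *:S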

On a closer look, the service class you specified is abstract:

public abstract class SimpleVoiceService extends Service 

I don't have an Android development environment with me right now to verify this, but I'm not sure it can work that way. Remove the abstract modifier, or bind to another, non-abstract service that extends this one.
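
For illustration only (this sketch is not from the original answer), a minimal concrete subclass might look like the following. The class name matches the WordService already referenced in the manifest, but the empty method bodies are placeholders:

package com.example.quadcontrol;

import android.content.Intent;

// Hypothetical concrete subclass that the Activity could bind to instead of
// the abstract SimpleVoiceService. The method bodies are placeholders only.
public class WordService extends SimpleVoiceService {

    @Override
    protected void onStartListening(Intent recognizerIntent, Callback listener) {
        // Start capturing audio and run recognition here.
    }

    @Override
    protected void onStopListening(Callback listener) {
        // Stop capturing; recognize whatever speech was heard so far.
    }

    @Override
    protected void onCancel(Callback listener) {
        // Abort recognition and release any resources.
    }
}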


Also, why do you bind to the service in a thread? Instead of this:

Thread t = new Thread() 
{ 
    public void run() 
    { 
     //Start service 
     getApplicationContext().bindService(
        new Intent(getApplicationContext(), SimpleVoiceService.class), 
        serviceConnection, 
        Context.BIND_AUTO_CREATE); 

    } 
}; 
t.start(); 

why don't you simply do it like this:

getApplicationContext().bindService(
      new Intent(getApplicationContext(), SimpleVoiceService.class), 
      serviceConnection, 
      Context.BIND_AUTO_CREATE); 
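
As a side note, bindService() is asynchronous and returns immediately; the connection is delivered later to onServiceConnected() on the main thread, so wrapping the call in a Thread gains nothing. A small sketch of the same fragment inside onCreate (the error handling is an assumption, not from the original post):

// bindService() returns false if the system could not find or start the service,
// for example when it is not declared correctly in the manifest.
boolean bound = getApplicationContext().bindService(
        new Intent(getApplicationContext(), SimpleVoiceService.class),
        serviceConnection,
        Context.BIND_AUTO_CREATE);
if (!bound) {
    Log.e(TAG, "bindService() returned false - check the service declaration in the manifest");
}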

See the comments on the question. – myselfesteem