I am trying to use the WaveInApp library (http://www.materialup.com/posts/waveinapp). I have set up everything it requires, and the app runs, but the background wave does not work. I am able to set up all the basic things, including the MediaPlayer, but as a beginner in Android development I cannot get the wave working, and I do not know where to set the speech recognition handler. How do I set up this library in Android Studio?

import android.content.Context; 
import android.media.MediaPlayer; 
import android.os.Bundle; 
import android.support.v7.app.AppCompatActivity; 
import android.view.View; 
import android.widget.ImageButton; 
import android.widget.Toast; 
import com.cleveroad.audiovisualization.AudioVisualization; 
import com.cleveroad.audiovisualization.DbmHandler; 
import com.cleveroad.audiovisualization.SpeechRecognizerDbmHandler; 
import com.cleveroad.audiovisualization.VisualizerDbmHandler; 

public class MainActivity extends AppCompatActivity { 
private AudioVisualization audioVisualization; 
private Context context; 
private ImageButton button1,button2; 
private MediaPlayer mediaPlayer; 
@Override 
protected void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 
    setContentView(R.layout.activity_main); 
    audioVisualization = (AudioVisualization)findViewById(R.id.visualizer_view); 
    button1=(ImageButton)findViewById(R.id.imageButton); 
    button2=(ImageButton)findViewById(R.id.imageButton2); 
    mediaPlayer =MediaPlayer.create(this,R.raw.song); 

    button2.setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      Toast.makeText(getApplicationContext(),"playing",Toast.LENGTH_SHORT).show(); 
      mediaPlayer.start(); 
     } 
    }); 
    button1.setOnClickListener(new View.OnClickListener() {
     @Override
     public void onClick(View v) {
      Toast.makeText(getApplicationContext(),"pause",Toast.LENGTH_SHORT).show();
      mediaPlayer.pause();
      // apparently taken from the library README: getContext() does not exist in an Activity,
      // and the literal "..." below is the README's placeholder
      VisualizerDbmHandler vizualizerHandler = DbmHandler.Factory.newVisualizerHandler(getContext(), 0);
      audioVisualization.linkTo(vizualizerHandler);
      // set speech recognizer handler
      SpeechRecognizerDbmHandler speechRecHandler = DbmHandler.Factory.newSpeechRecognizerHandler(context);
      speechRecHandler.innerRecognitionListener(...);
      audioVisualization.linkTo(speechRecHandler);
     }
    });
} 

@Override 
public void onResume() { 
    super.onResume(); 
    audioVisualization.onResume(); 
} 

@Override 
public void onPause() { 
    audioVisualization.onPause(); 
    super.onPause(); 
} 
} 

Having done the above, how do I set the handler correctly? I am not able to link the audio visualization view to the audio output. How do I set this up with the MediaPlayer?

Apart from that, everything works. Any tip or advice would be helpful. This is what I tried:

import android.content.Intent; 
import android.os.Bundle; 
import android.speech.RecognitionListener; 
import android.speech.RecognizerIntent; 
import android.support.annotation.Nullable; 
import android.support.v4.app.Fragment; 
import android.view.LayoutInflater; 
import android.view.View; 
import android.view.ViewGroup; 
import android.widget.Button; 

import com.cleveroad.audiovisualization.AudioVisualization; 
import com.cleveroad.audiovisualization.DbmHandler; 
import com.cleveroad.audiovisualization.SpeechRecognizerDbmHandler; 


public class SpeechRecognitionFragment extends Fragment { 

public static SpeechRecognitionFragment newInstance() { 
    return new SpeechRecognitionFragment(); 
} 

private AudioVisualization audioVisualization; 
private Button btnRecognize; 
private SpeechRecognizerDbmHandler handler; 
private boolean recognizing; 

@Nullable 
@Override 
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { 
    View view = inflater.inflate(R.layout.fragment_gles, container, false); 
    audioVisualization = (AudioVisualization) view.findViewById(R.id.visualizer_view); 
    btnRecognize = (Button) view.findViewById(R.id.btn_recognize); 
    return view; 
} 

@Override 
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { 
    super.onViewCreated(view, savedInstanceState); 
    btnRecognize.setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      if (recognizing) { 
       handler.stopListening(); 
      } else { 
       Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); 
       intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); 
       intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getContext().getPackageName()); 
       handler.startListening(intent); 
      } 
      btnRecognize.setEnabled(false); 
     } 
    }); 
    handler = DbmHandler.Factory.newSpeechRecognizerHandler(getContext()); 
    handler.innerRecognitionListener(new SimpleRecognitionListener() { 

     @Override 
     public void onReadyForSpeech(Bundle params) { 
      super.onReadyForSpeech(params); 
      onStartRecognizing(); 
     } 

     @Override 
     public void onResults(Bundle results) { 
      super.onResults(results); 
      onStopRecognizing(); 
     } 

     @Override 
     public void onError(int error) { 
      super.onError(error); 
      onStopRecognizing(); 

     } 
    }); 
    audioVisualization.linkTo(handler); 
} 

private void onStopRecognizing() { 
    recognizing = false; 
    btnRecognize.setText(R.string.start_recognition); 
    btnRecognize.setEnabled(true); 
} 

private void onStartRecognizing() { 
    btnRecognize.setText(R.string.stop_recognition); 
    btnRecognize.setEnabled(true); 
    recognizing = true; 
} 

@Override 
public void onDestroyView() { 
    audioVisualization.release(); 
    super.onDestroyView(); 
} 

private static class SimpleRecognitionListener implements RecognitionListener { 

    @Override 
    public void onReadyForSpeech(Bundle params) { 
    } 

    @Override 
    public void onBeginningOfSpeech() { 
    } 

    @Override 
    public void onRmsChanged(float rmsdB) { 
    } 

    @Override 
    public void onBufferReceived(byte[] buffer) { 
    } 

    @Override 
    public void onEndOfSpeech() { 
    } 

    @Override 
    public void onError(int error) { 
    } 

    @Override 
    public void onResults(Bundle results) { 
    } 

    @Override 
    public void onPartialResults(Bundle partialResults) { 
    } 

    @Override 
    public void onEvent(int eventType, Bundle params) { 
    } 
} 
} 

from https://github.com/Cleveroad/WaveInApp/blob/master/app/src/main/java/com/cleveroad/example/SpeechRecognitionFragment.java. Can someone tell me why a Fragment is used here, and how I can set this up in my main activity?
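
For reference: one way to reuse this example unchanged is to host the fragment from an activity. A minimal sketch, assuming the activity's layout contains a container view with the hypothetical id "container":

// in your AppCompatActivity, after setContentView():
getSupportFragmentManager().beginTransaction()
        .replace(R.id.container, SpeechRecognitionFragment.newInstance())
        .commit();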

Thanks for this question. I found something new; I will definitely implement this. –

Answers

Replace

audioVisualization = (AudioVisualization) glAudioVisualizationView; 

with

audioVisualization = (AudioVisualization)findViewById(R.id.glAudioVisualizationView); 

glAudioVisualizationView should be the id of the AudioVisualization view in your layout file.

I know I am late to post this answer, but for future use, try this. Check my code and you will find your answer. First of all, you have to make some changes in your XML layout:


<com.cleveroad.audiovisualization.GLAudioVisualizationView
    android:id="@+id/visualizer_view"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    app:av_bubblesSize="@dimen/bubble_size"
    app:av_bubblesRandomizeSizes="true"
    app:av_wavesHeight="@dimen/wave_height"
    app:av_wavesFooterHeight="@dimen/footer_height"
    app:av_wavesCount="7"
    app:av_layersCount="4" />

then

private VisualizerDbmHandler handler;
private MediaPlayer mp;
private AudioVisualization audioVisualization;
private ImageButton ibCapture;
private boolean bool = true; // true = nothing playing yet; toggled by the button below

public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
    View ve = inflater.inflate(R.layout.cat, container, false);
    ibCapture = (ImageButton) ve.findViewById(R.id.ibCapture);
    audioVisualization = (AudioVisualization) ve.findViewById(R.id.visualizer_view);
    hell(); // presumably the helper that registers the click listener shown below
    return ve;
}



ibCapture.setOnClickListener(new View.OnClickListener() {
    public void onClick(View v) {
        if (bool) {
            bool = false;
            mp = MediaPlayer.create(getContext(), R.raw.blackcat);
            mp.setLooping(true);
            // link the visualizer to this MediaPlayer instance, then start playback
            handler = VisualizerDbmHandler.Factory.newVisualizerHandler(getContext(), mp);
            audioVisualization.linkTo(handler);
            mp.start();
            ((home) getActivity()).vis(); // vis()/visgone() are the answerer's own activity methods
        } else {
            bool = true;
            ((home) getActivity()).visgone();
            stopPlaying();
        }
    }
});
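
stopPlaying() is not shown in this answer. A minimal sketch of what it might look like, assuming it only needs to stop and release the player:

private void stopPlaying() {
    if (mp != null) {
        mp.stop();    // stop playback
        mp.release(); // free the underlying MediaPlayer resources
        mp = null;
    }
}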

Define

private AudioVisualization audioVisualization; 

Initialize

audioVisualization = (AudioVisualization) view.findViewById(R.id.visualizer_view); 

For speech recognition, use the code below:

SpeechRecognizerDbmHandler speechRecHandler = DbmHandler.Factory.newSpeechRecognizerHandler(getContext());
speechRecHandler.innerRecognitionListener();
audioVisualization.linkTo(speechRecHandler);

Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getContext().getPackageName());
speechRecHandler.startListening(intent);
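
Note: the speech recognizer needs the RECORD_AUDIO permission, and on Android 6.0+ it must also be requested at runtime. A minimal sketch, assuming it is called from an activity before linking the handler (ensureAudioPermission and REQUEST_RECORD_AUDIO are hypothetical names):

import android.Manifest;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;

private static final int REQUEST_RECORD_AUDIO = 1; // arbitrary request code

private void ensureAudioPermission() {
    // ask for RECORD_AUDIO if it has not been granted yet
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
    }
}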

For playing media on your device, use the code below, as in Sandeep Singh's answer:

MediaPlayer mp = MediaPlayer.create(getContext(), R.raw.blackcat);
mp.setLooping(true);
handler = VisualizerDbmHandler.Factory.newVisualizerHandler(getContext(), mp);
audioVisualization.linkTo(handler);
mp.start();
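
Putting it together for the original question (an Activity instead of a Fragment): a minimal sketch based on the answers above. R.raw.song and R.id.visualizer_view are taken from the question; the handler wiring mirrors Sandeep Singh's code:

import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

import com.cleveroad.audiovisualization.AudioVisualization;
import com.cleveroad.audiovisualization.VisualizerDbmHandler;

public class MainActivity extends AppCompatActivity {

    private AudioVisualization audioVisualization;
    private MediaPlayer mediaPlayer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        audioVisualization = (AudioVisualization) findViewById(R.id.visualizer_view);
        mediaPlayer = MediaPlayer.create(this, R.raw.song);
        // link the visualization view to this player's output before starting playback
        VisualizerDbmHandler handler =
                VisualizerDbmHandler.Factory.newVisualizerHandler(this, mediaPlayer);
        audioVisualization.linkTo(handler);
        mediaPlayer.start();
    }

    @Override
    protected void onResume() {
        super.onResume();
        audioVisualization.onResume();
    }

    @Override
    protected void onPause() {
        audioVisualization.onPause();
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        audioVisualization.release(); // free the visualizer's resources
        super.onDestroy();
    }
}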