Android9.0 setStereoVolume左右声道流程(一)

注意:这条路其实没有走,过后会写第二篇分析到底是怎么控制AudioTrack的左右声道的。

例子:setStereoVolume()控制左右声道流程。
private AudioTrack mAudioTrack;
int bufferSize = AudioTrack.getMinBufferSize(mSampleRateInHz, mChannelConfig, AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,mSampleRateInHz,mChannelConfig,AudioFormat.ENCODING_PCM_16BIT,bufferSize,AudioTrack.MODE_STREAM);
mAudioTrack.setStereoVolume(left ? 1 : 0, right ? 1 : 0);
mAudioTrack.play();


1.frameworks/base/media/java/android/media/AudioTrack.java
  frameworks/base/media/java/android/media/PlayerBase.java
<1>.public int setStereoVolume(float leftGain, float rightGain) {
  // Entry point of the stereo-volume path: delegates to PlayerBase.baseSetVolume().
  baseSetVolume(leftGain, rightGain);
  // Fixed: the method is declared to return int, but the excerpt omitted the
  // return statement — the real AOSP implementation returns SUCCESS.
  return SUCCESS;
}

<2>.void baseSetVolume(float leftVolume, float rightVolume) {
   // Applies the per-channel pan multipliers to each gain, then forwards to the
   // player implementation; the first argument (isRestricted) is used as the
   // muting flag, as the inline /*muting*/ annotation shows.
   playerSetVolume(isRestricted/*muting*/,leftVolume * mPanMultiplierL, rightVolume * mPanMultiplierR);
}

<3>.void playerSetVolume(boolean muting, float leftVolume, float rightVolume) {
  // Crosses into native code via JNI.
  // NOTE(review): this excerpt drops the handling of the muting flag —
  // presumably the volumes are forced to 0 when muting; confirm against
  // the full AudioTrack.java implementation.
  native_setVolume(leftVolume, rightVolume);
}

2.frameworks/base/core/jni/android_media_AudioTrack.cpp
static void android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol ){
  // NOTE(review): "sp lpTrack" is an abbreviated excerpt — the real code
  // declares sp<AudioTrack>; the template argument was lost when quoting.
  sp lpTrack = getAudioTrack(env, thiz);
  // Forward the two channel gains to the native AudioTrack.
  lpTrack->setVolume(leftVol, rightVol);
}

3.frameworks/av/media/libaudioclient/AudioTrack.cpp
status_t AudioTrack::setVolume(float left, float right){
  // Converts each float gain to a minifloat, packs both into one 32-bit word,
  // and publishes it through the client proxy into the shared control block.
  // NOTE(review): the excerpt omits the range checks and the status_t return
  // of the real implementation.
  mProxy->setVolumeLR(gain_minifloat_pack(gain_from_float(left), gain_from_float(right)));
}

gain_minifloat_pack()函数分析:
static inline gain_minifloat_packed_t gain_minifloat_pack(gain_minifloat_t left,gain_minifloat_t right){
  // Pack both channel gains into one 32-bit word: the RIGHT channel gain is
  // shifted LEFT by 16 bits into the high half-word and OR-ed with the LEFT
  // channel gain, which occupies the low half-word.
  // (The original comment said "right shift 16 bits"; the code performs a
  // left shift — `right << 16`.)
  return (right << 16) | left;
}  


// audio_track_cblk_t is the control block shared between the client-side
// AudioTrack and AudioFlinger; the packed volume (mVolumeLR) is stored in it.
audio_track_cblk_t* const mCblk;
struct audio_track_cblk_t{
  audio_track_cblk_t(); 
  ~audio_track_cblk_t() { }
};  // fixed: a struct definition must be terminated with a semicolon

// gain_minifloat_packed_t is an unsigned 32-bit integer holding both channel gains
typedef uint32_t gain_minifloat_packed_t;
gain_minifloat_packed_t mVolumeLR; 

// Store / load the packed L/R volume in the shared control block — this is how
// the client-side AudioTrack publishes its volume to AudioFlinger's mixer.
void setVolumeLR(gain_minifloat_packed_t volumeLR) {mCblk->mVolumeLR = volumeLR;}
gain_minifloat_packed_t getVolumeLR() const { return mCblk->mVolumeLR; }

4.frameworks/av/services/audioflinger/Threads.cpp
  frameworks/av/services/audioflinger/Tracks.cpp
//混音线程
<1>.AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTracks_l(){
  // Read back the packed L/R volume the client published via the proxy,
  // unpack each half-word to a float, and scale by the stream volume...
  gain_minifloat_packed_t vlr = proxy->getVolumeLR();
  float vlf = volume * float_from_gain(gain_minifloat_unpack_left(vlr));
  float vrf = volume * float_from_gain(gain_minifloat_unpack_right(vlr));
  // ...then AVERAGE left and right into one final volume. The per-channel
  // information is collapsed here, which is why this path cannot carry an
  // independent left/right (stereo balance) setting.
  track->setFinalVolume((vlf + vrf) / 2.f);
}

<2>.void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volume){
  if(mFinalVolume != volume) {
    // Only store on change. Next step of the trace: find where mFinalVolume
    // is handed off — it ends up in the track metadata's .gain field (see
    // copyMetadataTo below).
    mFinalVolume = volume; 
  }
}

<3>.void AudioFlinger::PlaybackThread::Track::copyMetadataTo(MetadataInserter& backInserter) const{
  // Emits one metadata entry for this track; the averaged final volume is
  // exported to the HAL as the .gain field.
  *backInserter++ = {
          .usage = mAttr.usage,
	  .content_type = mAttr.content_type,
	   // mFinalVolume is copied into gain
	  .gain = mFinalVolume,
	};  // fixed: the assignment statement must be terminated with a semicolon
}

<4>.void AudioFlinger::PlaybackThread::updateMetadata_l(){
  StreamOutHalInterface::SourceMetadata metadata;
  auto backInserter = std::back_inserter(metadata.tracks);
  // NOTE(review): "sp &track" is an abbreviated excerpt — the real code
  // iterates sp<Track> references.
  for(const sp &track : mActiveTracks) { 
     track->copyMetadataTo(backInserter);
  }
  // The per-track gain finally lives in metadata.tracks[i].gain.
  // Fixed: the format string opened with a full-width quote (“), which would
  // not compile; replaced with a plain double quote.
  for(int i = 0; i < (int)metadata.tracks.size(); i++)
     ALOGE("xxx——>%s, %s, %d, usage = %d, content_type = %d, gain = %f",__FILE__,__FUNCTION__,__LINE__,metadata.tracks[i].usage,metadata.tracks[i].content_type,metadata.tracks[i].gain);
  
   sendMetadataToBackend_l(metadata);
}

<5>.void AudioFlinger::PlaybackThread::sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata){
  // Hand the collected track metadata to the output-stream HAL wrapper
  // (StreamOutHalHidl or StreamOutHalLocal — see steps 5 and 6 below).
  mOutput->stream->updateSourceMetadata(metadata);
}

5.frameworks/av/media/libaudiohal/4.0/StreamHalHidl.cpp
status_t StreamOutHalHidl::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
   // NOTE(review): halMetadata is converted from sourceMetadata in the real
   // code; the conversion is omitted from this excerpt.
   for(int i = 0; i < (int)halMetadata.tracks.size(); i++)                                                                                                                                                                       
     ALOGE("xxx----->%s, %s, %d, usage = %d, content_type = %d, gain = %f",__FILE__,__FUNCTION__,__LINE__,halMetadata.tracks[i].usage,halMetadata.tracks[i].contentType,halMetadata.tracks[i].gain);
   
  // Forward the metadata over HIDL to the audio HAL service.
  return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(halMetadata));
}
//这一步没有调用,因为update_source_metadata()函数在audio_hw.c里没有实现
6.frameworks/av/media/libaudiohal/4.0/StreamHalLocal.cpp
status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) { 
  // Legacy (non-HIDL) path: calls straight into the HAL's function pointer.
  if(mStream->update_source_metadata == nullptr) {
    // The HAL (audio_hw.c) does not implement the callback, so we bail out here.
    return INVALID_OPERATION; 
  }
  // NOTE(review): "metadata" is a local C struct built from sourceMetadata in
  // the real code; the conversion is omitted from this excerpt.
  mStream->update_source_metadata(mStream, &metadata);   
}

//直接转到这
7.hardware/interfaces/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.impl.h
#ifdef AUDIO_HAL_VERSION_4_0
Return StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
  // Default HIDL server implementation: bridges the HIDL call back onto the
  // legacy HAL's function pointer.
  if(mStream->update_source_metadata == nullptr) {
    // update_source_metadata() is not implemented in
    // hardware/qcom/audio/hal/audio_hw.c, so the call returns right here.
    return Void();  // not supported by the HAL
  }
  // NOTE(review): halMetadata is converted from sourceMetadata in the real
  // code; the conversion is omitted from this excerpt.
  mStream->update_source_metadata(mStream, &halMetadata);
}

update_source_metadata的定义如下:
hardware/libhardware/include/hardware/audio.h
typedef struct audio_stream_out audio_stream_out_t;
struct audio_stream_out {
  void (*update_source_metadata)(struct audio_stream_out *stream,const struct source_metadata* source_metadata);
}
8.在hardware/qcom/audio/hal/audio_hw.c中实现update_source_metadata回调的伪代码如下:
static int adev_open(){
  adev->device.common.tag = HARDWARE_DEVICE_TAG;
  // NOTE(review): per the audio.h excerpt above, update_source_metadata is a
  // member of struct audio_stream_out, not of the device struct — in a real
  // HAL this assignment would be made on the output stream (typically in
  // adev_open_output_stream), not on adev. This is the author's pseudocode.
  adev-> update_source_metadata = adev_update_source_metadata;
}

// Fixed: the callback must match the function-pointer type declared on
// struct audio_stream_out (see the audio.h excerpt above) — it receives the
// output stream and the source metadata, not a (device, volume) pair, and
// returns void.
static void adev_update_source_metadata(struct audio_stream_out *stream, const struct source_metadata *source_metadata){
  // Drive the kernel, e.g. program the hardware volume from
  // source_metadata->tracks[i].gain.
  ioctl();
}

 

你可能感兴趣的:(Android,Audio学习)