// Decompiled Java source — DefaultAudioSink (963 lines, 41 KiB), extracted from an APK dump.
package com.mbridge.msdk.playercommon.exoplayer2.audio;
|
|
|
|
import android.annotation.SuppressLint;
|
|
import android.annotation.TargetApi;
|
|
import android.media.AudioAttributes;
|
|
import android.media.AudioFormat;
|
|
import android.media.AudioTrack;
|
|
import android.os.ConditionVariable;
|
|
import android.os.SystemClock;
|
|
import android.util.Log;
|
|
import androidx.annotation.Nullable;
|
|
import com.ironsource.v8;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.C;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.PlaybackParameters;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.audio.AudioTrackPositionTracker;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.util.Assertions;
|
|
import com.mbridge.msdk.playercommon.exoplayer2.util.Util;
|
|
import java.lang.annotation.Retention;
|
|
import java.lang.annotation.RetentionPolicy;
|
|
import java.nio.ByteBuffer;
|
|
import java.nio.ByteOrder;
|
|
import java.util.ArrayDeque;
|
|
import java.util.ArrayList;
|
|
import java.util.Arrays;
|
|
import java.util.Collections;
|
|
|
|
/* loaded from: classes4.dex */
/**
 * Plays audio through a platform {@link AudioTrack}, feeding input buffers through a chain of
 * {@link AudioProcessor}s (resampling, channel mapping, trimming, plus an injected chain) before
 * writing to the track. Decompiled from the Mbridge-bundled copy of ExoPlayer 2.
 */
public final class DefaultAudioSink implements AudioSink {
    // Multiplier applied to the minimum platform buffer size when sizing the track buffer.
    private static final int BUFFER_MULTIPLICATION_FACTOR = 4;
    // Mirrors AudioTrack.ERROR_BAD_VALUE.
    private static final int ERROR_BAD_VALUE = -2;
    // Bounds on the PCM track buffer duration, in microseconds.
    private static final long MAX_BUFFER_DURATION_US = 750000;
    private static final long MIN_BUFFER_DURATION_US = 250000;
    // Mirror android.media.AudioTrack transfer modes.
    private static final int MODE_STATIC = 0;
    private static final int MODE_STREAM = 1;
    // Buffer duration used for passthrough (non-PCM) playback, in microseconds.
    private static final long PASSTHROUGH_BUFFER_DURATION_US = 250000;
    // Values of startMediaTimeState — see @StartMediaTimeState.
    private static final int START_IN_SYNC = 1;
    private static final int START_NEED_SYNC = 2;
    private static final int START_NOT_SET = 0;
    // Mirrors AudioTrack.STATE_INITIALIZED.
    private static final int STATE_INITIALIZED = 1;
    private static final String TAG = "AudioTrack";

    @SuppressLint({"InlinedApi"})
    private static final int WRITE_NON_BLOCKING = 1;
    // When set (pre-API-21 only), a dummy track is kept alive so the audio session id survives.
    public static boolean enablePreV21AudioSessionWorkaround = false;
    // When set, spurious platform timestamps raise InvalidAudioTrackTimestampException.
    public static boolean failOnSpuriousAudioTimestamp = false;
    // Processors that are currently active for the configured format.
    private AudioProcessor[] activeAudioProcessors;

    @Nullable
    private PlaybackParameters afterDrainPlaybackParameters; // applied once the chain drains
    private AudioAttributes audioAttributes;

    @Nullable
    private final AudioCapabilities audioCapabilities; // passthrough capabilities, may be null
    private final AudioProcessorChain audioProcessorChain;
    private int audioSessionId;
    private AudioTrack audioTrack;
    private final AudioTrackPositionTracker audioTrackPositionTracker;

    @Nullable
    private ByteBuffer avSyncHeader; // header prepended to tunneled writes
    private int bufferSize;
    private int bytesUntilNextAvSync;
    private boolean canApplyPlaybackParameters;
    private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
    // Index into activeAudioProcessors while draining; -1 when not draining.
    private int drainingAudioProcessorIndex;
    private final boolean enableConvertHighResIntPcmToFloat;
    private int framesPerEncodedSample;
    private boolean handledEndOfStream;

    @Nullable
    private ByteBuffer inputBuffer; // buffer currently being consumed, null when done
    private int inputSampleRate;
    private boolean isInputPcm;

    @Nullable
    private AudioTrack keepSessionIdAudioTrack; // dummy track for the pre-21 session workaround
    private long lastFeedElapsedRealtimeMs;

    @Nullable
    private AudioSink.Listener listener;

    @Nullable
    private ByteBuffer outputBuffer; // pending, partially written output
    private ByteBuffer[] outputBuffers; // one slot per active processor
    private int outputChannelConfig;
    private int outputEncoding;
    private int outputPcmFrameSize;
    private int outputSampleRate;
    private int pcmFrameSize; // input PCM frame size, in bytes
    private PlaybackParameters playbackParameters;
    // Parameter changes not yet reached by the playback position — see applySpeedup().
    private final ArrayDeque<PlaybackParametersCheckpoint> playbackParametersCheckpoints;
    private long playbackParametersOffsetUs;
    private long playbackParametersPositionUs;
    private boolean playing;
    private byte[] preV21OutputBuffer; // scratch copy for pre-21 byte[] writes
    private int preV21OutputBufferOffset;
    private boolean processingEnabled;
    // Open while no AudioTrack release is in flight; closed during async release (see reset()).
    private final ConditionVariable releasingConditionVariable;
    private boolean shouldConvertHighResIntPcmToFloat;
    private int startMediaTimeState;
    private long startMediaTimeUs;
    // Submitted = handed to this sink; written = accepted by the platform track.
    private long submittedEncodedFrames;
    private long submittedPcmBytes;
    private final AudioProcessor[] toFloatPcmAvailableAudioProcessors;
    private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
    private final TrimmingAudioProcessor trimmingAudioProcessor;
    private boolean tunneling;
    private float volume;
    private long writtenEncodedFrames;
    private long writtenPcmBytes;
    /**
     * Supplies a chain of audio processors and hooks for applying playback parameters and
     * mapping playout durations back to media durations.
     */
    public interface AudioProcessorChain {

        /** Applies the given parameters to the chain, returning the values actually in effect. */
        PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters);

        /** Returns the processors this chain inserts after the sink's built-in processors. */
        AudioProcessor[] getAudioProcessors();

        /** Returns the media duration, in microseconds, for the given playout duration {@code j}. */
        long getMediaDuration(long j);

        /** Returns the number of output frames skipped by the chain (e.g. silence skipping). */
        long getSkippedOutputFrameCount();
    }
    /** IntDef-style marker for startMediaTimeState: START_NOT_SET, START_IN_SYNC or START_NEED_SYNC. */
    @Retention(RetentionPolicy.SOURCE)
    public @interface StartMediaTimeState {
    }
private AudioProcessor[] getAvailableAudioProcessors() {
|
|
return this.shouldConvertHighResIntPcmToFloat ? this.toFloatPcmAvailableAudioProcessors : this.toIntPcmAvailableAudioProcessors;
|
|
}
|
|
|
|
private boolean isInitialized() {
|
|
return this.audioTrack != null;
|
|
}
|
|
|
|
    /** Returns the currently active playback parameters. */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final PlaybackParameters getPlaybackParameters() {
        return this.playbackParameters;
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void handleDiscontinuity() {
|
|
if (this.startMediaTimeState == 1) {
|
|
this.startMediaTimeState = 2;
|
|
}
|
|
}
|
|
|
|
    /** Sets the listener notified of session-id changes, discontinuities and underruns. */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final void setListener(AudioSink.Listener listener) {
        this.listener = listener;
    }
    /**
     * RuntimeException indicating an invalid platform AudioTrack timestamp.
     * NOTE(review): the throw site is not in this chunk; presumably gated by
     * failOnSpuriousAudioTimestamp — confirm against the position-tracker callbacks.
     */
    public static final class InvalidAudioTrackTimestampException extends RuntimeException {
        private InvalidAudioTrackTimestampException(String str) {
            super(str);
        }
    }
public static class DefaultAudioProcessorChain implements AudioProcessorChain {
|
|
private final AudioProcessor[] audioProcessors;
|
|
private final SilenceSkippingAudioProcessor silenceSkippingAudioProcessor;
|
|
private final SonicAudioProcessor sonicAudioProcessor;
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain
|
|
public AudioProcessor[] getAudioProcessors() {
|
|
return this.audioProcessors;
|
|
}
|
|
|
|
public DefaultAudioProcessorChain(AudioProcessor... audioProcessorArr) {
|
|
AudioProcessor[] audioProcessorArr2 = (AudioProcessor[]) Arrays.copyOf(audioProcessorArr, audioProcessorArr.length + 2);
|
|
this.audioProcessors = audioProcessorArr2;
|
|
SilenceSkippingAudioProcessor silenceSkippingAudioProcessor = new SilenceSkippingAudioProcessor();
|
|
this.silenceSkippingAudioProcessor = silenceSkippingAudioProcessor;
|
|
SonicAudioProcessor sonicAudioProcessor = new SonicAudioProcessor();
|
|
this.sonicAudioProcessor = sonicAudioProcessor;
|
|
audioProcessorArr2[audioProcessorArr.length] = silenceSkippingAudioProcessor;
|
|
audioProcessorArr2[audioProcessorArr.length + 1] = sonicAudioProcessor;
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain
|
|
public PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters) {
|
|
this.silenceSkippingAudioProcessor.setEnabled(playbackParameters.skipSilence);
|
|
return new PlaybackParameters(this.sonicAudioProcessor.setSpeed(playbackParameters.speed), this.sonicAudioProcessor.setPitch(playbackParameters.pitch), playbackParameters.skipSilence);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain
|
|
public long getMediaDuration(long j) {
|
|
return this.sonicAudioProcessor.scaleDurationForSpeedup(j);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain
|
|
public long getSkippedOutputFrameCount() {
|
|
return this.silenceSkippingAudioProcessor.getSkippedFrames();
|
|
}
|
|
}
|
|
|
|
    /** Creates a sink with the default processor chain and no high-res-int-to-float conversion. */
    public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessorArr) {
        this(audioCapabilities, audioProcessorArr, false);
    }
    /** Creates a sink wrapping the processors in a {@link DefaultAudioProcessorChain}. */
    public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessorArr, boolean z) {
        this(audioCapabilities, new DefaultAudioProcessorChain(audioProcessorArr), z);
    }
    /**
     * Creates the sink.
     *
     * @param audioCapabilities Passthrough capabilities of the device, or null.
     * @param audioProcessorChain Chain applied to PCM audio before output.
     * @param z Whether high-resolution integer PCM should be converted to 32-bit float.
     */
    public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities, AudioProcessorChain audioProcessorChain, boolean z) {
        this.audioCapabilities = audioCapabilities;
        this.audioProcessorChain = (AudioProcessorChain) Assertions.checkNotNull(audioProcessorChain);
        this.enableConvertHighResIntPcmToFloat = z;
        // Starts open; closed only while an AudioTrack is being released asynchronously (reset()).
        this.releasingConditionVariable = new ConditionVariable(true);
        this.audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
        ChannelMappingAudioProcessor channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
        this.channelMappingAudioProcessor = channelMappingAudioProcessor;
        TrimmingAudioProcessor trimmingAudioProcessor = new TrimmingAudioProcessor();
        this.trimmingAudioProcessor = trimmingAudioProcessor;
        // Int-PCM path: resample, remap channels, trim, then the injected chain.
        ArrayList arrayList = new ArrayList();
        Collections.addAll(arrayList, new ResamplingAudioProcessor(), channelMappingAudioProcessor, trimmingAudioProcessor);
        Collections.addAll(arrayList, audioProcessorChain.getAudioProcessors());
        this.toIntPcmAvailableAudioProcessors = (AudioProcessor[]) arrayList.toArray(new AudioProcessor[arrayList.size()]);
        // Float path: only resampling to float is applied.
        this.toFloatPcmAvailableAudioProcessors = new AudioProcessor[]{new FloatResamplingAudioProcessor()};
        this.volume = 1.0f;
        this.startMediaTimeState = 0;
        this.audioAttributes = AudioAttributes.DEFAULT;
        this.audioSessionId = 0;
        this.playbackParameters = PlaybackParameters.DEFAULT;
        this.drainingAudioProcessorIndex = -1;
        this.activeAudioProcessors = new AudioProcessor[0];
        this.outputBuffers = new ByteBuffer[0];
        this.playbackParametersCheckpoints = new ArrayDeque<>();
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final boolean isEncodingSupported(int i) {
|
|
if (Util.isEncodingPcm(i)) {
|
|
return i != 4 || Util.SDK_INT >= 21;
|
|
}
|
|
AudioCapabilities audioCapabilities = this.audioCapabilities;
|
|
return audioCapabilities != null && audioCapabilities.supportsEncoding(i);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final long getCurrentPositionUs(boolean z) {
|
|
if (!isInitialized() || this.startMediaTimeState == 0) {
|
|
return Long.MIN_VALUE;
|
|
}
|
|
return this.startMediaTimeUs + applySkipping(applySpeedup(Math.min(this.audioTrackPositionTracker.getCurrentPositionUs(z), framesToDurationUs(getWrittenFrames()))));
|
|
}
|
|
|
|
    /*
     * NOTE(review): JADX failed to decompile this method (410 instructions; multiple duplicated
     * regions were removed). The body below is a decompiler stub that ALWAYS THROWS — the real
     * implementation must be restored from the original smali / upstream ExoPlayer
     * DefaultAudioSink.configure before this class can be used at runtime.
     */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final void configure(int r18, int r19, int r20, int r21, @androidx.annotation.Nullable int[] r22, int r23, int r24) throws com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink.ConfigurationException {
        throw new UnsupportedOperationException("Method not decompiled: com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.configure(int, int, int, int, int[], int, int):void");
    }
private void setupAudioProcessors() {
|
|
ArrayList arrayList = new ArrayList();
|
|
for (AudioProcessor audioProcessor : getAvailableAudioProcessors()) {
|
|
if (audioProcessor.isActive()) {
|
|
arrayList.add(audioProcessor);
|
|
} else {
|
|
audioProcessor.flush();
|
|
}
|
|
}
|
|
int size = arrayList.size();
|
|
this.activeAudioProcessors = (AudioProcessor[]) arrayList.toArray(new AudioProcessor[size]);
|
|
this.outputBuffers = new ByteBuffer[size];
|
|
flushAudioProcessors();
|
|
}
|
|
|
|
private void flushAudioProcessors() {
|
|
int i = 0;
|
|
while (true) {
|
|
AudioProcessor[] audioProcessorArr = this.activeAudioProcessors;
|
|
if (i >= audioProcessorArr.length) {
|
|
return;
|
|
}
|
|
AudioProcessor audioProcessor = audioProcessorArr[i];
|
|
audioProcessor.flush();
|
|
this.outputBuffers[i] = audioProcessor.getOutput();
|
|
i++;
|
|
}
|
|
}
|
|
|
|
    /**
     * Creates and configures the platform AudioTrack, applies pending playback parameters and
     * rebuilds the processor chain.
     *
     * @throws AudioSink.InitializationException If the AudioTrack cannot be created.
     */
    private void initialize() throws AudioSink.InitializationException {
        PlaybackParameters playbackParameters;
        // Wait for any previous AudioTrack to finish its asynchronous release (see reset()).
        this.releasingConditionVariable.block();
        AudioTrack initializeAudioTrack = initializeAudioTrack();
        this.audioTrack = initializeAudioTrack;
        int audioSessionId = initializeAudioTrack.getAudioSessionId();
        if (enablePreV21AudioSessionWorkaround && Util.SDK_INT < 21) {
            // Pre-21 workaround: keep a dummy track alive so the session id is not recycled.
            AudioTrack audioTrack = this.keepSessionIdAudioTrack;
            if (audioTrack != null && audioSessionId != audioTrack.getAudioSessionId()) {
                releaseKeepSessionIdAudioTrack();
            }
            if (this.keepSessionIdAudioTrack == null) {
                this.keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId);
            }
        }
        if (this.audioSessionId != audioSessionId) {
            this.audioSessionId = audioSessionId;
            AudioSink.Listener listener = this.listener;
            if (listener != null) {
                listener.onAudioSessionId(audioSessionId);
            }
        }
        // Apply parameters through the chain only when the format supports processing.
        if (this.canApplyPlaybackParameters) {
            playbackParameters = this.audioProcessorChain.applyPlaybackParameters(this.playbackParameters);
        } else {
            playbackParameters = PlaybackParameters.DEFAULT;
        }
        this.playbackParameters = playbackParameters;
        setupAudioProcessors();
        this.audioTrackPositionTracker.setAudioTrack(this.audioTrack, this.outputEncoding, this.outputPcmFrameSize, this.bufferSize);
        setVolumeInternal();
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void play() {
|
|
this.playing = true;
|
|
if (isInitialized()) {
|
|
this.audioTrackPositionTracker.start();
|
|
this.audioTrack.play();
|
|
}
|
|
}
|
|
|
|
    /**
     * Feeds one input buffer toward the track.
     *
     * @param byteBuffer The input; the caller must re-present the SAME buffer until consumed.
     * @param j Presentation timestamp of the buffer, in microseconds.
     * @return True if the buffer was fully consumed; false if the caller should retry later.
     */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final boolean handleBuffer(ByteBuffer byteBuffer, long j) throws AudioSink.InitializationException, AudioSink.WriteException {
        ByteBuffer byteBuffer2 = this.inputBuffer;
        Assertions.checkArgument(byteBuffer2 == null || byteBuffer == byteBuffer2);
        if (!isInitialized()) {
            initialize();
            if (this.playing) {
                play();
            }
        }
        // Back-pressure: refuse the buffer while the track cannot accept more data.
        if (!this.audioTrackPositionTracker.mayHandleBuffer(getWrittenFrames())) {
            return false;
        }
        if (this.inputBuffer == null) {
            // First sight of this buffer: account for it and run discontinuity checks.
            if (!byteBuffer.hasRemaining()) {
                // Empty buffers are trivially consumed.
                return true;
            }
            if (!this.isInputPcm && this.framesPerEncodedSample == 0) {
                // Encoded audio: derive frames-per-sample from the bitstream itself.
                int framesPerEncodedSample = getFramesPerEncodedSample(this.outputEncoding, byteBuffer);
                this.framesPerEncodedSample = framesPerEncodedSample;
                if (framesPerEncodedSample == 0) {
                    // Sample count undeterminable (e.g. no TrueHD syncframe found); drop buffer.
                    return true;
                }
            }
            if (this.afterDrainPlaybackParameters != null) {
                // A parameter change is pending: drain the old chain first, then apply the new
                // parameters and record a checkpoint so position mapping stays correct.
                if (!drainAudioProcessorsToEndOfStream()) {
                    return false;
                }
                PlaybackParameters playbackParameters = this.afterDrainPlaybackParameters;
                this.afterDrainPlaybackParameters = null;
                this.playbackParametersCheckpoints.add(new PlaybackParametersCheckpoint(this.audioProcessorChain.applyPlaybackParameters(playbackParameters), Math.max(0L, j), framesToDurationUs(getWrittenFrames())));
                setupAudioProcessors();
            }
            if (this.startMediaTimeState == 0) {
                this.startMediaTimeUs = Math.max(0L, j);
                this.startMediaTimeState = 1;
            } else {
                // Compare the buffer timestamp against the expected time; >200ms drift is a
                // discontinuity.
                long inputFramesToDurationUs = this.startMediaTimeUs + inputFramesToDurationUs(getSubmittedFrames());
                if (this.startMediaTimeState == 1 && Math.abs(inputFramesToDurationUs - j) > 200000) {
                    Log.e(TAG, "Discontinuity detected [expected " + inputFramesToDurationUs + ", got " + j + v8.i.e);
                    this.startMediaTimeState = 2;
                }
                if (this.startMediaTimeState == 2) {
                    // Resynchronize the start time and notify the listener.
                    this.startMediaTimeUs += j - inputFramesToDurationUs;
                    this.startMediaTimeState = 1;
                    AudioSink.Listener listener = this.listener;
                    if (listener != null) {
                        listener.onPositionDiscontinuity();
                    }
                }
            }
            if (this.isInputPcm) {
                this.submittedPcmBytes += byteBuffer.remaining();
            } else {
                this.submittedEncodedFrames += this.framesPerEncodedSample;
            }
            this.inputBuffer = byteBuffer;
        }
        if (this.processingEnabled) {
            processBuffers(j);
        } else {
            writeBuffer(this.inputBuffer, j);
        }
        if (!this.inputBuffer.hasRemaining()) {
            this.inputBuffer = null;
            return true;
        }
        // Input not fully consumed; reset the track if it appears stalled.
        if (!this.audioTrackPositionTracker.isStalled(getWrittenFrames())) {
            return false;
        }
        Log.w(TAG, "Resetting stalled audio track");
        reset();
        return true;
    }
    /**
     * Pushes pending data through the active processor chain and writes final output to the track.
     *
     * Index semantics: i in [0, length]; i == length means "write to the AudioTrack",
     * i < length means "feed processor i" (processor 0 is fed from inputBuffer). The loop walks
     * backwards (i--) while downstream stages have drained their input, and jumps forward again
     * (i++) when a processor produced output that must be propagated.
     *
     * @param j Presentation time in microseconds, forwarded to writeBuffer for AV sync.
     */
    private void processBuffers(long j) throws AudioSink.WriteException {
        ByteBuffer byteBuffer;
        int length = this.activeAudioProcessors.length;
        int i = length;
        while (i >= 0) {
            if (i > 0) {
                byteBuffer = this.outputBuffers[i - 1];
            } else {
                byteBuffer = this.inputBuffer;
                if (byteBuffer == null) {
                    // No input pending; feed an empty buffer so draining can still progress.
                    byteBuffer = AudioProcessor.EMPTY_BUFFER;
                }
            }
            if (i == length) {
                writeBuffer(byteBuffer, j);
            } else {
                AudioProcessor audioProcessor = this.activeAudioProcessors[i];
                audioProcessor.queueInput(byteBuffer);
                ByteBuffer output = audioProcessor.getOutput();
                this.outputBuffers[i] = output;
                if (output.hasRemaining()) {
                    // New output produced: move forward and let the next stage consume it.
                    i++;
                }
            }
            if (byteBuffer.hasRemaining()) {
                // This stage could not consume everything; stop and retry on the next call.
                return;
            } else {
                i--;
            }
        }
    }
    /**
     * Writes as much of {@code byteBuffer} to the AudioTrack as it will accept.
     *
     * @param byteBuffer The buffer to write; must equal the pending outputBuffer if one exists.
     * @param j Presentation time in microseconds; must be set when tunneling.
     * @throws AudioSink.WriteException If the platform write call returns an error code.
     */
    private void writeBuffer(ByteBuffer byteBuffer, long j) throws AudioSink.WriteException {
        if (byteBuffer.hasRemaining()) {
            ByteBuffer byteBuffer2 = this.outputBuffer;
            int i = 0;
            if (byteBuffer2 != null) {
                Assertions.checkArgument(byteBuffer2 == byteBuffer);
            } else {
                this.outputBuffer = byteBuffer;
                if (Util.SDK_INT < 21) {
                    // Pre-21 write takes a byte[]; copy the buffer contents once up front and
                    // track progress via preV21OutputBufferOffset.
                    int remaining = byteBuffer.remaining();
                    byte[] bArr = this.preV21OutputBuffer;
                    if (bArr == null || bArr.length < remaining) {
                        this.preV21OutputBuffer = new byte[remaining];
                    }
                    int position = byteBuffer.position();
                    byteBuffer.get(this.preV21OutputBuffer, 0, remaining);
                    byteBuffer.position(position);
                    this.preV21OutputBufferOffset = 0;
                }
            }
            int remaining2 = byteBuffer.remaining();
            if (Util.SDK_INT < 21) {
                // Only write what currently fits, to avoid blocking in AudioTrack.write.
                int availableBufferSize = this.audioTrackPositionTracker.getAvailableBufferSize(this.writtenPcmBytes);
                if (availableBufferSize > 0) {
                    i = this.audioTrack.write(this.preV21OutputBuffer, this.preV21OutputBufferOffset, Math.min(remaining2, availableBufferSize));
                    if (i > 0) {
                        this.preV21OutputBufferOffset += i;
                        byteBuffer.position(byteBuffer.position() + i);
                    }
                }
            } else if (this.tunneling) {
                // Tunneled writes must carry a valid AV-sync presentation timestamp.
                Assertions.checkState(j != C.TIME_UNSET);
                i = writeNonBlockingWithAvSyncV21(this.audioTrack, byteBuffer, remaining2, j);
            } else {
                i = writeNonBlockingV21(this.audioTrack, byteBuffer, remaining2);
            }
            this.lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
            if (i < 0) {
                // Negative return values are platform error codes.
                throw new AudioSink.WriteException(i);
            }
            boolean z = this.isInputPcm;
            if (z) {
                this.writtenPcmBytes += i;
            }
            if (i == remaining2) {
                // Fully written: clear the pending output buffer.
                if (!z) {
                    this.writtenEncodedFrames += this.framesPerEncodedSample;
                }
                this.outputBuffer = null;
            }
        }
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void playToEndOfStream() throws AudioSink.WriteException {
|
|
if (!this.handledEndOfStream && isInitialized() && drainAudioProcessorsToEndOfStream()) {
|
|
this.audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
|
|
this.audioTrack.stop();
|
|
this.bytesUntilNextAvSync = 0;
|
|
this.handledEndOfStream = true;
|
|
}
|
|
}
|
|
|
|
/* JADX WARN: Removed duplicated region for block: B:10:0x0021 */
|
|
/* JADX WARN: Removed duplicated region for block: B:18:0x0038 */
|
|
/* JADX WARN: Unsupported multi-entry loop pattern (BACK_EDGE: B:17:0x0032 -> B:7:0x0012). Please report as a decompilation issue!!! */
|
|
/*
|
|
Code decompiled incorrectly, please refer to instructions dump.
|
|
To view partially-correct add '--show-bad-code' argument
|
|
*/
|
|
private boolean drainAudioProcessorsToEndOfStream() throws com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink.WriteException {
|
|
/*
|
|
r9 = this;
|
|
int r0 = r9.drainingAudioProcessorIndex
|
|
r1 = 1
|
|
r2 = 0
|
|
r3 = -1
|
|
if (r0 != r3) goto L14
|
|
boolean r0 = r9.processingEnabled
|
|
if (r0 == 0) goto Ld
|
|
r0 = r2
|
|
goto L10
|
|
Ld:
|
|
com.mbridge.msdk.playercommon.exoplayer2.audio.AudioProcessor[] r0 = r9.activeAudioProcessors
|
|
int r0 = r0.length
|
|
L10:
|
|
r9.drainingAudioProcessorIndex = r0
|
|
L12:
|
|
r0 = r1
|
|
goto L15
|
|
L14:
|
|
r0 = r2
|
|
L15:
|
|
int r4 = r9.drainingAudioProcessorIndex
|
|
com.mbridge.msdk.playercommon.exoplayer2.audio.AudioProcessor[] r5 = r9.activeAudioProcessors
|
|
int r6 = r5.length
|
|
r7 = -9223372036854775807(0x8000000000000001, double:-4.9E-324)
|
|
if (r4 >= r6) goto L38
|
|
r4 = r5[r4]
|
|
if (r0 == 0) goto L28
|
|
r4.queueEndOfStream()
|
|
L28:
|
|
r9.processBuffers(r7)
|
|
boolean r0 = r4.isEnded()
|
|
if (r0 != 0) goto L32
|
|
return r2
|
|
L32:
|
|
int r0 = r9.drainingAudioProcessorIndex
|
|
int r0 = r0 + r1
|
|
r9.drainingAudioProcessorIndex = r0
|
|
goto L12
|
|
L38:
|
|
java.nio.ByteBuffer r0 = r9.outputBuffer
|
|
if (r0 == 0) goto L44
|
|
r9.writeBuffer(r0, r7)
|
|
java.nio.ByteBuffer r0 = r9.outputBuffer
|
|
if (r0 == 0) goto L44
|
|
return r2
|
|
L44:
|
|
r9.drainingAudioProcessorIndex = r3
|
|
return r1
|
|
*/
|
|
throw new UnsupportedOperationException("Method not decompiled: com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.drainAudioProcessorsToEndOfStream():boolean");
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final boolean isEnded() {
|
|
return !isInitialized() || (this.handledEndOfStream && !hasPendingData());
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final boolean hasPendingData() {
|
|
return isInitialized() && this.audioTrackPositionTracker.hasPendingData(getWrittenFrames());
|
|
}
|
|
|
|
    /**
     * Requests new playback parameters, returning the parameters actually in effect.
     * If the sink is initialized, the change is deferred until the current chain drains.
     */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
        // Once initialized without processing support, only the default parameters are allowed.
        if (isInitialized() && !this.canApplyPlaybackParameters) {
            PlaybackParameters playbackParameters2 = PlaybackParameters.DEFAULT;
            this.playbackParameters = playbackParameters2;
            return playbackParameters2;
        }
        // Compare against the most recently requested parameters: pending drain value, last
        // checkpoint, or the currently active ones, in that order.
        PlaybackParameters playbackParameters3 = this.afterDrainPlaybackParameters;
        if (playbackParameters3 == null) {
            playbackParameters3 = !this.playbackParametersCheckpoints.isEmpty() ? this.playbackParametersCheckpoints.getLast().playbackParameters : this.playbackParameters;
        }
        if (!playbackParameters.equals(playbackParameters3)) {
            if (isInitialized()) {
                // Defer: applied once the current processor chain drains (see handleBuffer).
                this.afterDrainPlaybackParameters = playbackParameters;
            } else {
                this.playbackParameters = this.audioProcessorChain.applyPlaybackParameters(playbackParameters);
            }
        }
        return this.playbackParameters;
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void setAudioAttributes(AudioAttributes audioAttributes) {
|
|
if (this.audioAttributes.equals(audioAttributes)) {
|
|
return;
|
|
}
|
|
this.audioAttributes = audioAttributes;
|
|
if (this.tunneling) {
|
|
return;
|
|
}
|
|
reset();
|
|
this.audioSessionId = 0;
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void setAudioSessionId(int i) {
|
|
if (this.audioSessionId != i) {
|
|
this.audioSessionId = i;
|
|
reset();
|
|
}
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void enableTunnelingV21(int i) {
|
|
Assertions.checkState(Util.SDK_INT >= 21);
|
|
if (this.tunneling && this.audioSessionId == i) {
|
|
return;
|
|
}
|
|
this.tunneling = true;
|
|
this.audioSessionId = i;
|
|
reset();
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void disableTunneling() {
|
|
if (this.tunneling) {
|
|
this.tunneling = false;
|
|
this.audioSessionId = 0;
|
|
reset();
|
|
}
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void setVolume(float f) {
|
|
if (this.volume != f) {
|
|
this.volume = f;
|
|
setVolumeInternal();
|
|
}
|
|
}
|
|
|
|
private void setVolumeInternal() {
|
|
if (isInitialized()) {
|
|
if (Util.SDK_INT >= 21) {
|
|
setVolumeInternalV21(this.audioTrack, this.volume);
|
|
} else {
|
|
setVolumeInternalV3(this.audioTrack, this.volume);
|
|
}
|
|
}
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void pause() {
|
|
this.playing = false;
|
|
if (isInitialized() && this.audioTrackPositionTracker.pause()) {
|
|
this.audioTrack.pause();
|
|
}
|
|
}
|
|
|
|
    /**
     * Resets all playback state and releases the current AudioTrack asynchronously.
     * The releasingConditionVariable stays closed until the background release completes,
     * which blocks the next initialize() from creating a track too early.
     */
    @Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
    public final void reset() {
        if (isInitialized()) {
            this.submittedPcmBytes = 0L;
            this.submittedEncodedFrames = 0L;
            this.writtenPcmBytes = 0L;
            this.writtenEncodedFrames = 0L;
            this.framesPerEncodedSample = 0;
            // Roll the parameters forward to the most recently requested value.
            PlaybackParameters playbackParameters = this.afterDrainPlaybackParameters;
            if (playbackParameters != null) {
                this.playbackParameters = playbackParameters;
                this.afterDrainPlaybackParameters = null;
            } else if (!this.playbackParametersCheckpoints.isEmpty()) {
                this.playbackParameters = this.playbackParametersCheckpoints.getLast().playbackParameters;
            }
            this.playbackParametersCheckpoints.clear();
            this.playbackParametersOffsetUs = 0L;
            this.playbackParametersPositionUs = 0L;
            this.inputBuffer = null;
            this.outputBuffer = null;
            flushAudioProcessors();
            this.handledEndOfStream = false;
            this.drainingAudioProcessorIndex = -1;
            this.avSyncHeader = null;
            this.bytesUntilNextAvSync = 0;
            this.startMediaTimeState = 0;
            if (this.audioTrackPositionTracker.isPlaying()) {
                this.audioTrack.pause();
            }
            final AudioTrack audioTrack = this.audioTrack;
            this.audioTrack = null;
            this.audioTrackPositionTracker.reset();
            // Releasing can block; do it off-thread and signal completion via the condition
            // variable that initialize() waits on.
            this.releasingConditionVariable.close();
            new Thread() { // from class: com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.1
                @Override // java.lang.Thread, java.lang.Runnable
                public void run() {
                    try {
                        audioTrack.flush();
                        audioTrack.release();
                    } finally {
                        DefaultAudioSink.this.releasingConditionVariable.open();
                    }
                }
            }.start();
        }
    }
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioSink
|
|
public final void release() {
|
|
reset();
|
|
releaseKeepSessionIdAudioTrack();
|
|
for (AudioProcessor audioProcessor : this.toIntPcmAvailableAudioProcessors) {
|
|
audioProcessor.reset();
|
|
}
|
|
for (AudioProcessor audioProcessor2 : this.toFloatPcmAvailableAudioProcessors) {
|
|
audioProcessor2.reset();
|
|
}
|
|
this.audioSessionId = 0;
|
|
this.playing = false;
|
|
}
|
|
|
|
private void releaseKeepSessionIdAudioTrack() {
|
|
final AudioTrack audioTrack = this.keepSessionIdAudioTrack;
|
|
if (audioTrack == null) {
|
|
return;
|
|
}
|
|
this.keepSessionIdAudioTrack = null;
|
|
new Thread() { // from class: com.mbridge.msdk.playercommon.exoplayer2.audio.DefaultAudioSink.2
|
|
@Override // java.lang.Thread, java.lang.Runnable
|
|
public void run() {
|
|
audioTrack.release();
|
|
}
|
|
}.start();
|
|
}
|
|
|
|
    /**
     * Converts a playout position into a media position, accounting for playback speed
     * changes recorded as checkpoints.
     *
     * @param j Playout position in microseconds.
     * @return The corresponding media position in microseconds, relative to startMediaTimeUs.
     */
    private long applySpeedup(long j) {
        long j2;
        long mediaDurationForPlayoutDuration;
        PlaybackParametersCheckpoint playbackParametersCheckpoint = null;
        // Advance past every checkpoint whose playout position has been reached.
        while (!this.playbackParametersCheckpoints.isEmpty() && j >= this.playbackParametersCheckpoints.getFirst().positionUs) {
            playbackParametersCheckpoint = this.playbackParametersCheckpoints.remove();
        }
        if (playbackParametersCheckpoint != null) {
            this.playbackParameters = playbackParametersCheckpoint.playbackParameters;
            this.playbackParametersPositionUs = playbackParametersCheckpoint.positionUs;
            this.playbackParametersOffsetUs = playbackParametersCheckpoint.mediaTimeUs - this.startMediaTimeUs;
        }
        if (this.playbackParameters.speed == 1.0f) {
            // Unit speed: a simple offset suffices.
            return (j + this.playbackParametersOffsetUs) - this.playbackParametersPositionUs;
        }
        if (this.playbackParametersCheckpoints.isEmpty()) {
            // No upcoming checkpoints: the processor chain can scale the duration exactly.
            j2 = this.playbackParametersOffsetUs;
            mediaDurationForPlayoutDuration = this.audioProcessorChain.getMediaDuration(j - this.playbackParametersPositionUs);
        } else {
            // Between checkpoints: approximate using the stored speed.
            j2 = this.playbackParametersOffsetUs;
            mediaDurationForPlayoutDuration = Util.getMediaDurationForPlayoutDuration(j - this.playbackParametersPositionUs, this.playbackParameters.speed);
        }
        return j2 + mediaDurationForPlayoutDuration;
    }
    /** Adds the duration of frames skipped by the processor chain (e.g. silence) to {@code j}. */
    private long applySkipping(long j) {
        return j + framesToDurationUs(this.audioProcessorChain.getSkippedOutputFrameCount());
    }
    /** Converts an input frame count to a duration in microseconds at the input sample rate. */
    private long inputFramesToDurationUs(long j) {
        return (j * 1000000) / this.inputSampleRate;
    }
    /** Converts an output frame count to a duration in microseconds at the output sample rate. */
    private long framesToDurationUs(long j) {
        return (j * 1000000) / this.outputSampleRate;
    }
    /** Converts a duration in microseconds to an output frame count (inverse of framesToDurationUs). */
    private long durationUsToFrames(long j) {
        return (j * this.outputSampleRate) / 1000000;
    }
/* JADX INFO: Access modifiers changed from: private */
|
|
public long getSubmittedFrames() {
|
|
return this.isInputPcm ? this.submittedPcmBytes / this.pcmFrameSize : this.submittedEncodedFrames;
|
|
}
|
|
|
|
/* JADX INFO: Access modifiers changed from: private */
|
|
public long getWrittenFrames() {
|
|
return this.isInputPcm ? this.writtenPcmBytes / this.outputPcmFrameSize : this.writtenEncodedFrames;
|
|
}
|
|
|
|
private AudioTrack initializeAudioTrack() throws AudioSink.InitializationException {
|
|
AudioTrack audioTrack;
|
|
if (Util.SDK_INT >= 21) {
|
|
audioTrack = createAudioTrackV21();
|
|
} else {
|
|
int streamTypeForAudioUsage = Util.getStreamTypeForAudioUsage(this.audioAttributes.usage);
|
|
if (this.audioSessionId == 0) {
|
|
audioTrack = new AudioTrack(streamTypeForAudioUsage, this.outputSampleRate, this.outputChannelConfig, this.outputEncoding, this.bufferSize, 1);
|
|
} else {
|
|
audioTrack = new AudioTrack(streamTypeForAudioUsage, this.outputSampleRate, this.outputChannelConfig, this.outputEncoding, this.bufferSize, 1, this.audioSessionId);
|
|
}
|
|
}
|
|
int state = audioTrack.getState();
|
|
if (state == 1) {
|
|
return audioTrack;
|
|
}
|
|
try {
|
|
audioTrack.release();
|
|
} catch (Exception unused) {
|
|
}
|
|
throw new AudioSink.InitializationException(state, this.outputSampleRate, this.outputChannelConfig, this.bufferSize);
|
|
}
|
|
|
|
@TargetApi(21)
|
|
private AudioTrack createAudioTrackV21() {
|
|
android.media.AudioAttributes audioAttributesV21;
|
|
if (this.tunneling) {
|
|
audioAttributesV21 = new AudioAttributes.Builder().setContentType(3).setFlags(16).setUsage(1).build();
|
|
} else {
|
|
audioAttributesV21 = this.audioAttributes.getAudioAttributesV21();
|
|
}
|
|
android.media.AudioAttributes audioAttributes = audioAttributesV21;
|
|
AudioFormat build = new AudioFormat.Builder().setChannelMask(this.outputChannelConfig).setEncoding(this.outputEncoding).setSampleRate(this.outputSampleRate).build();
|
|
int i = this.audioSessionId;
|
|
if (i == 0) {
|
|
i = 0;
|
|
}
|
|
return new AudioTrack(audioAttributes, build, this.bufferSize, 1, i);
|
|
}
|
|
|
|
/**
 * Creates a minimal dummy {@link AudioTrack} whose only purpose is to keep the given
 * audio session id alive while the real track is being re-created.
 *
 * @param i the audio session id to keep alive.
 */
private AudioTrack initializeKeepSessionIdAudioTrack(int i) {
    // Cheapest possible configuration. NOTE(review): constants presumably map to
    // STREAM_MUSIC(3), 4000 Hz, CHANNEL_OUT_MONO(4), ENCODING_PCM_16BIT(2),
    // a 2-byte buffer, and MODE_STATIC(0) — TODO confirm against android.media.AudioTrack docs.
    return new AudioTrack(3, 4000, 4, 2, 2, 0, i);
}
|
|
|
|
private static int getFramesPerEncodedSample(int i, ByteBuffer byteBuffer) {
|
|
if (i == 7 || i == 8) {
|
|
return DtsUtil.parseDtsAudioSampleCount(byteBuffer);
|
|
}
|
|
if (i == 5) {
|
|
return Ac3Util.getAc3SyncframeAudioSampleCount();
|
|
}
|
|
if (i == 6) {
|
|
return Ac3Util.parseEAc3SyncframeAudioSampleCount(byteBuffer);
|
|
}
|
|
if (i == 14) {
|
|
int findTrueHdSyncframeOffset = Ac3Util.findTrueHdSyncframeOffset(byteBuffer);
|
|
if (findTrueHdSyncframeOffset == -1) {
|
|
return 0;
|
|
}
|
|
return Ac3Util.parseTrueHdSyncframeAudioSampleCount(byteBuffer, findTrueHdSyncframeOffset) * 16;
|
|
}
|
|
throw new IllegalStateException("Unexpected audio encoding: " + i);
|
|
}
|
|
|
|
@TargetApi(21)
/**
 * Writes up to {@code i} bytes from {@code byteBuffer} to the track without blocking.
 *
 * @return the number of bytes written, or a negative AudioTrack error code.
 */
private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer byteBuffer, int i) {
    // Mode 1 is presumably AudioTrack.WRITE_NON_BLOCKING — TODO confirm against android.media docs.
    return audioTrack.write(byteBuffer, i, 1);
}
|
|
|
|
@TargetApi(21)
/**
 * Writes audio data non-blocking while prepending a 16-byte A/V sync header carrying
 * the presentation timestamp, as required for tunneled playback.
 * <p>
 * Stateful: {@code avSyncHeader} is the lazily-allocated header buffer and
 * {@code bytesUntilNextAvSync} counts payload bytes remaining before the next header
 * must be emitted. The header must be fully written before any payload is written.
 *
 * @param audioTrack the track to write to.
 * @param byteBuffer the payload data.
 * @param i the payload size in bytes for this access unit.
 * @param j the presentation timestamp; multiplied by 1000 before being stored
 *     (presumably microseconds → nanoseconds — TODO confirm).
 * @return bytes of payload written (0 while the header is still incomplete), or a
 *     negative AudioTrack error code.
 */
private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer byteBuffer, int i, long j) {
    if (this.avSyncHeader == null) {
        // Lazily allocate the 16-byte big-endian header.
        ByteBuffer allocate = ByteBuffer.allocate(16);
        this.avSyncHeader = allocate;
        allocate.order(ByteOrder.BIG_ENDIAN);
        // 1431633921 == 0x55550001, presumably the A/V sync header magic — TODO confirm.
        this.avSyncHeader.putInt(1431633921);
    }
    if (this.bytesUntilNextAvSync == 0) {
        // Start of a new access unit: fill in size and timestamp, rewind for writing.
        this.avSyncHeader.putInt(4, i);
        this.avSyncHeader.putLong(8, j * 1000);
        this.avSyncHeader.position(0);
        this.bytesUntilNextAvSync = i;
    }
    int remaining = this.avSyncHeader.remaining();
    if (remaining > 0) {
        // Header not yet fully written; push the rest before any payload.
        int write = audioTrack.write(this.avSyncHeader, remaining, 1);
        if (write < 0) {
            // Error: reset state so the next attempt re-emits a fresh header.
            this.bytesUntilNextAvSync = 0;
            return write;
        }
        if (write < remaining) {
            // Header still partial; report no payload consumed yet.
            return 0;
        }
    }
    int writeNonBlockingV21 = writeNonBlockingV21(audioTrack, byteBuffer, i);
    if (writeNonBlockingV21 < 0) {
        this.bytesUntilNextAvSync = 0;
        return writeNonBlockingV21;
    }
    // Track how much of the current access unit remains before the next header.
    this.bytesUntilNextAvSync -= writeNonBlockingV21;
    return writeNonBlockingV21;
}
|
|
|
|
@TargetApi(21)
/** Sets the track volume using the API-21 single-gain setter. */
private static void setVolumeInternalV21(AudioTrack audioTrack, float f) {
    audioTrack.setVolume(f);
}
|
|
|
|
/**
 * Sets the track volume on pre-21 devices by applying the same gain to both
 * stereo channels (setStereoVolume is the legacy, since-deprecated API).
 */
private static void setVolumeInternalV3(AudioTrack audioTrack, float f) {
    audioTrack.setStereoVolume(f, f);
}
|
|
|
|
public static final class PlaybackParametersCheckpoint {
|
|
private final long mediaTimeUs;
|
|
private final PlaybackParameters playbackParameters;
|
|
private final long positionUs;
|
|
|
|
private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long j, long j2) {
|
|
this.playbackParameters = playbackParameters;
|
|
this.mediaTimeUs = j;
|
|
this.positionUs = j2;
|
|
}
|
|
}
|
|
|
|
public final class PositionTrackerListener implements AudioTrackPositionTracker.Listener {
|
|
private PositionTrackerListener() {
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioTrackPositionTracker.Listener
|
|
public final void onPositionFramesMismatch(long j, long j2, long j3, long j4) {
|
|
String str = "Spurious audio timestamp (frame position mismatch): " + j + ", " + j2 + ", " + j3 + ", " + j4 + ", " + DefaultAudioSink.this.getSubmittedFrames() + ", " + DefaultAudioSink.this.getWrittenFrames();
|
|
if (DefaultAudioSink.failOnSpuriousAudioTimestamp) {
|
|
throw new InvalidAudioTrackTimestampException(str);
|
|
}
|
|
Log.w(DefaultAudioSink.TAG, str);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioTrackPositionTracker.Listener
|
|
public final void onSystemTimeUsMismatch(long j, long j2, long j3, long j4) {
|
|
String str = "Spurious audio timestamp (system clock mismatch): " + j + ", " + j2 + ", " + j3 + ", " + j4 + ", " + DefaultAudioSink.this.getSubmittedFrames() + ", " + DefaultAudioSink.this.getWrittenFrames();
|
|
if (DefaultAudioSink.failOnSpuriousAudioTimestamp) {
|
|
throw new InvalidAudioTrackTimestampException(str);
|
|
}
|
|
Log.w(DefaultAudioSink.TAG, str);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioTrackPositionTracker.Listener
|
|
public final void onInvalidLatency(long j) {
|
|
Log.w(DefaultAudioSink.TAG, "Ignoring impossibly large audio latency: " + j);
|
|
}
|
|
|
|
@Override // com.mbridge.msdk.playercommon.exoplayer2.audio.AudioTrackPositionTracker.Listener
|
|
public final void onUnderrun(int i, long j) {
|
|
if (DefaultAudioSink.this.listener != null) {
|
|
DefaultAudioSink.this.listener.onUnderrun(i, j, SystemClock.elapsedRealtime() - DefaultAudioSink.this.lastFeedElapsedRealtimeMs);
|
|
}
|
|
}
|
|
}
|
|
}
|