private boolean canWriteIn = false; // when false, freshly captured audio is dropped — used to discard the leading chunk of data
private class AudioRecordThread extends Thread {
    /** Bytes per WebRTC NS/AGC frame: 160 samples x 2 bytes = 10 ms of 16 kHz mono PCM16. */
    private static final int AGC_FRAME_BYTES = 320;
    /** Size of the RIFF/WAV header reserved at the start of the file. */
    private static final int WAV_HEADER_SIZE = 44;

    AudioRecord aRecord;
    int bufferSize = 10240;
    boolean createWav = false;
    AutomaticGainControlUtils agcUtils = new AutomaticGainControlUtils();
    NoiseSuppressorUtils nsUtils;
    long nsxId = 0;
    int nsxInit = 0;
    int nexSetPolicy = 0;
    long agcId = 0;
    int agcInitResult = 0;
    int agcSetConfigResult = 0;
    // Scratch buffers reused for every 10 ms frame to avoid per-frame allocation.
    byte[] tmpPcmArr = new byte[AGC_FRAME_BYTES];
    short[] inputArr = new short[160];
    short[] agcOutputArr = new short[160];
    short[] nsOutputArr = new short[160];
    ByteBuffer bufferCvt = ByteBuffer.allocate(2);

    /**
     * Sets up the AudioRecord and, depending on {@code enableWebRTCAGC}, either the
     * WebRTC noise-suppression + AGC pipeline or the platform AutomaticGainControl
     * effect attached to the recording session.
     *
     * @param createWav true to stream the captured PCM into {@code tmpWavFile} as a
     *                  WAV file (a valid header is patched in when recording ends)
     */
    AudioRecordThread(boolean createWav) {
        this.createWav = createWav;
        bufferSize = AudioRecord.getMinBufferSize(AUDIO_FREQUENCY,
                RECORD_CHANNEL_CONFIG, AUDIO_ENCODING) * RECORD_AUDIO_BUFFER_TIMES;
        Log.d(TAG, "record buffer size = " + bufferSize); // e.g 1280
        aRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION, AUDIO_FREQUENCY,
                RECORD_CHANNEL_CONFIG, AUDIO_ENCODING, bufferSize);
        if (enableWebRTCAGC) {
            // The WebRTC pipeline below is configured for 16 kHz mono PCM16
            // (160-sample frames). NOTE(review): the hard-coded 16000 should
            // match AUDIO_FREQUENCY — confirm.
            bufferCvt.order(ByteOrder.LITTLE_ENDIAN);
            nsUtils = new NoiseSuppressorUtils();
            nsxId = nsUtils.nsxCreate();
            nsxInit = nsUtils.nsxInit(nsxId, 16000);
            nexSetPolicy = nsUtils.nsxSetPolicy(nsxId, 2);
            agcId = agcUtils.agcCreate();
            agcInitResult = agcUtils.agcInit(agcId, 0, 255, 3, 16000);
            agcSetConfigResult = agcUtils.agcSetConfig(agcId, (short) 3, (short) 20, true);
            Log.d(TAG, "[rec] agcInitResult: " + agcInitResult + ", agcSetConfigResult: " + agcSetConfigResult);
        } else if (AutomaticGainControl.isAvailable()) {
            AutomaticGainControl agc = AutomaticGainControl.create(
                    aRecord.getAudioSessionId());
            if (agc != null) {
                agc.setEnabled(true);
            }
        } else {
            Log.w(TAG, "AudioRecordThread: 不支持自动增益AutomaticGainControl");
        }
    }

    /**
     * Capture loop: reads PCM from the recorder, optionally runs WebRTC NS + AGC
     * over each complete 320-byte frame, streams the bytes to {@code tmpWavFile},
     * reports volume to {@code onVolListener}, and finally patches a valid WAV
     * header over the placeholder. The AudioRecord is always released, and the
     * output stream always closed, even when an exception aborts the loop.
     */
    @Override
    public void run() {
        state = WindState.RECORDING;
        notifyState(state);
        Log.d(TAG, "录制开始");
        try {
            int channelCount = 1;
            try (FileOutputStream wavFos = new FileOutputStream(tmpWavFile)) {
                if (createWav) {
                    // Reserve room for the header; the real one is written once
                    // the total data length is known.
                    wavFos.write(new byte[WAV_HEADER_SIZE]);
                }
                try {
                    aRecord.startRecording();
                    byte[] byteBuffer = new byte[bufferSize];
                    while (state.equals(WindState.RECORDING) && !isInterrupted()) {
                        int read = aRecord.read(byteBuffer, 0, byteBuffer.length);
                        if (read < 0) {
                            // Negative results are AudioRecord error codes (e.g.
                            // ERROR_INVALID_OPERATION); stop rather than write garbage.
                            Log.e(TAG, "AudioRecord.read failed: " + read);
                            break;
                        }
                        if (!canWriteIn || read == 0) {
                            continue; // still discarding the leading audio
                        }
                        if (enableWebRTCAGC) {
                            denoiseAndGain(byteBuffer, read);
                        }
                        if (createWav) {
                            // Only the bytes actually read this pass are valid.
                            wavFos.write(byteBuffer, 0, read);
                            wavFos.flush();
                        }
                        float vol = calVolumeInByte(byteBuffer);
                        if (onVolListener != null) {
                            onVolListener.onVol(vol);
                        }
                    }
                    aRecord.stop(); // 录制结束
                    // Query the channel count before release(): the recorder is
                    // no longer valid afterwards.
                    channelCount = aRecord.getChannelCount();
                } finally {
                    try {
                        aRecord.release();
                    } catch (Exception e) {
                        Log.e(TAG, "run: ", e);
                    }
                }
            }
            if (createWav) {
                patchWavHeader(channelCount);
            }
        } catch (Exception e) {
            Log.e(TAG, "AudioRecordThread:", e);
            notifyState(WindState.ERROR);
        }
        notifyState(state);
        state = WindState.IDLE;
        notifyState(state);
        Log.d(TAG, "录制结束");
    }

    /**
     * Runs WebRTC noise suppression followed by AGC over every complete
     * 320-byte (10 ms) frame in {@code pcm}, writing the processed samples back
     * in place as little-endian PCM16. A trailing partial frame is left untouched.
     *
     * @param pcm        little-endian 16-bit PCM buffer, modified in place
     * @param validBytes number of valid bytes at the start of {@code pcm}
     */
    private void denoiseAndGain(byte[] pcm, int validBytes) {
        for (int i = 0; i + AGC_FRAME_BYTES <= validBytes; i += AGC_FRAME_BYTES) {
            System.arraycopy(pcm, i, tmpPcmArr, 0, AGC_FRAME_BYTES);
            ByteBuffer.wrap(tmpPcmArr).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(inputArr);
            nsUtils.nsxProcess(nsxId, inputArr, 1, nsOutputArr);
            agcUtils.agcProcess(
                    agcId, nsOutputArr, 1, 160, agcOutputArr,
                    0, 0, 0, false
            );
            for (int j = 0; j < agcOutputArr.length; j++) {
                bufferCvt.clear();
                bufferCvt.putShort(agcOutputArr[j]);
                pcm[i + j * 2] = bufferCvt.get(0);
                pcm[i + j * 2 + 1] = bufferCvt.get(1);
            }
        }
    }

    /**
     * Overwrites the placeholder at the start of {@code tmpWavFile} with a valid
     * 44-byte WAV header describing the recorded data.
     *
     * @param channelCount channel count captured from the recorder before release
     */
    private void patchWavHeader(int channelCount) throws java.io.IOException {
        try (RandomAccessFile wavRaf = new RandomAccessFile(tmpWavFile, "rw")) {
            byte[] header = generateWavFileHeader(
                    tmpWavFile.length() - WAV_HEADER_SIZE, AUDIO_FREQUENCY, channelCount);
            wavRaf.seek(0);
            wavRaf.write(header);
        }
        Log.d(TAG, "tmpWavFile.length: " + tmpWavFile.length());
    }
}