I am trying to record my vocal and then merge it with an audio file using the OpenSL ES library. I found this GitHub sample called Native-Audio. It does mix the two audio streams, but in the final output the background audio file plays back much faster than its actual speed.
Please listen with headphones to notice the difference.
Example links: Before and After
Also, it only uses files from the assets folder. How can I manually select MP3 files from a file manager?
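For reference, a minimal sketch of one way to do that with the Storage Access Framework, assuming an Activity on Android 4.4+: ACTION_OPEN_DOCUMENT returns a Uri whose file descriptor MediaExtractor can read directly. The request code and method names below are illustrative, not part of the Native-Audio sample.

// Requires android.content.Intent, android.net.Uri,
// android.os.ParcelFileDescriptor, android.media.MediaExtractor, java.io.IOException.
private static final int PICK_AUDIO_REQUEST = 1001; // arbitrary request code

private void pickAudioFile() {
    Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
    intent.addCategory(Intent.CATEGORY_OPENABLE);
    intent.setType("audio/mpeg"); // restrict the picker to MP3
    startActivityForResult(intent, PICK_AUDIO_REQUEST);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == PICK_AUDIO_REQUEST && resultCode == RESULT_OK && data != null) {
        Uri uri = data.getData();
        try (ParcelFileDescriptor pfd = getContentResolver().openFileDescriptor(uri, "r")) {
            MediaExtractor extractor = new MediaExtractor();
            // Same decode path as in mixAudio() below, just fed from the
            // picked Uri instead of an AssetFileDescriptor.
            extractor.setDataSource(pfd.getFileDescriptor());
            // ... decode and mix ...
            extractor.release();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}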
private void mixAudio(){
try {
if (!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) ||
!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED))
{
// Show rationale and request permission.
ActivityCompat.requestPermissions(this,
new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE, android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
1000);
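// Note: the permission result arrives asynchronously in
// onRequestPermissionsResult(); mixing does not continue in this call.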
}
else {
buttonMix.setEnabled(false);
buttonMix.setText("MIXING....");
textViewMixPath.setText("");
buttonPlay.setEnabled(false);
buttonRecord.setEnabled(false);
buttonStart.setEnabled(false);
listView.setEnabled(false);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try{
String path = Environment.getExternalStorageDirectory().getPath() + "/VocalRecorder";
File fileParent = new File(path);
if (!fileParent.exists()){
fileParent.mkdirs(); // mkdirs() also creates any missing parent directories
}
final File file = new File(fileParent.getPath() + "/mix.wav");
String beat = beats[selectedBeat];
AssetFileDescriptor afd = getAssets().openFd(beat);
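// The beat is read from the app's assets here; to mix a user-picked MP3,
// point the extractor at a file descriptor for the picked Uri instead
// (see the picker sketch above).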
MediaCodec codec = null;
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(durationStr);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
int sampleRate = 0;
int numChannels = 0;
int numTracks = extractor.getTrackCount(); //This says 1
for (int i = 0; i < numTracks; ++i) { // so this will just run once
MediaFormat format = extractor.getTrackFormat(i); // getting info so it looks good so far
String mime = format.getString(MediaFormat.KEY_MIME); // "audio/mpeg"
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null, null, 0);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
break;
}
}
// Calculate the number of frames required for specified duration
long numFrames = (long)(duration * sampleRate/1000);
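// duration is in milliseconds (METADATA_KEY_DURATION), hence the /1000;
// WavFile frame counts are per channel.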
// Create a wav file with the name specified as the first argument
WavFile wavFile = WavFile.newWavFile(file, numChannels, numFrames, 16, sampleRate);
if (codec == null) {
throw new IllegalArgumentException("No decoder for file format");
}
short[] recordedData = recordedData();
int recordMixStartIndex = -1;
codec.start();
boolean sawInputEOS = false;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int count = 0;
while (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_US);
Log.i(LOG_TAG, "inputBufIndex : " + inputBufIndex);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffer(inputBufIndex);
int sampleSize = extractor.readSampleData(dstBuf, 0);
Log.i(LOG_TAG, "sampleSize : " + sampleSize);
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.i(LOG_TAG, "Saw input end of stream!");
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
Log.i(LOG_TAG, "presentationTimeUs " + presentationTimeUs);
}
codec.queueInputBuffer(inputBufIndex,
0, //offset
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
Log.i(LOG_TAG, "extractor.advance()");
extractor.advance();
}
}
final int res = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (res >= 0) {
int outputBufIndex = res;
ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
short[] shortArray = new short[info.size/2];
buf.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArray);
buf.clear(); // must clear, or the next time this buffer is dequeued it will still hold stale data
if (shortArray.length > 0) {
long[] longData = new long[shortArray.length];
// Merge data with vocal
// Calculate the time
final long bufferTimer = info.presentationTimeUs/1000;
int vocalCount = 0;
for (int i = 0; i < shortArray.length; i ++) {
long offsetTime = i*1000/(sampleRate*2); // ms offset of this sample; assumes the beat is stereo (2 interleaved channels)
boolean mixed = false;
if ((offsetTime + bufferTimer > recordStartTime) && (offsetTime + bufferTimer <= recordStopTime + 500)){
if (recordMixStartIndex == -1){
recordMixStartIndex = 0;
}
if (recordMixStartIndex < recordedData.length){
//Log.i("TAG", "############ mix record data: " + recordMixStartIndex);
longData[i] = TPMixSamples((int)(recordedData[recordMixStartIndex]), (int)shortArray[i]/3);
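// The vocal pointer advances only once per 4 output shorts, i.e. the
// recording is effectively assumed to be mono at half the beat's sample
// rate; any other recording rate makes the vocal drift against the beat.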
if (vocalCount >= 3) {
recordMixStartIndex++;
vocalCount = 0;
}
else{
vocalCount ++;
}
mixed = true;
}
}
else {
// All done, set sawInputEOS to stop mixing
if (bufferTimer > recordStopTime + 500){
sawInputEOS = true;
}
}
if (!mixed) {
longData[i] = shortArray[i];
}
}
Log.i("TAG", "############ write frames: " + longData.length/2);
wavFile.writeFrames(longData, longData.length/2);
count ++;
if (count % 5 == 0){
runOnUiThread(new Runnable() {
@Override
public void run() {
long percent = bufferTimer*100/duration;
buttonMix.setText("MIXING..." + percent + "%");
}
});
}
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawInputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// Nothing to do here when using getOutputBuffer(int) (API 21+).
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
final MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "Output format has changed to " + oformat);
//mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
// Close the wavFile
wavFile.close();
codec.stop();
codec.release();
extractor.release();
runOnUiThread(new Runnable() {
@Override
public void run() {
buttonMix.setText("MIX DONE");
buttonPlay.setEnabled(true);
buttonRecord.setEnabled(true);
textViewMixPath.setText(file.getPath());
buttonStart.setEnabled(true);
listView.setEnabled(true);
}
});
}
catch (Exception e){
e.printStackTrace(); // don't swallow failures silently
}
}
});
thread.start();
}
}
catch (Exception e){
e.printStackTrace();
}
}
private final int INT16_MIN = - 32768;
private final int INT16_MAX = 32767;
private long TPMixSamples(int a, int b) {
// Clamp both inputs to the 16-bit range before mixing.
if (a > INT16_MAX) {a = INT16_MAX;}
if (a < INT16_MIN) {a = INT16_MIN;}
if (b > INT16_MAX) {b = INT16_MAX;}
if (b < INT16_MIN) {b = INT16_MIN;}
return
// If both samples are negative, mixed signal must have an amplitude between the lesser of A and B, and the minimum permissible negative amplitude
a < 0 && b < 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MIN) :
// If both samples are positive, mixed signal must have an amplitude between the greater of A and B, and the maximum permissible positive amplitude
( a > 0 && b > 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MAX)
// If samples are on opposite sides of the 0-crossing, mixed signal should reflect that samples cancel each other out somewhat
:
a + b);
}
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
/////
public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
// true == PLAYING, false == PAUSED
public static native void setPlayingAssetAudioPlayer(boolean isPlaying);
public static native int getDurationAssetAudioPlayer();
public static native int getCurrentPositionAssetAudioPlayer();
//////
public static native boolean createUriAudioPlayer(String uri);
public static native void setPlayingUriAudioPlayer(boolean isPlaying);
public static native void setLoopingUriAudioPlayer(boolean isLooping);
public static native void setChannelMuteUriAudioPlayer(int chan, boolean mute);
public static native void setChannelSoloUriAudioPlayer(int chan, boolean solo);
public static native int getNumChannelsUriAudioPlayer();
public static native void setVolumeUriAudioPlayer(int millibel);
public static native void setMuteUriAudioPlayer(boolean mute);
public static native void enableStereoPositionUriAudioPlayer(boolean enable);
public static native void setStereoPositionUriAudioPlayer(int permille);
public static native boolean selectClip(int which, int count);
public static native void stopClip();
public static native boolean enableReverb(boolean enabled);
public static native boolean createAudioRecorder();
public static native void startRecording();
public static native void stopRecording();
public static native void pauseRecording();
public static native void resumeRecording();
public static native short[] recordedData();
public static native double recordedDuration();
public static native void shutdown();
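// For context, a minimal sketch of the order these natives are typically
// driven in (the 44100/512 values are illustrative, not from the sample):
//
//   createEngine();
//   createBufferQueueAudioPlayer(44100, 512);
//   createAudioRecorder();
//   startRecording();
//   // ... user performs ...
//   stopRecording();
//   short[] vocal = recordedData();   // PCM consumed by mixAudio() above
//   shutdown();                       // release native resources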
/** Load jni .so on initialization */
static {
System.loadLibrary("native-audio-jni");
}