I record audio, then merge that AAC audio with an MP4 video file (muted, i.e. it has no audio track), and then try to share the merged MP4 file. Sharing the merged MP4 works on a Samsung J2 and a Xiaomi, but the file will not play on Lenovo, Micromax and some other phones. The test results are here: https://docs.google.com/spreadsheets/d/1eeJEM-v-smEUzY-bSxwIwFVOsAbv6KT2u3Kz3jdOb8o/edit?usp=sharing. I cannot tell whether the problem comes from the sharing step or from improper muxing. Please help.
The sharing code is in recordAudio.java and looks like this:
public void shareVroom(View view) {
    // Toast.makeText(this, "Share feature is temporarily disabled", android.widget.Toast.LENGTH_LONG).show();
    // Toast.makeText(this, "Share feature is enabled", android.widget.Toast.LENGTH_LONG).show();
    // Code commented for UAT
    try {
        MediaMultiplexer mediaMultiplexer = new MediaMultiplexer();
        mediaMultiplexer.startMuxing(this);
        Toast.makeText(this, "in share", Toast.LENGTH_SHORT).show();
        String shareableFileName = "";
        Intent intentShareFile = new Intent(Intent.ACTION_SEND);
        shareableFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
        shareableFileName += getString(R.string.vroom_video_output_file_name);
        File fileWithinMyDir = new File(shareableFileName);
        Uri videoUri = Uri.parse(shareableFileName);
        if (fileWithinMyDir.exists()) {
            intentShareFile.setType("video/mp4");
            intentShareFile.putExtra(Intent.EXTRA_STREAM, videoUri);
            intentShareFile.putExtra(Intent.EXTRA_SUBJECT, "Listen to my VROOM");
            intentShareFile.putExtra(Intent.EXTRA_TEXT, "Vroom attached");
            startActivity(Intent.createChooser(intentShareFile, "Share your Vroom with"));
        }
    } catch (IllegalStateException e) {
        e.printStackTrace();
        Log.e("tag", e.getMessage(), e);
        Toast.makeText(this, "could not share: " + e.getMessage(), Toast.LENGTH_SHORT).show();
    }
    // TODO: Use an event to identify when muxing is done
}
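One thing I noticed while reviewing the share path: Uri.parse(shareableFileName) on a bare filesystem path yields a Uri without a scheme, and on Android 7.0+ exposing a file:// Uri to another app is restricted anyway, so some receiving apps might fail to read the file even when the mux itself is fine. Below is a minimal sketch of how the intent could be built with FileProvider instead; the authority string "com.example.vroom.fileprovider" and the corresponding manifest <provider> / file_paths entries are assumptions, not something my project has yet:

    import android.content.Intent;
    import android.net.Uri;
    import android.os.Environment;
    import androidx.core.content.FileProvider;
    import java.io.File;

    // Sketch only: assumes a <provider> for androidx.core.content.FileProvider with
    // android:authorities="com.example.vroom.fileprovider" is declared in AndroidManifest.xml,
    // and that its file_paths configuration covers external storage.
    public void shareVroomViaProvider() {
        File sharedFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath()
                + getString(R.string.vroom_video_output_file_name));
        Uri contentUri = FileProvider.getUriForFile(this, "com.example.vroom.fileprovider", sharedFile);

        Intent intentShareFile = new Intent(Intent.ACTION_SEND);
        intentShareFile.setType("video/mp4");
        intentShareFile.putExtra(Intent.EXTRA_STREAM, contentUri);
        // Give the receiving app temporary read access to the content:// Uri.
        intentShareFile.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        startActivity(Intent.createChooser(intentShareFile, "Share your Vroom with"));
    }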
The muxing code:
public class MediaMultiplexer {

    private static final int MAX_SAMPLE_SIZE = 256 * 1024;

    public void startMuxing(Context context) {
        MediaMuxer muxer = null;
        MediaFormat VideoFormat = null;
        Resources mResources = context.getResources();
        int sourceVideo = R.raw.vid;
        String outputVideoFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
        outputVideoFileName += context.getString(R.string.vroom_video_output_file_name);
        try {
            muxer = new MediaMuxer(outputVideoFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
        }
        MediaExtractor extractorVideo = new MediaExtractor();
        try {
            AssetFileDescriptor srcVideoFd = mResources.openRawResourceFd(sourceVideo);
            extractorVideo.setDataSource(srcVideoFd.getFileDescriptor(), srcVideoFd.getStartOffset(), srcVideoFd.getLength());
            int tracks = extractorVideo.getTrackCount();
            for (int i = 0; i < tracks; i++) {
                MediaFormat mf = extractorVideo.getTrackFormat(i);
                String mime = mf.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith("video/")) {
                    extractorVideo.selectTrack(i);
                    VideoFormat = extractorVideo.getTrackFormat(i);
                    break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        MediaExtractor extractorAudio = new MediaExtractor();
        try {
            String audioFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
            audioFileName += context.getString(R.string.vroom_audio_file_name);
            extractorAudio.setDataSource(audioFileName);
            int tracks = extractorAudio.getTrackCount();
            // Toast.makeText(context, "No of tracks::::" + String.valueOf(tracks), Toast.LENGTH_SHORT).show();
            extractorAudio.selectTrack(0);
            MediaFormat AudioFormat = extractorAudio.getTrackFormat(0);
            int audioTrackIndex = muxer.addTrack(AudioFormat);
            int videoTrackIndex = muxer.addTrack(VideoFormat);
            boolean sawEOS = false;
            boolean sawAudioEOS = false;
            int bufferSize = MAX_SAMPLE_SIZE;
            ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize);
            int offset = 100;
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            muxer.start();
            while (!sawEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractorVideo.readSampleData(dstBuf, offset);
                if (bufferInfo.size < 0) {
                    sawEOS = true;
                    bufferInfo.size = 0;
                } else {
                    bufferInfo.presentationTimeUs = extractorVideo.getSampleTime();
                    bufferInfo.flags = extractorVideo.getSampleFlags();
                    int trackIndex = extractorVideo.getSampleTrackIndex();
                    muxer.writeSampleData(videoTrackIndex, dstBuf, bufferInfo);
                    extractorVideo.advance();
                }
            }
            ByteBuffer audioBuf = ByteBuffer.allocate(bufferSize);
            while (!sawAudioEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractorAudio.readSampleData(audioBuf, offset);
                if (bufferInfo.size < 0) {
                    sawAudioEOS = true;
                    bufferInfo.size = 0;
                } else {
                    bufferInfo.presentationTimeUs = extractorAudio.getSampleTime();
                    bufferInfo.flags = extractorAudio.getSampleFlags();
                    int trackIndex = extractorAudio.getSampleTrackIndex();
                    muxer.writeSampleData(audioTrackIndex, audioBuf, bufferInfo);
                    extractorAudio.advance();
                }
            }
            muxer.stop();
            muxer.release();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
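To narrow down whether the failure comes from the share step or from a bad mux, I am thinking of dumping the track list of the merged file with MediaExtractor right after startMuxing returns and comparing the output across devices. A rough sketch (the log tag "MuxCheck" is just illustrative):

    import android.media.MediaExtractor;
    import android.media.MediaFormat;
    import android.util.Log;
    import java.io.IOException;

    // Logs the mime type and duration of every track in the muxed file so a bad mux
    // (e.g. a missing audio or video track) can be told apart from a failing share intent.
    public static void logMuxedTracks(String path) {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(path);
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                long durationUs = format.containsKey(MediaFormat.KEY_DURATION)
                        ? format.getLong(MediaFormat.KEY_DURATION) : -1;
                Log.d("MuxCheck", "track " + i + ": " + format.getString(MediaFormat.KEY_MIME)
                        + ", duration(us)=" + durationUs);
            }
        } catch (IOException e) {
            Log.e("MuxCheck", "could not open " + path, e);
        } finally {
            extractor.release();
        }
    }

If both a video track and an audio track (e.g. audio/mp4a-latm) show up with sensible durations on a device where playback fails, I would conclude the mux is probably not the culprit and focus on the share intent instead.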