
I'm building an app that adds theme music to videos.

Some users complain that the resulting video is too large when their music is in Apple Lossless format.

That's because my AVMutableComposition just passes the music through into the generated video in its original format.

Is there a way to lower the bitrate of an MPMediaItem's audio, or to re-encode it in a different format?

Here is the code snippet I use to add the music to the video:

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                               preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];

AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];

NSURL    *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];

if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]){
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}

_assetExport.outputFileType = AVFileTypeQuickTimeMovie;

_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {}];
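Note: I know AVAssetExportSession also has an AVAssetExportPresetAppleM4A preset that re-encodes audio to AAC in one step, but it doesn't let me choose the bitrate. A minimal sketch of what I mean (audioAsset and audioOutputURL are placeholders):

// Sketch only: re-encode the audio to AAC with the built-in M4A preset.
// `audioAsset` and `audioOutputURL` are placeholders; the preset picks
// the bitrate itself, which is exactly what I want to control.
AVAssetExportSession *audioExport =
    [[AVAssetExportSession alloc] initWithAsset:audioAsset
                                     presetName:AVAssetExportPresetAppleM4A];
audioExport.outputFileType = AVFileTypeAppleM4A;
audioExport.outputURL = audioOutputURL;
[audioExport exportAsynchronouslyWithCompletionHandler:^{
    // on success, mix the re-encoded file into the composition
    // instead of the original lossless track
}];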

1 Answer


I finally got it working. This is the code I use:

#import <AVFoundation/AVFoundation.h>

static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";

@interface AudioUtil : NSObject
-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback;
@end

@implementation AudioUtil
{
    AVAssetReader *_assetReader;
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_assetWriterInput;
    AVAssetReaderTrackOutput *_readerOutput;
    void (^_callback)(BOOL);
    CMSampleBufferRef _sampleBufferToAppend;
}

-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
    NSError *error = nil;
    _callback = callback;

    [[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

    // initialize the reader that decodes the source audio to linear PCM
    AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
    _assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
    _assetReader.timeRange = timeRange;
    AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
    audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);

    _readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
    NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
    [_assetReader addOutput:_readerOutput];

    // initialize the writer that re-encodes the PCM at the target bitrate
    _assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:nil];

    NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
    audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
    audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
    audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);

    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    [_assetWriter addInput:_assetWriterInput];

    // start both sides of the pipeline
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    BOOL canStartReading = [_assetReader startReading];
    NSLog(@"can start reading %d", canStartReading);
    if (!canStartReading) {
        callback(NO);
        return;
    }

    // KVO tells us when the writer input can accept more data
    [_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld|NSKeyValueObservingOptionNew context:NULL];
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];

    [self appendBufferToAppend];
}

-(void)appendBufferToAppend
{
    if ([_assetWriterInput isReadyForMoreMediaData]) {
        if (_sampleBufferToAppend) {
            [_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
            CFRelease(_sampleBufferToAppend);
        }
        _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
        if (_sampleBufferToAppend) {
            [self appendBufferToAppend];
        }
        else {
            // no more samples: stop observing and finish the file
            [_assetWriterInput removeObserver:self forKeyPath:kWriterInputIsReadyForMoreData];
            [_assetWriterInput markAsFinished];
            [_assetWriter finishWritingWithCompletionHandler:^(){
                if (_callback) {
                    _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
                }
            }];
        }
    }
    else {
        // writer is not ready yet; the KVO observer below will call us again
    }
}

-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
        if ([change[NSKeyValueChangeNewKey] boolValue] == YES) {
            [self appendBufferToAppend];
        }
    }
}

@end
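For context, a hypothetical call site would look like this (musicURL, compressedURL, and the time range are placeholders):

// Hypothetical usage -- musicURL, compressedURL, and audioAsset are placeholders.
AudioUtil *audioUtil = [[AudioUtil alloc] init];
[audioUtil downSamplingAudioWithSourceURL:musicURL
                           destinationURL:compressedURL
                                timeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                 callBack:^(BOOL success) {
    NSLog(@"audio transcode %@", success ? @"finished" : @"failed");
    // then insert the track from compressedURL into the composition
    // instead of the original lossless track
}];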