DEMO [GinCamera] (github.com/ginhoor/Gin…)

Video shooting is similar to photo shooting, but with an additional audio data input.

For details about the creation process, download the demo.


The main object

/** Capture session that coordinates inputs and outputs */
@property (strong, nonatomic) AVCaptureSession *captureSession;
/** Video capture device (camera) */
@property (strong, nonatomic) AVCaptureDevice *captureDevice;
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;
/** Audio capture device (microphone) */
@property (strong, nonatomic) AVCaptureDevice *audioDevice;
@property (strong, nonatomic) AVCaptureDeviceInput *audioDeviceInput;
/** Movie file output that writes the recorded video */
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
/** Preview layer */
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

Video shooting is different from taking photos. You need to set the video storage address first

// Unlike photo capture, video recording needs an output file URL before starting.
NSURL *fileUrl = [NSURL fileURLWithPath:filePath];
// Remember the path so the file can be found after recording finishes.
self.videoFilePath = filePath;
// The delegate (self) receives AVCaptureFileOutputRecordingDelegate callbacks.
[self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];

Video shooting requires access to both the camera and the microphone. A common question is why the camera is enabled but footage still cannot be captured — in that case, check the microphone access permission.

Enable the video stabilization function

// Get the video connection of the movie file output.
AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
// Enable video stabilization when the connection supports it.
if ([captureConnection isVideoStabilizationSupported]) {
    captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}

Detect dynamic changes in the video area

The callback can be received by listening for the AVCaptureDeviceSubjectAreaDidChangeNotification notification. The video composition method is described below.

Gets a preview of the specified video

/**
 Get a preview (thumbnail) image of the specified video.

 @param filePath Path of the video file.
 @return The frame at time zero as a UIImage, or nil on failure.
 */
+ (UIImage *)getVideoPreViewImage:(NSString *)filePath {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath] options:nil];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // Apply the track's preferredTransform so the thumbnail has the correct orientation.
    generator.appliesPreferredTrackTransform = YES;

    // Frame at 0 seconds; 600 is the conventional video timescale.
    CMTime time = CMTimeMakeWithSeconds(0.0, 600);
    CMTime actualTime;
    NSError *error = nil;
    CGImageRef image = [generator copyCGImageAtTime:time actualTime:&actualTime error:&error];
    // Check the returned value rather than the error pointer.
    if (image == NULL) {
        NSLog(@"get preview image failed!! Error: %@", error);
        return nil;
    }
    UIImage *videoImage = [[UIImage alloc] initWithCGImage:image];
    // copyCGImageAtTime returns a +1 reference; release it after wrapping in UIImage.
    CGImageRelease(image);
    return videoImage;
}

Multiple video composition

/**
 Merge multiple videos into one file and export it.

 @param videoFilePathList Paths of the source video files, appended in array order.
 @param outputPath        Path of the merged output file.
 @param presetName        Export preset (resolution); defaults to AVAssetExportPreset640x480 when empty.
 @param outputFileType    Output container format; defaults to AVFileTypeMPEG4 when empty.
 @param completion        Called with YES on success, NO on failure (invoked on the exporter's callback queue).
 */
+ (void)mergeAndExportVideos:(NSArray<NSString *> *)videoFilePathList
                  outputPath:(NSString *)outputPath
                  presetName:(NSString *)presetName
              outputFileType:(NSString *)outputFileType
                  completion:(void (^)(BOOL success))completion {
    if (videoFilePathList.count == 0) {
        // Nothing to merge; report failure instead of silently dropping the callback.
        if (completion) {
            completion(NO);
        }
        return;
    }
    NSLog(@"videoFilePathList--->%@", videoFilePathList);

    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // One audio and one video composition track; every source asset is appended to both.
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];

    __block CMTime totalDuration = kCMTimeZero;
    [videoFilePathList enumerateObjectsUsingBlock:^(NSString * _Nonnull filePath, NSUInteger idx, BOOL * _Nonnull stop) {
        AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];

        // Append the asset's audio track at the current end of the composition.
        NSError *audioError = nil;
        AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        BOOL audioFlag = [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                             ofTrack:assetAudioTrack
                                              atTime:totalDuration
                                               error:&audioError];
        if (!audioFlag) {
            NSLog(@"audioTrack insert error:%@,%d", audioError, audioFlag);
        }

        // Append the asset's video track; keep the source orientation via preferredTransform.
        NSError *videoError = nil;
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        [videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
        BOOL videoFlag = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                             ofTrack:assetVideoTrack
                                              atTime:totalDuration
                                               error:&videoError];
        if (!videoFlag) {
            NSLog(@"videoTrack insert error:%@,%d", videoError, videoFlag);
        }

        totalDuration = CMTimeAdd(totalDuration, asset.duration);
    }];

    NSURL *mergeFileURL = [NSURL fileURLWithPath:outputPath];

    // Fall back to defaults when the caller passed an empty preset / file type.
    if (presetName.length == 0) {
        presetName = AVAssetExportPreset640x480;
    }
    if (outputFileType.length == 0) {
        outputFileType = AVFileTypeMPEG4;
    }

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                       presetName:presetName];
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = outputFileType;
    exporter.shouldOptimizeForNetworkUse = YES;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        if (exporter.status == AVAssetExportSessionStatusFailed || exporter.error) {
            NSLog(@"AVAssetExportSession Error: %@", exporter.error);
            if (completion) {
                completion(NO);
            }
            return;
        }
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Video merge complete -->%@", mergeFileURL);
            if (completion) {
                completion(YES);
            }
        } else {
            // Still in progress (or cancelled) — log progress only.
            NSLog(@"AVAssetExportSession Current compression :%f", exporter.progress);
        }
    }];
}