Audio/Video Recording and Adding Filters to Images

Table of Contents
  1. Recording with UIImagePickerController
  2. Recording with AVCaptureSession
  3. Adding a Simple Filter to an Image

Two ways to record audio and video with the native iOS APIs.
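Both approaches need camera and microphone access at runtime. Below is a minimal sketch of an authorization check before starting capture; it assumes NSCameraUsageDescription and NSMicrophoneUsageDescription are already declared in Info.plist, and the helper name requestCaptureAccessThen: is just an example.

#import <AVFoundation/AVFoundation.h>

- (void)requestCaptureAccessThen:(void (^)(void))work {
    AVAuthorizationStatus status =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        work();
        return;
    }
    // Ask the user; the completion handler runs on an arbitrary queue,
    // so hop back to the main queue before touching UI.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                work();
            }
        });
    }];
}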

Recording with UIImagePickerController


#import <MobileCoreServices/MobileCoreServices.h>   // kUTTypeMovie
#import <AssetsLibrary/AssetsLibrary.h>             // ALAssetsLibrary (deprecated since iOS 9)

- (void)imageControllerCapture{
    // 1. Check whether the camera is available
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]){
        [SWProgressHUB showWithInfoStatus:@"Camera not found"];
        return;
    }
    // 2. Check whether video recording is supported
    NSArray *availableMediaTypes = [UIImagePickerController availableMediaTypesForSourceType:UIImagePickerControllerSourceTypeCamera];
    if (![availableMediaTypes containsObject:(NSString *)kUTTypeMovie]){
        [SWProgressHUB showWithInfoStatus:@"Video recording is not supported"];
        return;
    }
    // 3. Create the picker controller
    UIImagePickerController *videoImage = [[UIImagePickerController alloc] init];
    // 4. Set the source type: camera or photo library
    videoImage.sourceType = UIImagePickerControllerSourceTypeCamera;
    // 5. Set the media type: photo or video
    videoImage.mediaTypes = @[(NSString *)kUTTypeMovie];
    // 6. Set the recording quality: high, medium, low...
    videoImage.videoQuality = UIImagePickerControllerQualityTypeHigh;
    // 7. Set the delegate
    videoImage.delegate = self;
    // 8. Choose the camera: front or rear
    videoImage.cameraDevice = UIImagePickerControllerCameraDeviceRear;
    [self presentViewController:videoImage animated:YES completion:nil];
}

#pragma mark - UIImagePickerControllerDelegate
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary<NSString *,id> *)info{
    WELog(@"infoData = %@", info);
    // infoData = {
    //     UIImagePickerControllerMediaType = "public.movie";
    //     UIImagePickerControllerMediaURL = "file:///private/var/mobile/Containers/Data/Application/676A90DD-7CC8-4BCE-B0AF-052ED0686CA9/tmp/52160991560__4E8816F7-B81D-49F8-ABAC-F58019E2C0E6.MOV";
    // }
    // Recording succeeded: read the data and the file URL
    NSData *data = [NSData dataWithContentsOfURL:info[UIImagePickerControllerMediaURL]];
    WELog(@"videoData = %@", data);
    // Save the recording to the photo library
    ALAssetsLibrary *al = [[ALAssetsLibrary alloc] init];
    [al writeVideoAtPathToSavedPhotosAlbum:info[UIImagePickerControllerMediaURL] completionBlock:^(NSURL *assetURL, NSError *error) {
        WELog(@"assetURL = %@", assetURL);
        [self dismissViewControllerAnimated:YES completion:nil];
    }];
}
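ALAssetsLibrary has been deprecated since iOS 9. A minimal sketch of the same save step using the Photos framework instead, assuming it runs inside the didFinishPickingMediaWithInfo: callback above:

#import <Photos/Photos.h>

NSURL *videoURL = info[UIImagePickerControllerMediaURL];
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    // Create a change request that copies the recorded movie into the photo library
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoURL];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
    WELog(@"saved = %d, error = %@", success, error);
}];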

Recording with AVCaptureSession

#import <AVFoundation/AVFoundation.h>

// Note: the class interface (not shown) should declare conformance to
// AVCaptureFileOutputRecordingDelegate for the recording callbacks below.
@implementation VideoCaptureController
{
    AVCaptureSession *_session;
}

- (void)reportWithFoundation{
    // 1. Create the capture session, which manages the input and output devices
    _session = [[AVCaptureSession alloc] init];
    // 1.1 Set the resolution
    if ([_session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _session.sessionPreset = AVCaptureSessionPreset1280x720;
    }
    // 2. Get the input devices
    AVCaptureDevice *videoDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    // 3. Create the device inputs
    AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:nil];
    AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
    // 4. Add the inputs to the session
    if ([_session canAddInput:videoInput]) {
        [_session addInput:videoInput];
    }
    if ([_session canAddInput:audioInput]) {
        [_session addInput:audioInput];
    }
    // 5. Add the movie file output
    AVCaptureMovieFileOutput *outPut = [[AVCaptureMovieFileOutput alloc] init];
    if ([_session canAddOutput:outPut]) {
        [_session addOutput:outPut];
    }
    // The video connection only exists after the output has been added to the session
    AVCaptureConnection *captureConnection = [outPut connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
    // [outPut startRecordingToOutputFileURL:[NSURL fileURLWithPath:[NSString stringWithFormat:@"%@.mp4",PTCacheDir]] recordingDelegate:self];

    // 6. Create the preview layer that shows the live camera feed
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    CALayer *layer = self.view.layer;
    layer.masksToBounds = YES;
    captureVideoPreviewLayer.frame = layer.bounds;
    // Fill mode
    captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Add the preview layer to the view hierarchy
    [layer addSublayer:captureVideoPreviewLayer];

    [_session startRunning];

    UIButton *save = [UIButton buttonWithType:UIButtonTypeCustom];
    [save setTitle:@"Save" forState:UIControlStateNormal];
    save.frame = CGRectMake(100, 100, 100, 100);
    save.backgroundColor = kMainColor;
    [save addTarget:self action:@selector(stop:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:save];
}

- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
    WELog(@"Recording started");
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didResumeRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
    WELog(@"Recording resumed");
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
    WELog(@"Recording finished");
}

// Toggle the session with the button: stop on the first tap, restart on the next
- (void)stop:(UIButton *)sender{
    sender.selected = !sender.selected;
    if (sender.selected) {
        [_session stopRunning];
    }
    else{
        [_session startRunning];
    }
}
@end
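The startRecordingToOutputFileURL: call is commented out above, so this sample only shows the live preview. Below is a minimal sketch of actually starting and stopping a recording; it assumes the AVCaptureMovieFileOutput is kept in an ivar (_movieOutput is a hypothetical name) alongside _session.

// Start writing to a temporary file; the delegate callbacks above fire as recording progresses.
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"];
[_movieOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:path]
                          recordingDelegate:self];

// ...later, e.g. in the button action:
if (_movieOutput.isRecording) {
    // Finishes the file and triggers didFinishRecordingToOutputFileAtURL:
    [_movieOutput stopRecording];
}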

Adding a Simple Filter to an Image

- (UIImage *)filterImage:(UIImage *)image{
    CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
    // Print the filter names in a category, useful for discovering what is available
    NSLog(@"%@", [CIFilter filterNamesInCategory:kCICategoryDistortionEffect]);
    // Create the color-adjustment filter
    CIFilter *filter = [CIFilter filterWithName:@"CIColorMonochrome"];
    // Feed the source image into the filter
    [filter setValue:inputImage forKey:kCIInputImageKey];
    // Set the monochrome tint color
    [filter setValue:[CIColor colorWithRed:1.000 green:0.759 blue:0.592 alpha:1] forKey:kCIInputColorKey];
    // Create a Core Image context (the canvas used for rendering)
    CIContext *context = [CIContext contextWithOptions:nil];
    // Render the filtered output into a new image
    CGImageRef imageRef = [context createCGImage:filter.outputImage fromRect:filter.outputImage.extent];
    image = [UIImage imageWithCGImage:imageRef];
    // createCGImage returns an owned reference, so release it to avoid a leak
    CGImageRelease(imageRef);
    return image;
}
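The same pattern works for any built-in filter name reported by filterNamesInCategory:. A minimal sketch applying CISepiaTone instead, with its intensity parameter (the 0.8 value is just an arbitrary example):

CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
[sepia setValue:inputImage forKey:kCIInputImageKey];
// Intensity ranges from 0 (no effect) to 1 (full sepia)
[sepia setValue:@0.8 forKey:kCIInputIntensityKey];
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [context createCGImage:sepia.outputImage fromRect:sepia.outputImage.extent];
UIImage *result = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);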