服务器之家

服务器之家 > 正文

iOS使用AVFoundation展示视频

时间:2021-05-24 15:41     来源/作者:yongyinmg

本文实例为大家分享了iOS使用AVFoundation展示视频的具体代码,供大家参考,具体内容如下

（以下为完整示例代码）
//
// Capter2ViewController.m
// IosTest
//
// Created by garin on 13-7-19.
// Copyright (c) 2013年 garin. All rights reserved.
//
 
#import "Capter2ViewController.h"
 
@interface Capter2ViewController ()
@end

@implementation Capter2ViewController

// NOTE: this file uses manual reference counting (MRC) throughout
// (release / autorelease / dispatch_release); do not mix in ARC idioms.

- (void)dealloc
{
  // `session` is the only ivar this class owns directly; the views are
  // retained by the view hierarchy.
  [session release];
  [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
  self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
  if (self) {
    // Custom initialization
  }
  return self;
}

- (void)viewDidLoad
{
  [super viewDidLoad];

  // Container view that the camera preview layer is attached to in
  // -setupCaptureSession.
  videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
  [self.view addSubview:videoPreviewView];
  [videoPreviewView release]; // the view hierarchy now retains it

  // Configure the capture session and show the live camera feed.
  [self setupCaptureSession];

  // Optional image view for displaying individual frames delivered to
  // -captureOutput:didOutputSampleBuffer:fromConnection:. Left disabled,
  // matching the early return in that callback.
//  imgView=[[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
//  imgView.backgroundColor=[UIColor grayColor];
//  [self.view addSubview:imgView];
//  [imgView release];

  // Button that stops the capture session. (Renamed from the original
  // misspelled local "cloeseBtn".)
  UIButton *closeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
  closeButton.frame = CGRectMake(10, 220, 300, 50);
  [closeButton setTitle:@"Press" forState:UIControlStateNormal];
  [closeButton addTarget:self
                  action:@selector(closeBtnClick:)
        forControlEvents:UIControlEventTouchUpInside];
  [self.view addSubview:closeButton];
}

// Stops the live capture when the button is tapped.
- (void)closeBtnClick:(id)sender
{
  [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
  [super didReceiveMemoryWarning];
  // Dispose of any resources that can be recreated.
}

// Creates the AVCaptureSession (camera input + video-data output), attaches
// a preview layer to videoPreviewView, and starts the session.
- (void)setupCaptureSession
{
  NSError *error = nil;

  // Create the session. Ownership is kept in the `session` ivar and
  // balanced in -dealloc.
  session = [[AVCaptureSession alloc] init];

  // Configure the session to produce lower resolution video frames, if your
  // processing algorithm can cope. We'll specify medium quality for the
  // chosen device.
  session.sessionPreset = AVCaptureSessionPresetLow;

  // Find a suitable AVCaptureDevice (the default camera).
  AVCaptureDevice *device =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

  // Create a device input with the device and add it to the session.
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (!input) {
    // Bail out instead of passing nil to -addInput: (the original code fell
    // through here, which raises an exception inside AVCaptureSession).
    NSLog(@"Could not create capture device input: %@", error);
    return;
  }
  [session addInput:input];

  // Create a VideoDataOutput and add it to the session.
  AVCaptureVideoDataOutput *output =
      [[[AVCaptureVideoDataOutput alloc] init] autorelease];
  [session addOutput:output];

  // Deliver sample buffers on a private serial queue. The output retains
  // the queue, so releasing our reference here is safe (MRC GCD objects
  // require an explicit dispatch_release).
  dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
  [output setSampleBufferDelegate:self queue:queue];
  dispatch_release(queue);

  // Specify the pixel format: BGRA is what CGBitmapContextCreate below
  // expects (kCGBitmapByteOrder32Little | premultiplied-first alpha).
  output.videoSettings =
      [NSDictionary dictionaryWithObject:
          [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                  forKey:(id)kCVPixelBufferPixelFormatTypeKey];

  // If you wish to cap the frame rate to a known value, such as 15 fps,
  // configure the output's capture connection accordingly.
  //output.minFrameDuration = CMTimeMake(1, 15);

  // Attach the live preview layer to the container view created in
  // -viewDidLoad.
  AVCaptureVideoPreviewLayer *previewLayer =
      [AVCaptureVideoPreviewLayer layerWithSession:session];
  previewLayer.frame = videoPreviewView.bounds; // the UIView showing the video
  previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  [videoPreviewView.layer addSublayer:previewLayer];

  // Start the flow of data. (The original called -startRunning twice —
  // once unconditionally and once guarded; a single guarded call suffices.)
  if (![session isRunning]) {
    [session startRunning];
  }
}

// AVCaptureVideoDataOutputSampleBufferDelegate — called on the private
// delegate queue for every captured frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
  // Frame-to-UIImage conversion is intentionally disabled (imgView is never
  // created in -viewDidLoad); remove this return to enable it. NOTE(review):
  // if re-enabled, the imgView.image assignment below must be dispatched to
  // the main queue — this callback runs on a background queue.
  return;

  UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
  // Display the frame that was just captured.
  imgView.image = image;
}

// Creates an autoreleased UIImage from a BGRA sample buffer's pixel data.
// Assumes the buffer uses kCVPixelFormatType_32BGRA, as configured on the
// capture output in -setupCaptureSession.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
  // Get a CMSampleBuffer's Core Video image buffer for the media data.
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

  // Lock the base address of the pixel buffer before touching its memory.
  CVPixelBufferLockBaseAddress(imageBuffer, 0);

  // Pixel data pointer and layout of the buffer.
  void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
  size_t width = CVPixelBufferGetWidth(imageBuffer);
  size_t height = CVPixelBufferGetHeight(imageBuffer);

  // Create a device-dependent RGB color space.
  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

  // Create a bitmap graphics context over the sample buffer data; the
  // bitmap-info flags match the 32BGRA pixel format requested above.
  CGContextRef context = CGBitmapContextCreate(
      baseAddress, width, height, 8, bytesPerRow, colorSpace,
      kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

  // Create a Quartz image from the pixel data in the bitmap graphics context.
  CGImageRef quartzImage = CGBitmapContextCreateImage(context);

  // Unlock the pixel buffer as soon as we have our own copy of the pixels.
  CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

  // Free up the context and color space.
  CGContextRelease(context);
  CGColorSpaceRelease(colorSpace);

  // Create an (autoreleased) image object from the Quartz image.
  UIImage *image = [UIImage imageWithCGImage:quartzImage];

  // Release the Quartz image; the UIImage holds its own reference.
  CGImageRelease(quartzImage);

  return image;
}

@end

以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持服务器之家。

原文链接:https://blog.csdn.net/yongyinmg/article/details/9379151

标签:

相关文章

热门资讯

2020微信伤感网名听哭了 让对方看到心疼的伤感网名大全
2020微信伤感网名听哭了 让对方看到心疼的伤感网名大全 2019-12-26
yue是什么意思 网络流行语yue了是什么梗
yue是什么意思 网络流行语yue了是什么梗 2020-10-11
背刺什么意思 网络词语背刺是什么梗
背刺什么意思 网络词语背刺是什么梗 2020-05-22
苹果12mini价格表官网报价 iPhone12mini全版本价格汇总
苹果12mini价格表官网报价 iPhone12mini全版本价格汇总 2020-11-13
2021德云社封箱演出完整版 2021年德云社封箱演出在线看
2021德云社封箱演出完整版 2021年德云社封箱演出在线看 2021-03-15
返回顶部