
iOS WeChat-Style Camera: Photo Capture and Video Recording


There are plenty of custom camera examples online. This is just a quick iOS custom camera demo I put together that mimics WeChat's photo capture and video recording, for reference only:

It uses the following frameworks:

#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

To use them you need to add the corresponding permission entries to Info.plist:

Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Camera Usage Description
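
If you edit Info.plist as source code instead of through the property-list editor, the three display names above correspond to the raw keys below; the description strings here are only example wording, so replace them with your own text:

<key>NSCameraUsageDescription</key>
<string>Used to take photos and record video</string>
<key>NSMicrophoneUsageDescription</key>
<string>Used to record audio while capturing video</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>Used to save captured photos and videos to your photo library</string>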

I modeled this demo on WeChat: tap to take a photo, press and hold to record a video, and the video plays back right after recording finishes. A simple player is wrapped up for that:

The .m file:

#import "havplayer.h"
#import <avfoundation/avfoundation.h>

@interface havplayer ()

@property (nonatomic,strong) avplayer *player;//播放器对象

@end

@implementation havplayer

/*
// only override drawrect: if you perform custom drawing.
// an empty implementation adversely affects performance during animation.
- (void)drawrect:(cgrect)rect {
 // drawing code
}
*/

- (instancetype)initwithframe:(cgrect)frame withshowinview:(uiview *)bgview url:(nsurl *)url {
 if (self = [self initwithframe:frame]) {
  //创建播放器层
  avplayerlayer *playerlayer = [avplayerlayer playerlayerwithplayer:self.player];
  playerlayer.frame = self.bounds;

  [self.layer addsublayer:playerlayer];
  if (url) {
   self.videourl = url;
  }

  [bgview addsubview:self];
 }
 return self;
}

- (void)dealloc {
 [self removeavplayerntf];
 [self stopplayer];
 self.player = nil;
}

- (avplayer *)player {
 if (!_player) {
  _player = [avplayer playerwithplayeritem:[self getavplayeritem]];
  [self addavplayerntf:_player.currentitem];

 }

 return _player;
}

- (avplayeritem *)getavplayeritem {
 avplayeritem *playeritem=[avplayeritem playeritemwithurl:self.videourl];
 return playeritem;
}

- (void)setvideourl:(nsurl *)videourl {
 _videourl = videourl;
 [self removeavplayerntf];
 [self nextplayer];
}

- (void)nextplayer {
 [self.player seektotime:cmtimemakewithseconds(0, _player.currentitem.duration.timescale)];
 [self.player replacecurrentitemwithplayeritem:[self getavplayeritem]];
 [self addavplayerntf:self.player.currentitem];
 if (self.player.rate == 0) {
  [self.player play];
 }
}

- (void)addAvPlayerNtf:(AVPlayerItem *)playerItem {
    // Observe the status property
    [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    // Observe the buffering progress (loadedTimeRanges)
    [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.player.currentItem];
}

- (void)removeAvPlayerNtf {
    AVPlayerItem *playerItem = self.player.currentItem;
    [playerItem removeObserver:self forKeyPath:@"status"];
    [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (void)stopPlayer {
    if (self.player.rate == 1) {
        [self.player pause]; // pause only if currently playing
    }
}

/**
 *  Observe the player state via KVO
 *
 *  @param keyPath observed key path
 *  @param object  the observed object
 *  @param change  the change dictionary
 *  @param context context
 */
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    AVPlayerItem *playerItem = object;
    if ([keyPath isEqualToString:@"status"]) {
        AVPlayerStatus status = [[change objectForKey:@"new"] intValue];
        if (status == AVPlayerStatusReadyToPlay) {
            NSLog(@"Now playing..., total video duration: %.2f", CMTimeGetSeconds(playerItem.duration));
        }
    } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) {
        NSArray *array = playerItem.loadedTimeRanges;
        CMTimeRange timeRange = [array.firstObject CMTimeRangeValue]; // the range buffered so far
        float startSeconds = CMTimeGetSeconds(timeRange.start);
        float durationSeconds = CMTimeGetSeconds(timeRange.duration);
        NSTimeInterval totalBuffer = startSeconds + durationSeconds; // total buffered length
        NSLog(@"Total buffered: %.2f", totalBuffer);
    }
}

- (void)playbackFinished:(NSNotification *)ntf {
    PLog(@"Video playback finished");
    [self.player seekToTime:CMTimeMake(0, 1)];
    [self.player play];
}

@end
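
A minimal usage sketch of the player: the public header isn't included in the article, so the initializer, the videoUrl property and stopPlayer below simply mirror the .m file above, and someVideoURL / containerView are placeholders you would supply yourself.

// Create the player view inside a container view and start playing a local file.
NSURL *someVideoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"]];
HAVPlayer *player = [[HAVPlayer alloc] initWithFrame:containerView.bounds
                                      withShowInView:containerView
                                                 url:someVideoURL];

// Switch to another clip later by assigning videoUrl again:
// player.videoUrl = anotherURL;

// Pause playback when the view goes away:
[player stopPlayer];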

In addition, WeChat shows a circular progress arc around the bottom button while you hold it to record:

The .m file:

#import "hprogressview.h"

@interface hprogressview ()

/**
 * 进度值0-1.0之间
 */
@property (nonatomic,assign)cgfloat progressvalue;

@property (nonatomic, assign) cgfloat currenttime;

@end

@implementation hprogressview


// only override drawrect: if you perform custom drawing.
// an empty implementation adversely affects performance during animation.
- (void)drawrect:(cgrect)rect {
 // drawing code
 cgcontextref ctx = uigraphicsgetcurrentcontext();//获取上下文
 plog(@"width = %f",self.frame.size.width);
 cgpoint center = cgpointmake(self.frame.size.width/2.0, self.frame.size.width/2.0); //设置圆心位置
 cgfloat radius = self.frame.size.width/2.0-5; //设置半径
 cgfloat starta = - m_pi_2; //圆起点位置
 cgfloat enda = -m_pi_2 + m_pi * 2 * _progressvalue; //圆终点位置

 uibezierpath *path = [uibezierpath bezierpathwitharccenter:center radius:radius startangle:starta endangle:enda clockwise:yes];

 cgcontextsetlinewidth(ctx, 10); //设置线条宽度
 [[uicolor whitecolor] setstroke]; //设置描边颜色

 cgcontextaddpath(ctx, path.cgpath); //把路径添加到上下文

 cgcontextstrokepath(ctx); //渲染
}

- (void)setTimeMax:(NSInteger)timeMax {
    _timeMax = timeMax;
    self.currentTime = 0;
    self.progressValue = 0;
    [self setNeedsDisplay];
    self.hidden = NO;
    [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
}

- (void)clearProgress {
    _currentTime = _timeMax;
    self.hidden = YES;
}

- (void)startProgress {
    _currentTime += 0.1;
    if (_timeMax > _currentTime) {
        _progressValue = _currentTime / _timeMax;
        PLog(@"progress = %f", _progressValue);
        [self setNeedsDisplay];
        [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
    }

    if (_timeMax <= _currentTime) {
        [self clearProgress];
    }
}

@end
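
A minimal usage sketch of the progress view, assuming it is laid out as a square (the arc's center and radius are derived from the view's width) and that timeMax and clearProgress are declared in the header, which isn't shown here:

HProgressView *progress = [[HProgressView alloc] initWithFrame:CGRectMake(0, 0, 80, 80)];
progress.backgroundColor = [UIColor clearColor]; // only the white arc should be visible
[self.view addSubview:progress];

progress.timeMax = 10;    // start a 10-second arc animation
// ...
[progress clearProgress]; // hide the arc early, e.g. when recording stops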

Next comes the camera view controller. Since it was written in a hurry it uses a xib, so please don't use it as-is. Here is the .m file:

#import "hvideoviewcontroller.h"
#import <avfoundation/avfoundation.h>
#import "havplayer.h"
#import "hprogressview.h"
#import <foundation/foundation.h>
#import <assetslibrary/assetslibrary.h>

typedef void(^propertychangeblock)(avcapturedevice *capturedevice);
@interface hvideoviewcontroller ()<avcapturefileoutputrecordingdelegate>

//轻触拍照,按住摄像
@property (strong, nonatomic) iboutlet uilabel *labeltiptitle;

//视频输出流
@property (strong,nonatomic) avcapturemoviefileoutput *capturemoviefileoutput;
//图片输出流
//@property (strong,nonatomic) avcapturestillimageoutput *capturestillimageoutput;//照片输出流
//负责从avcapturedevice获得输入数据
@property (strong,nonatomic) avcapturedeviceinput *capturedeviceinput;
//后台任务标识
@property (assign,nonatomic) uibackgroundtaskidentifier backgroundtaskidentifier;

@property (assign,nonatomic) uibackgroundtaskidentifier lastbackgroundtaskidentifier;

@property (weak, nonatomic) iboutlet uiimageview *focuscursor; //聚焦光标

//负责输入和输出设备之间的数据传递
@property(nonatomic)avcapturesession *session;

//图像预览层,实时显示捕获的图像
@property(nonatomic)avcapturevideopreviewlayer *previewlayer;

@property (strong, nonatomic) iboutlet uibutton *btnback;
//重新录制
@property (strong, nonatomic) iboutlet uibutton *btnafresh;
//确定
@property (strong, nonatomic) iboutlet uibutton *btnensure;
//摄像头切换
@property (strong, nonatomic) iboutlet uibutton *btncamera;

@property (strong, nonatomic) iboutlet uiimageview *bgview;
//记录录制的时间 默认最大60秒
@property (assign, nonatomic) nsinteger seconds;

//记录需要保存视频的路径
@property (strong, nonatomic) nsurl *savevideourl;

//是否在对焦
@property (assign, nonatomic) bool isfocus;
@property (strong, nonatomic) iboutlet nslayoutconstraint *afreshcenterx;
@property (strong, nonatomic) iboutlet nslayoutconstraint *ensurecenterx;
@property (strong, nonatomic) iboutlet nslayoutconstraint *backcenterx;

//视频播放
@property (strong, nonatomic) havplayer *player;

@property (strong, nonatomic) iboutlet hprogressview *progressview;

//是否是摄像 yes 代表是录制 no 表示拍照
@property (assign, nonatomic) bool isvideo;

@property (strong, nonatomic) uiimage *takeimage;
@property (strong, nonatomic) uiimageview *takeimageview;
@property (strong, nonatomic) iboutlet uiimageview *imgrecord;


@end

//时间大于这个就是视频,否则为拍照
#define timemax 1

@implementation HVideoViewController


- (void)dealloc {
    [self removeNotification];
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.

    UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"];
    self.backCenterX.constant = -(SCREEN_WIDTH/2/2) - image.size.width/2/2;

    self.progressView.layer.cornerRadius = self.progressView.frame.size.width/2;

    if (self.HSeconds == 0) {
        self.HSeconds = 60;
    }

    [self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4];
}

- (void)hiddenTipsLabel {
    self.labelTipTitle.hidden = YES;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:YES];
    [self customCamera];
    [self.session startRunning];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.session stopRunning];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:NO];
}

- (void)customCamera {

    // Create the session that connects inputs and outputs
    self.session = [[AVCaptureSession alloc] init];
    // Set the resolution (the highest the device supports)
    if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        self.session.sessionPreset = AVCaptureSessionPresetHigh;
    }
    // Get the back camera
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    // Get an audio input device
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];

    // Create the video input
    NSError *error = nil;
    self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        PLog(@"Failed to create the video device input, error: %@", error.localizedDescription);
        return;
    }

    // Create the audio input
    error = nil;
    AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (error) {
        NSLog(@"Failed to create the audio device input, error: %@", error.localizedDescription);
        return;
    }

    // Create the output object
    self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; // movie file output

    // Add the inputs to the session
    if ([self.session canAddInput:self.captureDeviceInput]) {
        [self.session addInput:self.captureDeviceInput];
        [self.session addInput:audioCaptureDeviceInput];
        // Enable video stabilization
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if ([connection isVideoStabilizationSupported]) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
        }
    }

    // Add the output to the session
    if ([self.session canAddOutput:self.captureMovieFileOutput]) {
        [self.session addOutput:self.captureMovieFileOutput];
    }

    // Create the preview layer that shows the camera feed in real time
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.frame = self.view.bounds; // CGRectMake(0, 0, self.view.width, self.view.height);
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill mode
    [self.bgView.layer addSublayer:self.previewLayer];

    [self addNotificationToCaptureDevice:captureDevice];
    [self addGenstureRecognizer];
}

 

- (IBAction)onCancelAction:(UIButton *)sender {
    [self dismissViewControllerAnimated:YES completion:^{
        [Utility hideProgressDialog];
    }];
}


- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        PLog(@"Start recording");
        // Get the connection from the output
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeAudio];
        // Use the connection to drive the output
        if (![self.captureMovieFileOutput isRecording]) {
            // Start a background task if multitasking is supported
            if ([[UIDevice currentDevice] isMultitaskingSupported]) {
                self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
            }
            if (self.saveVideoUrl) {
                [[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
            }
            // Keep the recording orientation consistent with the preview layer
            connection.videoOrientation = [self.previewLayer connection].videoOrientation;
            NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingString:@"myMovie.mov"];
            NSLog(@"save path is :%@", outputFilePath);
            NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
            NSLog(@"fileUrl:%@", fileUrl);
            [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
        } else {
            [self.captureMovieFileOutput stopRecording];
        }
    }
}


- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        PLog(@"Touch ended");
        if (!self.isVideo) {
            [self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
        } else {
            [self endRecord];
        }
    }
}

- (void)endRecord {
    [self.captureMovieFileOutput stopRecording]; // stop recording
}

- (IBAction)onAfreshAction:(UIButton *)sender {
    PLog(@"Re-record");
    [self recoverLayout];
}

- (IBAction)onEnsureAction:(UIButton *)sender {
    PLog(@"Confirm: save or send the result here");
    if (self.saveVideoUrl) {
        WS(weakSelf)
        [Utility showProgressDialogText:@"Processing video..."];
        ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
        [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
            PLog(@"outputUrl:%@", weakSelf.saveVideoUrl);
            [[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
            if (weakSelf.lastBackgroundTaskIdentifier != UIBackgroundTaskInvalid) {
                [[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
            }
            if (error) {
                PLog(@"Error while saving the video to the photo album: %@", error.localizedDescription);
                [Utility showAllTextDialog:kAppDelegate.window text:@"Failed to save the video to the photo album"];
            } else {
                if (weakSelf.takeBlock) {
                    weakSelf.takeBlock(assetURL);
                }
                PLog(@"Video saved to the photo album successfully.");
                [weakSelf onCancelAction:nil];
            }
        }];
    } else {
        // Photo
        UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
        if (self.takeBlock) {
            self.takeBlock(self.takeImage);
        }

        [self onCancelAction:nil];
    }
}

// Switch between the front and back cameras
- (IBAction)onCameraAction:(UIButton *)sender {
    PLog(@"Switch camera");
    AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
    AVCaptureDevicePosition currentPosition = [currentDevice position];
    [self removeNotificationFromCaptureDevice:currentDevice];
    AVCaptureDevice *toChangeDevice;
    AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront; // front
    if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
        toChangePosition = AVCaptureDevicePositionBack; // back
    }
    toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
    [self addNotificationToCaptureDevice:toChangeDevice];
    // Create the input object for the new device
    AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:nil];

    // Always call beginConfiguration before changing the session, and commitConfiguration when done
    [self.session beginConfiguration];
    // Remove the old input
    [self.session removeInput:self.captureDeviceInput];
    // Add the new input
    if ([self.session canAddInput:toChangeDeviceInput]) {
        [self.session addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    }
    // Commit the new session configuration
    [self.session commitConfiguration];
}

- (void)onStartTranscribe:(NSURL *)fileURL {
    if ([self.captureMovieFileOutput isRecording]) {
        -- self.seconds;
        if (self.seconds > 0) {
            if (self.HSeconds - self.seconds >= TimeMax && !self.isVideo) {
                self.isVideo = YES; // held longer than TimeMax, so this is a video recording
                self.progressView.timeMax = self.seconds;
            }
            [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
        } else {
            if ([self.captureMovieFileOutput isRecording]) {
                [self.captureMovieFileOutput stopRecording];
            }
        }
    }
}


#pragma mark - Movie file output delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    PLog(@"Recording started...");
    self.seconds = self.HSeconds;
    [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
}


- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    PLog(@"Recording finished.");
    [self changeLayout];
    if (self.isVideo) {
        self.saveVideoUrl = outputFileURL;
        if (!self.player) {
            self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
        } else {
            if (outputFileURL) {
                self.player.videoUrl = outputFileURL;
                self.player.hidden = NO;
            }
        }
    } else {
        // Photo
        self.saveVideoUrl = nil;
        [self videoHandlePhoto:outputFileURL];
    }
}

- (void)videoHandlePhoto:(NSURL *)url {
    AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
    imageGenerator.appliesPreferredTrackTransform = YES; // rotate the snapshot to the correct orientation
    NSError *error = nil;
    CMTime time = CMTimeMake(0, 30); // time for the thumbnail. CMTime is a struct describing movie time: the first argument is the second of the video to capture, the second is the number of frames per second (use CMTimeMake to grab a specific frame within a second)
    CMTime actualTime; // the time at which the thumbnail was actually generated
    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error) {
        PLog(@"Failed to grab a frame from the video: %@", error.localizedDescription);
    }
    CMTimeShow(actualTime);
    UIImage *image = [UIImage imageWithCGImage:cgImage];

    CGImageRelease(cgImage);
    if (image) {
        PLog(@"Frame captured successfully");
    } else {
        PLog(@"Failed to capture a frame");
    }


    self.takeImage = image; // [UIImage imageWithCGImage:cgImage];

    [[NSFileManager defaultManager] removeItemAtURL:url error:nil];

    if (!self.takeImageView) {
        self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
        [self.bgView addSubview:self.takeImageView];
    }
    self.takeImageView.hidden = NO;
    self.takeImageView.image = self.takeImage;
}

#pragma mark - Notifications

// Register notifications
- (void)setupObservers
{
    NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
    [notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
}

// Quit video recording when the app goes to the background
- (void)applicationDidEnterBackground:(NSNotification *)notification {
    [self onCancelAction:nil];
}

/**
 *  Add notifications for the capture device
 */
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice {
    // Note: the device must first be allowed to monitor the subject area before the area-change notification is added
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        captureDevice.subjectAreaChangeMonitoringEnabled = YES;
    }];
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    // Subject area changed
    [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

/**
 *  Remove all notifications
 */
- (void)removeNotification {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self];
}

- (void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    // Session runtime error
    [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}

/**
 *  Device connected
 *
 *  @param notification notification object
 */
- (void)deviceConnected:(NSNotification *)notification {
    NSLog(@"Device connected...");
}

/**
 *  Device disconnected
 *
 *  @param notification notification object
 */
- (void)deviceDisconnected:(NSNotification *)notification {
    NSLog(@"Device disconnected.");
}

/**
 *  Subject area changed
 *
 *  @param notification notification object
 */
- (void)areaChange:(NSNotification *)notification {
    NSLog(@"Subject area changed...");
}

/**
 *  Session runtime error
 *
 *  @param notification notification object
 */
- (void)sessionRuntimeError:(NSNotification *)notification {
    NSLog(@"A session runtime error occurred.");
}

 

/**
 *  Get the camera at the specified position
 *
 *  @param position camera position
 *
 *  @return the camera device
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

/**
 *  Common helper for changing device properties
 *
 *  @param propertyChange the property-change block
 */
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    // Note: always call lockForConfiguration: before changing device properties, and unlockForConfiguration afterwards
    if ([captureDevice lockForConfiguration:&error]) {
        // Continuous auto white balance
        if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
        }
        // Turn the flash on automatically based on ambient light
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [captureDevice setFlashMode:AVCaptureFlashModeAuto];
        }

        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"Error while setting device properties: %@", error.localizedDescription);
    }
}

/**
 *  Set the flash mode
 *
 *  @param flashMode flash mode
 */
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFlashModeSupported:flashMode]) {
            [captureDevice setFlashMode:flashMode];
        }
    }];
}

/**
 *  Set the focus mode
 *
 *  @param focusMode focus mode
 */
- (void)setFocusMode:(AVCaptureFocusMode)focusMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Set the exposure mode
 *
 *  @param exposureMode exposure mode
 */
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
    }];
}

/**
 *  Set the focus point
 *
 *  @param point the focus point
 */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
//        if ([captureDevice isFocusPointOfInterestSupported]) {
//            [captureDevice setFocusPointOfInterest:point];
//        }
//        if ([captureDevice isExposurePointOfInterestSupported]) {
//            [captureDevice setExposurePointOfInterest:point];
//        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Add a tap gesture; tapping focuses at the tapped point
 */
- (void)addGenstureRecognizer {
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
    [self.bgView addGestureRecognizer:tapGesture];
}

- (void)tapScreen:(UITapGestureRecognizer *)tapGesture {
    if ([self.session isRunning]) {
        CGPoint point = [tapGesture locationInView:self.bgView];
        // Convert the UI coordinate into a camera coordinate
        CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];
        [self setFocusCursorWithPoint:point];
        [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
    }
}

/**
 *  Position the focus cursor
 *
 *  @param point cursor position
 */
- (void)setFocusCursorWithPoint:(CGPoint)point {
    if (!self.isFocus) {
        self.isFocus = YES;
        self.focusCursor.center = point;
        self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
        self.focusCursor.alpha = 1.0;
        [UIView animateWithDuration:0.5 animations:^{
            self.focusCursor.transform = CGAffineTransformIdentity;
        } completion:^(BOOL finished) {
            [self performSelector:@selector(onHiddenFocusCursorAction) withObject:nil afterDelay:0.5];
        }];
    }
}

- (void)onHiddenFocusCursorAction {
    self.focusCursor.alpha = 0;
    self.isFocus = NO;
}

// Called when capturing finishes
- (void)changeLayout {
    self.imgRecord.hidden = YES;
    self.btnCamera.hidden = YES;
    self.btnAfresh.hidden = NO;
    self.btnEnsure.hidden = NO;
    self.btnBack.hidden = YES;
    if (self.isVideo) {
        [self.progressView clearProgress];
    }
    self.afreshCenterX.constant = -(SCREEN_WIDTH/2/2);
    self.ensureCenterX.constant = SCREEN_WIDTH/2/2;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];

    self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
    self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
    [self.session stopRunning];
}


// Called when re-shooting
- (void)recoverLayout {
    if (self.isVideo) {
        self.isVideo = NO;
        [self.player stopPlayer];
        self.player.hidden = YES;
    }
    [self.session startRunning];

    if (!self.takeImageView.hidden) {
        self.takeImageView.hidden = YES;
    }
//    self.saveVideoUrl = nil;
    self.afreshCenterX.constant = 0;
    self.ensureCenterX.constant = 0;
    self.imgRecord.hidden = NO;
    self.btnCamera.hidden = NO;
    self.btnAfresh.hidden = YES;
    self.btnEnsure.hidden = YES;
    self.btnBack.hidden = NO;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end

Using it is quite simple:

- (IBAction)onCameraAction:(UIButton *)sender {
    // Since this is a demo it uses a xib; adjust it to your own needs. The demo only provides an idea, so don't drop it into a project as-is.
    HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
    ctrl.HSeconds = 30; // maximum recording duration
    ctrl.takeBlock = ^(id item) {
        if ([item isKindOfClass:[NSURL class]]) {
            NSURL *videoURL = item;
            // the video URL

        } else {
            // the photo

        }
    };
    [self presentViewController:ctrl animated:YES completion:nil];
}
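
One more note: the demo saves the recording with ALAssetsLibrary, which has been deprecated since iOS 9. If you want to avoid it, the save step in onEnsureAction: could be replaced with the Photos framework. The sketch below is not part of the original demo, and videoURL stands for whatever saveVideoUrl points at:

#import <Photos/Photos.h>

[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    // Create a new asset in the photo library from the recorded movie file
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoURL];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
    if (!success) {
        NSLog(@"Failed to save the video: %@", error.localizedDescription);
    }
}];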

Here is the demo address as well (if you don't like it, just skip it -_-\):

kjcamera

That's all. It's fairly simple, but I hope it helps. Thanks!

