本來用的ZBar開源庫實現的掃描二維碼,但是貌似不支持arm64了,也沒有在更新。
#import <AVFoundation/AVFoundation.h>
@interface QRCodeReadController : BaseViewController <AVCaptureMetadataOutputObjectsDelegate>
@property (weak, nonatomic) IBOutlet UIView *viewPreview;
@end
在xib上加一個viewPreview,用來掃碼時動態顯示獲取到的攝像頭的內容
// Private class extension: scan state, the AVFoundation capture pipeline,
// and the animated scan-line overlay.
@interface QRCodeReadController ()
{
NSInteger maxY; // lower travel bound (pixels) for the scan line — set in -viewDidLoad
NSInteger minY; // upper travel bound (pixels) for the scan line — set in -viewDidLoad
NSTimer *timer; // repeating timer driving the scan-line animation (-move)
UIImageView *line; // the moving scan-line image added on top of viewPreview
}
// NOTE(review): set in -viewDidLoad and the delegate callback but never read
// in the visible code — confirm it is used elsewhere or remove.
@property (nonatomic) BOOL isReading;
@property (nonatomic, strong) AVCaptureSession *captureSession; // camera session created in -startReading
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer; // live camera preview layer
-(BOOL)startReading;
-(void)stopReading;
@end
@implementation QRCodeReadController
// Designated initializer — no custom setup is needed here; all configuration
// happens in -viewDidLoad once the nib outlets are connected.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    if ((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil])) {
        // Intentionally empty.
    }
    return self;
}
// Starts the camera capture session and, on success, overlays an animated
// scan line on viewPreview, driven by a repeating ~40 fps timer.
- (void)viewDidLoad
{
    [super viewDidLoad];
    _isReading = NO;
    if ([self startReading]) {
        // Vertical travel bounds for the scan-line animation (see -move).
        maxY = 280;
        minY = 2;
        line = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 280, 10)];
        // NOTE(review): image name reconstructed from a garbled literal (@e0) —
        // confirm the actual asset name in the bundle.
        [line setImage:[UIImage imageNamed:@"e0"]];
        [_viewPreview addSubview:line];
        // NOTE(review): a repeating NSTimer retains its target, so this keeps
        // self alive until the timer is invalidated — make sure -stopReading
        // (or dealloc-time teardown) invalidates it.
        timer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 40
                                                 target:self
                                               selector:@selector(move)
                                               userInfo:nil
                                                repeats:YES];
    }
}
/*
*
*
AVCaptureMetadataOutput object. This class in combination with the AVCaptureMetadataOutputObjectsDelegate protocol will manage to intercept any metadata found in the input device (meaning data in a QR code captured by our camera) and translate it to a human readable format.
*/
// Builds the AVFoundation capture pipeline: camera input → metadata output
// (restricted to QR codes) → live preview layer inside viewPreview, then
// starts the session.
// Returns YES on success, NO if the camera input could not be created
// (no camera available, or camera access denied).
- (BOOL)startReading {
    NSError *error;
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:input];
    // The metadata output decodes machine-readable codes and hands them to the
    // delegate callback (-captureOutput:didOutputMetadataObjects:fromConnection:).
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];
    // Deliver metadata callbacks on a private serial queue (not the main thread).
    dispatch_queue_t dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    // Restrict recognition to QR codes; must be set after the output is added
    // to the session, otherwise the type is not yet available.
    [captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
    // Show the user what the camera sees, filling the preview view.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_viewPreview.layer.bounds];
    [_viewPreview.layer addSublayer:_videoPreviewLayer];
    [_captureSession startRunning];
    return YES;
}
// AVCaptureMetadataOutputObjectsDelegate — invoked on the private metadata
// queue set in -startReading whenever the session recognizes metadata in the
// video stream. Logs the first decoded QR payload and stops scanning.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // This callback runs on a background queue; session/UI teardown
            // must happen on the main thread.
            [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
            NSLog(@"metadataObj string = %@", [metadataObj stringValue]);
            _isReading = NO;
        }
    }
}
// Stops scanning: invalidates the scan-line animation timer, tears down the
// capture session, and removes the preview layer.
// The timer MUST be invalidated here — a repeating NSTimer retains its target,
// so without this the controller is never deallocated and the scan line keeps
// animating after the session has stopped.
-(void)stopReading{
    [timer invalidate];
    timer = nil;
    [_captureSession stopRunning];
    _captureSession = nil;
    [_videoPreviewLayer removeFromSuperlayer];
    _videoPreviewLayer = nil;
}
// Animates the scan line while scanning: bounces it between minY (top) and
// maxY (bottom) in 5 pt steps, driven by the repeating timer from -viewDidLoad.
// NOTE(review): the original source was truncated/garbled here (a comparison
// operator and one whole direction branch were lost); the logic below was
// reconstructed symmetrically from the surviving upward branch — confirm
// against the original implementation.
-(void)move
{
    static BOOL flag = TRUE; // TRUE = moving down, FALSE = moving up
    if (flag) {
        if (line.frame.origin.y < maxY) {
            line.frame = CGRectOffset(line.frame, 0, 5);
        } else {
            flag = !flag; // reached the bottom — reverse direction
        }
    } else {
        if (line.frame.origin.y > minY) {
            line.frame = CGRectOffset(line.frame, 0, -5);
        } else {
            flag = !flag; // reached the top — reverse direction
        }
    }
}