2 * Copyright 2009 Jeff Verkoeyen
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
17 #import "ZXingWidgetController.h"
19 #import "NSString+HTML.h"
20 #import "ResultParser.h"
21 #import "ParsedResult.h"
22 #import "ResultAction.h"
23 #include <sys/types.h>
24 #include <sys/sysctl.h>
26 #import <AVFoundation/AVFoundation.h>
28 #define CAMERA_SCALAR 1.12412 // scalar = (480 / (2048 / 480))
29 #define FIRST_TAKE_DELAY 1.0
30 #define ONE_D_BAND_HEIGHT 10.0
32 @interface ZXingWidgetController ()
34 @property BOOL showCancel;
35 @property BOOL oneDMode;
42 @implementation ZXingWidgetController
45 @synthesize captureSession;
46 @synthesize prevLayer;
48 @synthesize result, delegate, soundToPlay;
49 @synthesize overlayView;
50 @synthesize oneDMode, showCancel;
// Designated initializer: records the scan delegate and mode flags, opts into
// full-screen layout, and installs a full-screen OverlayView that reports back
// to this controller.
// NOTE(review): this excerpt elides original lines 60-61 and 64 (the rest of
// the OverlayView initializer call and the closing braces are not visible).
54 - (id)initWithDelegate:(id<ZXingDelegate>)scanDelegate showCancel:(BOOL)shouldShowCancel OneDMode:(BOOL)shouldUseoOneDMode {
55   if (self = [super init]) {
56     [self setDelegate:scanDelegate];
57     self.oneDMode = shouldUseoOneDMode;
58     self.showCancel = shouldShowCancel;
59     self.wantsFullScreenLayout = YES;
62     OverlayView *theOverLayView = [[OverlayView alloc] initWithFrame:[UIScreen mainScreen].bounds
63                                                        cancelEnabled:showCancel
65     [theOverLayView setDelegate:self];
66     self.overlayView = theOverLayView;
// MRC: the retained `overlayView` property now owns the view, so balance the
// +alloc here.
67     [theOverLayView release];
// NOTE(review): method signature is elided in this excerpt; by content these
// lines appear to belong to -dealloc (MRC teardown) — confirm in full source.
// beepSound uses -1 as a "no sound loaded" sentinel; only dispose a real ID.
74   if (beepSound != -1) {
75     AudioServicesDisposeSystemSoundID(beepSound);
// Release retained ivars (MRC).
80   [soundToPlay release];
81   [overlayView release];
// NOTE(review): enclosing method signature elided; these lines look like the
// overlay's cancel handler — restore the status bar, then inform the delegate.
88   [[UIApplication sharedApplication] setStatusBarHidden:NO];
// Messaging nil would be a harmless no-op, but the explicit check documents
// that a delegate is optional.
90   if (delegate != nil) {
91     [delegate zxingControllerDidCancel:self];
// Returns the raw hardware model string (e.g. "iPhone1,1") via the
// hw.machine sysctl: first call obtains the buffer size, second fills it.
// NOTE(review): this excerpt elides original line 96 (presumably the
// `size_t size;` declaration) and the trailing free/return — confirm the full
// method frees `machine` before returning.
95 - (NSString *)getPlatform {
97   sysctlbyname("hw.machine", NULL, &size, NULL, 0);
98   char *machine = malloc(size);
99   sysctlbyname("hw.machine", machine, &size, NULL, 0);
100   NSString *platform = [NSString stringWithCString:machine encoding:NSASCIIStringEncoding];
// NOTE(review): signature elided; this fragment tests for fixed-focus camera
// hardware — iPhone 2G ("iPhone1,1") and 3G ("iPhone1,2") cannot autofocus.
106   NSString *platform = [self getPlatform];
107   if ([platform isEqualToString:@"iPhone1,1"] ||
108       [platform isEqualToString:@"iPhone1,2"]) return YES;
// Lazily creates the beep SystemSoundID from the optional `soundToPlay` URL
// just before the scanner becomes visible.
// NOTE(review): closing braces (original lines 120+) are elided here.
112 - (void)viewWillAppear:(BOOL)animated {
113   [super viewWillAppear:animated];
114   self.wantsFullScreenLayout = YES;
115   //[[UIApplication sharedApplication] setStatusBarHidden:YES];
116   if ([self soundToPlay] != nil) {
// Cast is a toll-free bridge: NSURL* -> CFURLRef for the C AudioServices API.
117     OSStatus error = AudioServicesCreateSystemSoundID((CFURLRef)[self soundToPlay], &beepSound);
118     if (error != kAudioServicesNoError) {
119       NSLog(@"Problem loading nearSound.caf");
// Hides the status bar for full-screen preview and (re)attaches the overlay.
// NOTE(review): original lines 128-131 are elided — the capture-session setup
// call presumably happens there; confirm against the full source.
124 - (void)viewDidAppear:(BOOL)animated {
125   [super viewDidAppear:animated];
126   [[UIApplication sharedApplication] setStatusBarHidden:YES];
127   //self.wantsFullScreenLayout = YES;
132   [self.view addSubview:overlayView];
133   //	[self loadImagePicker];
134   //	self.view = imagePicker.view;
// Clear any result points left over from a previous scan.
136   [overlayView setPoints:nil];
// Restores the status bar and detaches the overlay when the scanner goes away.
140 - (void)viewDidDisappear:(BOOL)animated {
141   [super viewDidDisappear:animated];
142   [[UIApplication sharedApplication] setStatusBarHidden:NO];
143   [self.overlayView removeFromSuperview];
// Returns a copy of `imgRef` rotated -90 degrees, drawn into a new bitmap
// context sized to the rotated bounds.
// NOTE(review): this excerpt elides original lines 148, 152, 156, 161-163 and
// 180-181 (parts of the CGBitmapContextCreate argument list, the final draw
// argument, and the return statement) — review against the full source.
147 - (CGImageRef)CGImageRotated90:(CGImageRef)imgRef
149   CGFloat angleInRadians = -90 * (M_PI / 180);
150   CGFloat width = CGImageGetWidth(imgRef);
151   CGFloat height = CGImageGetHeight(imgRef);
153   CGRect imgRect = CGRectMake(0, 0, width, height);
154   CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
155   CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
157   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
158   CGContextRef bmContext = CGBitmapContextCreate(NULL,
159                                                  rotatedRect.size.width,
160                                                  rotatedRect.size.height,
164                                                  kCGImageAlphaPremultipliedFirst);
// Antialiasing/interpolation off: this is for barcode decoding, where
// resampling artifacts would hurt recognition.
165   CGContextSetAllowsAntialiasing(bmContext, FALSE);
166   CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
167   CGColorSpaceRelease(colorSpace);
168   //      CGContextTranslateCTM(bmContext,
169   //                                                +(rotatedRect.size.width/2),
170   //                                                +(rotatedRect.size.height/2));
171   CGContextScaleCTM(bmContext, rotatedRect.size.width/rotatedRect.size.height, 1.0);
172   CGContextTranslateCTM(bmContext, 0.0, rotatedRect.size.height);
173   CGContextRotateCTM(bmContext, angleInRadians);
174   //      CGContextTranslateCTM(bmContext,
175   //                                                -(rotatedRect.size.width/2),
176   //                                                -(rotatedRect.size.height/2));
177   CGContextDrawImage(bmContext, CGRectMake(0, 0,
178                                            rotatedRect.size.width,
179                                            rotatedRect.size.height),
182   CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
183   CFRelease(bmContext);
// MRC trick: cast the CGImageRef to id and autorelease it so the caller
// receives a +0 reference without having to CGImageRelease it explicitly.
184   [(id)rotatedImage autorelease];
// Returns a copy of `imgRef` rotated 180 degrees; same bitmap-context recipe
// as CGImageRotated90: (antialiasing off, autoreleased result).
// NOTE(review): this excerpt elides original lines 190, 194, 197-201, 207-208,
// 211-212 and the return statement — parts of the CGBitmapContextCreate
// arguments and the translate offsets are not visible.
189 - (CGImageRef)CGImageRotated180:(CGImageRef)imgRef
191   CGFloat angleInRadians = M_PI;
192   CGFloat width = CGImageGetWidth(imgRef);
193   CGFloat height = CGImageGetHeight(imgRef);
195   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
196   CGContextRef bmContext = CGBitmapContextCreate(NULL,
202                                                  kCGImageAlphaPremultipliedFirst);
203   CGContextSetAllowsAntialiasing(bmContext, FALSE);
204   CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
205   CGColorSpaceRelease(colorSpace);
// Rotate about the image center: translate, rotate pi, translate back
// (offsets elided in this excerpt).
206   CGContextTranslateCTM(bmContext,
209   CGContextRotateCTM(bmContext, angleInRadians);
210   CGContextTranslateCTM(bmContext,
213   CGContextDrawImage(bmContext, CGRectMake(0, 0, width, height), imgRef);
215   CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
216   CFRelease(bmContext);
// MRC trick: hand back a +0 (autoreleased) CGImageRef, as in CGImageRotated90:.
217   [(id)rotatedImage autorelease];
222 // DecoderDelegate methods
// Progress callback fired before a frame is decoded; logging only.
224 - (void)decoder:(Decoder *)decoder willDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset{
226   NSLog(@"DecoderViewController MessageWhileDecodingWithDimensions: Decoding image (%.0fx%.0f) ...", image.size.width, image.size.height);
// Progress callback fired while decoding; body (if any) is elided in this
// excerpt.
230 - (void)decoder:(Decoder *)decoder
231   decodingImage:(UIImage *)image
232     usingSubset:(UIImage *)subset {
// Parses the raw decoded text into a ParsedResult and plays the beep if a
// sound was loaded (-1 is the "no sound" sentinel).
235 - (void)presentResultForString:(NSString *)resultString {
236   self.result = [ResultParser parsedResultForString:resultString];
237   if (beepSound != -1) {
238     AudioServicesPlaySystemSound(beepSound);
241   NSLog(@"result string = %@", resultString);
// Forwards the decoder's finder-pattern points to the overlay for display.
245 - (void)presentResultPoints:(NSMutableArray *)resultPoints
246                    forImage:(UIImage *)image
247                 usingSubset:(UIImage *)subset {
248   // simply add the points to the image view
249   [overlayView setPoints:resultPoints];
// Success callback: shows result + points, then notifies the app delegate on
// the next run-loop pass so the overlay has a chance to render the points.
252 - (void)decoder:(Decoder *)decoder didDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset withResult:(TwoDDecoderResult *)twoDResult {
253   [self presentResultForString:[twoDResult text]];
254   [self presentResultPoints:[twoDResult points] forImage:image usingSubset:subset];
255   // now, in a selector, call the delegate to give this overlay time to show the points
// -copy keeps the string alive past the decoder's lifetime; NOTE(review):
// under MRC this copy is never released here — check whether alertDelegate:
// (or the runtime) balances it.
256   [self performSelector:@selector(alertDelegate:) withObject:[[twoDResult text] copy] afterDelay:0.0];
// Detach so no further callbacks arrive after a successful scan.
257   decoder.delegate = nil;
// Deferred delegate notification (scheduled from the didDecodeImage callback):
// restores the status bar and hands the scanned text to the client delegate.
260 - (void)alertDelegate:(id)text {
261   [[UIApplication sharedApplication] setStatusBarHidden:NO];
262   if (delegate != nil) {
263     [delegate zxingController:self didScanResult:text];
// Failure callback: detach from the decoder and clear any stale overlay points.
268 - (void)decoder:(Decoder *)decoder failedToDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset reason:(NSString *)reason {
269   decoder.delegate = nil;
270   [overlayView setPoints:nil];
// Streams candidate finder-pattern points to the overlay as decoding proceeds.
273 - (void)decoder:(Decoder *)decoder foundPossibleResultPoint:(CGPoint)point {
274   [overlayView setPoint:point];
// Debug hooks for the AVCaptureSession notifications registered in
// initCapture; both bodies are just commented-out logging.
278 - (void)stopPreview:(NSNotification*)notification {
279   // NSLog(@"stop preview");
282 - (void)notification:(NSNotification*)notification {
283   //  NSLog(@"notification %@", notification.name);
// Builds the AVFoundation capture pipeline: default video device -> BGRA
// video-data output delivered to self on the main queue -> preview layer.
// NOTE(review): this excerpt elides several original lines (e.g. 292, the
// deviceInput error: argument; 303, 306-309; the addObserver: targets at
// 311/317/323/329/335/341; and 349) — verify the full registration calls.
287 - (void)initCapture {
289   AVCaptureDeviceInput *captureInput =
290     [AVCaptureDeviceInput deviceInputWithDevice:
291             [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
293   AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop late frames rather than queueing them — decoding is best-effort.
294   captureOutput.alwaysDiscardsLateVideoFrames = YES;
// Sample buffers arrive on the main queue; the delegate callback below runs
// there.
295   [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// Request BGRA frames so the bitmap context in the capture callback can read
// them directly.
296   NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
297   NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
298   NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
299   [captureOutput setVideoSettings:videoSettings];
// MRC: the retained property owns the session; release balances the +alloc.
300   self.captureSession = [[AVCaptureSession alloc] init];
301   [self.captureSession release];
302   self.captureSession.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on a 4
304   [self.captureSession addInput:captureInput];
305   [self.captureSession addOutput:captureOutput];
307   [captureOutput release];
// Observe session lifecycle/error notifications (handlers above are
// currently log-only stubs).
310   [[NSNotificationCenter defaultCenter]
312                     selector:@selector(stopPreview:)
313                         name:AVCaptureSessionDidStopRunningNotification
314                       object:self.captureSession];
316   [[NSNotificationCenter defaultCenter]
318                     selector:@selector(notification:)
319                         name:AVCaptureSessionDidStopRunningNotification
320                       object:self.captureSession];
322   [[NSNotificationCenter defaultCenter]
324                     selector:@selector(notification:)
325                         name:AVCaptureSessionRuntimeErrorNotification
326                       object:self.captureSession];
328   [[NSNotificationCenter defaultCenter]
330                     selector:@selector(notification:)
331                         name:AVCaptureSessionDidStartRunningNotification
332                       object:self.captureSession];
334   [[NSNotificationCenter defaultCenter]
336                     selector:@selector(notification:)
337                         name:AVCaptureSessionWasInterruptedNotification
338                       object:self.captureSession];
340   [[NSNotificationCenter defaultCenter]
342                     selector:@selector(notification:)
343                         name:AVCaptureSessionInterruptionEndedNotification
344                       object:self.captureSession];
// Create the preview layer once; fill the controller's view, cropping to
// preserve aspect.
347   if (!self.prevLayer) {
348     self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
350   // NSLog(@"prev %p %@", self.prevLayer, self.prevLayer);
351   self.prevLayer.frame = self.view.bounds;
352   self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
353   [self.view.layer addSublayer: self.prevLayer];
355   [self.captureSession startRunning];
// Per-frame capture callback (runs on the main queue, per initCapture):
// copies the BGRA pixel buffer, crops to the overlay's scan rect (or a thin
// vertical band in 1D mode), rotates it upright, and hands the image to a
// Decoder.
// NOTE(review): this excerpt elides many original lines (363-366, 374, 376,
// 383-384, 389, 392-393, 396, 398, 402, 412-413, 418-419, 422, 427, 430,
// 436-437, 441-445), including the `decoding` guard, the oneDMode branch
// header, the free of `free_me`, and the decoder delegate/release teardown —
// review memory management of `free_me`, `scrn`, and `d` in the full source.
360 - (void)captureOutput:(AVCaptureOutput *)captureOutput
361 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
362        fromConnection:(AVCaptureConnection *)connection
367   CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
368   /*Lock the image buffer*/
369   CVPixelBufferLockBaseAddress(imageBuffer,0);
370   /*Get information about the image*/
371   size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
372   size_t width = CVPixelBufferGetWidth(imageBuffer);
373   size_t height = CVPixelBufferGetHeight(imageBuffer);
375   uint8_t* baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Workaround: copy the pixel data into our own heap buffer instead of
// pointing the bitmap context at the CVPixelBuffer's memory (comment in the
// original suggests an iOS bug). `free_me` tracks the malloc for later free
// (the free call is elided in this excerpt).
377   if (true) { // iOS bug?
378     uint8_t* tmp = baseAddress;
379     int bytes = bytesPerRow*height;
380     free_me = baseAddress = (uint8_t*)malloc(bytes);
381     baseAddress[0] = 0xdb;
382     memcpy(baseAddress,tmp,bytes);
385   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// BGRA layout matches the kCVPixelFormatType_32BGRA frames requested in
// initCapture.
386   CGContextRef newContext =
387     CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
388                           kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst);
390   CGImageRef capture = CGBitmapContextCreateImage(newContext);
391   CVPixelBufferUnlockBaseAddress(imageBuffer,0);
394   CGContextRelease(newContext);
395   CGColorSpaceRelease(colorSpace);
397   CGRect cropRect = [overlayView cropRect];
// 1D mode: decode only a narrow vertical band centered on the overlay's red
// line (the enclosing oneDMode condition is elided in this excerpt).
399     // let's just give the decoder a vertical band right above the red line
400     cropRect.origin.x = cropRect.origin.x + (cropRect.size.width / 2) - (ONE_D_BAND_HEIGHT + 1);
401     cropRect.size.width = ONE_D_BAND_HEIGHT;
403     CGImageRef croppedImg = CGImageCreateWithImageInRect(capture, cropRect);
// Both helpers return autoreleased images, so the intermediates need no
// explicit release; only the original croppedImg does.
404     capture = [self CGImageRotated90:croppedImg];
405     capture = [self CGImageRotated180:capture];
406     //              UIImageWriteToSavedPhotosAlbum([UIImage imageWithCGImage:capture], nil, nil, nil);
407     CGImageRelease(croppedImg);
408     cropRect.origin.x = 0.0;
409     cropRect.origin.y = 0.0;
410     cropRect.size.width = CGImageGetWidth(capture);
411     cropRect.size.height = CGImageGetHeight(capture);
414   // Won't work if the overlay becomes uncentered ...
415   // iOS always takes videos in landscape
416   // images are always 4x3; device is not
417   // iOS uses virtual pixels for non-image stuff
420   float height = CGImageGetHeight(capture);
421   float width = CGImageGetWidth(capture);
// Swap screen dimensions: the captured frame is landscape while the UI is
// portrait.
423   CGRect screen = UIScreen.mainScreen.bounds;
424   float tmp = screen.size.width;
425   screen.size.width = screen.size.height;;
426   screen.size.height = tmp;
// Center the crop rect within the captured frame (assumes a centered
// overlay, per the comment above).
428   cropRect.origin.x = (width-cropRect.size.width)/2;
429   cropRect.origin.y = (height-cropRect.size.height)/2;
431   CGImageRef newImage = CGImageCreateWithImageInRect(capture, cropRect);
432   CGImageRelease(capture);
433   UIImage *scrn = [[UIImage alloc] initWithCGImage:newImage];
434   CGImageRelease(newImage);
435   Decoder *d = [[Decoder alloc] init];
438   cropRect.origin.x = 0.0;
439   cropRect.origin.y = 0.0;
// If decodeImage: returned YES the decode completed synchronously, so stop
// flagging ourselves as busy; otherwise keep `decoding` set.
440   decoding = [d decodeImage:scrn cropRect:cropRect] == YES ? NO : YES;
// Tears down the capture pipeline: stop the session, detach its input/output,
// remove the preview layer, and defer the layer's final release.
// NOTE(review): this excerpt elides original lines 447-448, 455-456, 459,
// 463-467 — including the dispatch_after block body (presumably the deferred
// [layer release]) — confirm the retain at line 461 is balanced there.
446 - (void)stopCapture {
449   [captureSession stopRunning];
450   AVCaptureInput* input = [captureSession.inputs objectAtIndex:0];
451   [captureSession removeInput:input];
452   AVCaptureVideoDataOutput* output = (AVCaptureVideoDataOutput*)[captureSession.outputs objectAtIndex:0];
453   [captureSession removeOutput:output];
454   [self.prevLayer removeFromSuperlayer];
457   // heebee jeebees here ... is iOS still writing into the layer?
458   if (self.prevLayer) {
460     AVCaptureVideoPreviewLayer* layer = prevLayer;
// Keep the layer alive past the property nil-out below; released later in
// the dispatch_after block (body elided). 12000000000 ns = 12 s delay.
461     [self.prevLayer retain];
462     dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 12000000000), dispatch_get_main_queue(), ^{
468   self.prevLayer = nil;
469   self.captureSession = nil;