Small style stuff
[zxing.git] / iphone / ZXingWidget / Classes / ZXingWidgetController.m
1 /**
2  * Copyright 2009 Jeff Verkoeyen
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *    http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16
17 #import "ZXingWidgetController.h"
18 #import "Decoder.h"
19 #import "NSString+HTML.h"
20 #import "ResultParser.h"
21 #import "ParsedResult.h"
22 #import "ResultAction.h"
23 #include <sys/types.h>
24 #include <sys/sysctl.h>
25
26 #import <AVFoundation/AVFoundation.h>
27
28 #define CAMERA_SCALAR 1.12412 // scalar = (480 / (2048 / 480))
29 #define FIRST_TAKE_DELAY 1.0
30 #define ONE_D_BAND_HEIGHT 10.0
31
@interface ZXingWidgetController ()

// Redeclared privately so the initializer can set them; consulted when
// building the overlay and inside the capture callback.
@property BOOL showCancel;
@property BOOL oneDMode;

// Builds and starts the AVFoundation capture pipeline (no-op without HAS_AVFF).
- (void)initCapture;
// Stops decoding and tears the capture pipeline down.
- (void)stopCapture;

@end
41
@implementation ZXingWidgetController

// Capture-session state exists only when AVFoundation capture is compiled in.
#if HAS_AVFF
@synthesize captureSession;
@synthesize prevLayer;
#endif
@synthesize result, delegate, soundToPlay;
@synthesize overlayView;
@synthesize oneDMode, showCancel;
@synthesize readers;
52
53
// Designated initializer. Stores the scan delegate and UI options and builds
// the full-screen overlay; the capture session itself is created lazily in
// viewDidAppear:.
- (id)initWithDelegate:(id<ZXingDelegate>)scanDelegate showCancel:(BOOL)shouldShowCancel OneDMode:(BOOL)shouldUseoOneDMode {
  self = [super init];
  if (self) {
    [self setDelegate:scanDelegate];
    self.oneDMode = shouldUseoOneDMode;
    self.showCancel = shouldShowCancel;
    self.wantsFullScreenLayout = YES;
    beepSound = -1;   // sentinel: no beep sound loaded yet (see viewWillAppear:)
    decoding = NO;    // frame decoding is switched on in viewDidAppear:
    OverlayView *overlay = [[OverlayView alloc] initWithFrame:[UIScreen mainScreen].bounds 
                                                cancelEnabled:showCancel 
                                                     oneDMode:oneDMode];
    [overlay setDelegate:self];
    self.overlayView = overlay;
    [overlay release];   // the retaining property now owns it (MRC)
  }
  return self;
}
72
// MRC teardown: dispose the loaded system sound (if any), stop the capture
// pipeline, then release retained ivars.
- (void)dealloc {
  // beepSound stays -1 until a sound is successfully created in viewWillAppear:.
  if (beepSound != -1) {
    AudioServicesDisposeSystemSoundID(beepSound);
  }
  
  [self stopCapture];

  [soundToPlay release];
  [overlayView release];
  [readers release];
  [super dealloc];
}
85
// OverlayView delegate callback: the user tapped Cancel. Stop scanning,
// restore the status bar, and notify the ZXingDelegate.
- (void)cancelled {
  [self stopCapture];
  [[UIApplication sharedApplication] setStatusBarHidden:NO];
  wasCancelled = YES;
  if (delegate) {
    [delegate zxingControllerDidCancel:self];
  }
}
94
// Returns the raw hardware model identifier (e.g. @"iPhone1,2"), or nil if
// it cannot be determined. (Messaging nil downstream is safe.)
- (NSString *)getPlatform {
  size_t size = 0;
  // First call with a NULL buffer just reports the required buffer size.
  if (sysctlbyname("hw.machine", NULL, &size, NULL, 0) != 0 || size == 0) {
    return nil;
  }
  char *machine = malloc(size);
  if (machine == NULL) {
    return nil;
  }
  NSString *platform = nil;
  if (sysctlbyname("hw.machine", machine, &size, NULL, 0) == 0) {
    platform = [NSString stringWithCString:machine encoding:NSASCIIStringEncoding];
  }
  free(machine);
  return platform;
}
104
// YES on hardware whose camera has a fixed-focus lens (original iPhone and
// iPhone 3G), where 2-D barcode decoding is unreliable.
- (BOOL)fixedFocus {
  NSString *platform = [self getPlatform];
  return [platform isEqualToString:@"iPhone1,1"] ||
         [platform isEqualToString:@"iPhone1,2"];
}
111
// Loads the optional beep sound each time the view is about to appear.
- (void)viewWillAppear:(BOOL)animated {
  [super viewWillAppear:animated];
  self.wantsFullScreenLayout = YES;
  //[[UIApplication sharedApplication] setStatusBarHidden:YES];
  if ([self soundToPlay] != nil) {
    // Dispose any sound created on a previous appearance so repeated
    // presentations of this controller don't leak SystemSoundIDs.
    if (beepSound != -1) {
      AudioServicesDisposeSystemSoundID(beepSound);
      beepSound = -1;
    }
    OSStatus error = AudioServicesCreateSystemSoundID((CFURLRef)[self soundToPlay], &beepSound);
    if (error != kAudioServicesNoError) {
      NSLog(@"Problem loading nearSound.caf");
      // Keep the sentinel consistent so dealloc never disposes a bogus ID.
      beepSound = -1;
    }
  }
}
123
// Scanning actually begins here: the capture pipeline is (re)built and the
// overlay is layered on top of the preview each time the view appears.
- (void)viewDidAppear:(BOOL)animated {
  [super viewDidAppear:animated];
  [[UIApplication sharedApplication] setStatusBarHidden:YES];
  //self.wantsFullScreenLayout = YES;

  // Allow the capture callback to start handing frames to the decoder.
  decoding = YES;

  [self initCapture];
  [self.view addSubview:overlayView];
  // [self loadImagePicker];
  // self.view = imagePicker.view;
  
  // Clear any result points left over from a previous scan.
  [overlayView setPoints:nil];
  wasCancelled = NO;
}
139
// Mirror of viewDidAppear:: restore the status bar and tear down the overlay
// and capture pipeline when the view goes away.
- (void)viewDidDisappear:(BOOL)animated {
  [super viewDidDisappear:animated];
  [[UIApplication sharedApplication] setStatusBarHidden:NO];
  [self.overlayView removeFromSuperview];
  [self stopCapture];
}
146
// Returns an autoreleased CGImage that is imgRef rotated by -90 degrees,
// used in 1-D mode to turn the vertical scan band into the horizontal strip
// the barcode readers expect. NOTE(review): the CTM also scales x by
// width/height before rotating, so the output is not a pure rotation —
// presumably intentional for the band's aspect; confirm before reusing.
- (CGImageRef)CGImageRotated90:(CGImageRef)imgRef
{
  CGFloat angleInRadians = -90 * (M_PI / 180);
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);
  
  // Bounding box of the source rect after rotation; sized for the new bitmap.
  CGRect imgRect = CGRectMake(0, 0, width, height);
  CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
  CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
  
  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef bmContext = CGBitmapContextCreate(NULL,
                                                 rotatedRect.size.width,
                                                 rotatedRect.size.height,
                                                 8,
                                                 0,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
  // Nearest-neighbor, no AA: keep barcode edges crisp for the decoder.
  CGContextSetAllowsAntialiasing(bmContext, FALSE);
  CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
  CGColorSpaceRelease(colorSpace);
  //      CGContextTranslateCTM(bmContext,
  //                                                +(rotatedRect.size.width/2),
  //                                                +(rotatedRect.size.height/2));
  CGContextScaleCTM(bmContext, rotatedRect.size.width/rotatedRect.size.height, 1.0);
  CGContextTranslateCTM(bmContext, 0.0, rotatedRect.size.height);
  CGContextRotateCTM(bmContext, angleInRadians);
  //      CGContextTranslateCTM(bmContext,
  //                                                -(rotatedRect.size.width/2),
  //                                                -(rotatedRect.size.height/2));
  CGContextDrawImage(bmContext, CGRectMake(0, 0,
                                           rotatedRect.size.width,
                                           rotatedRect.size.height),
                     imgRef);
  
  CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
  CFRelease(bmContext);
  // Relies on CF objects responding to NSObject messages at runtime: the
  // +1 image from CGBitmapContextCreateImage is autoreleased so callers
  // must NOT CGImageRelease the result.
  [(id)rotatedImage autorelease];
  
  return rotatedImage;
}
188
// Returns an autoreleased CGImage that is imgRef rotated 180 degrees about
// its center (same dimensions as the input).
- (CGImageRef)CGImageRotated180:(CGImageRef)imgRef
{
  CGFloat angleInRadians = M_PI;
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);
  
  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef bmContext = CGBitmapContextCreate(NULL,
                                                 width,
                                                 height,
                                                 8,
                                                 0,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
  // Nearest-neighbor, no AA: keep barcode edges crisp for the decoder.
  CGContextSetAllowsAntialiasing(bmContext, FALSE);
  CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
  CGColorSpaceRelease(colorSpace);
  // Rotate about the image center: translate to center, rotate, translate back.
  CGContextTranslateCTM(bmContext,
                        +(width/2),
                        +(height/2));
  CGContextRotateCTM(bmContext, angleInRadians);
  CGContextTranslateCTM(bmContext,
                        -(width/2),
                        -(height/2));
  CGContextDrawImage(bmContext, CGRectMake(0, 0, width, height), imgRef);
  
  CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
  CFRelease(bmContext);
  // Relies on CF objects responding to NSObject messages at runtime: the
  // +1 image is autoreleased so callers must NOT CGImageRelease the result.
  [(id)rotatedImage autorelease];
  
  return rotatedImage;
}
221
222 // DecoderDelegate methods
223
// DecoderDelegate: invoked just before decoding begins; debug logging only.
- (void)decoder:(Decoder *)decoder willDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset{
#ifdef DEBUG
  NSLog(@"DecoderViewController MessageWhileDecodingWithDimensions: Decoding image (%.0fx%.0f) ...",
        image.size.width, image.size.height);
#endif
}
229
// DecoderDelegate: progress callback while an image is being decoded.
// Intentionally empty — this widget has no per-step UI to update.
- (void)decoder:(Decoder *)decoder
  decodingImage:(UIImage *)image
     usingSubset:(UIImage *)subset {
}
234
// Plays the beep (if one was loaded) and parses the raw barcode text into
// the ParsedResult stored in self.result.
- (void)presentResultForString:(NSString *)resultString {
  if (beepSound != -1) {
    AudioServicesPlaySystemSound(beepSound);
  }
  self.result = [ResultParser parsedResultForString:resultString];
#ifdef DEBUG
  NSLog(@"result string = %@", resultString);
#endif
}
244
// Forwards the detected finder-pattern points to the overlay so they can be
// drawn on top of the camera preview.
- (void)presentResultPoints:(NSMutableArray *)resultPoints
                   forImage:(UIImage *)image
                usingSubset:(UIImage *)subset {
  // simply add the points to the image view
  [overlayView setPoints:resultPoints];
}
251
// DecoderDelegate: a barcode was decoded successfully. Shows the result and
// schedules the delegate notification on the next run-loop pass so the
// overlay gets a chance to draw the points first.
- (void)decoder:(Decoder *)decoder didDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset withResult:(TwoDDecoderResult *)twoDResult {
  [self presentResultForString:[twoDResult text]];
  [self presentResultPoints:[twoDResult points] forImage:image usingSubset:subset];
  // now, in a selector, call the delegate to give this overlay time to show the points
  // MRC ownership: the -copy here is balanced by the release in alertDelegate:.
  [self performSelector:@selector(alertDelegate:) withObject:[[twoDResult text] copy] afterDelay:0.0];
  decoder.delegate = nil;
}
259
// Invoked via performSelector: from the didDecodeImage callback. Hands the
// scanned text to the ZXingDelegate, then releases the copy the caller made.
- (void)alertDelegate:(id)text {        
  [[UIApplication sharedApplication] setStatusBarHidden:NO];
  if (delegate != nil) {
    [delegate zxingController:self didScanResult:text];
  }
  // Balances the [[twoDResult text] copy] in didDecodeImage (MRC).
  [text release];
}
267
// DecoderDelegate: this frame could not be decoded. Clear the overlay's
// points and detach from the finished decoder; the next frame will retry.
- (void)decoder:(Decoder *)decoder failedToDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset reason:(NSString *)reason {
  [overlayView setPoints:nil];
  decoder.delegate = nil;
}
272
// DecoderDelegate: a single candidate finder point was located; let the
// overlay display it as decoding progress feedback.
- (void)decoder:(Decoder *)decoder foundPossibleResultPoint:(CGPoint)point {
  [overlayView setPoint:point];
}
276
277 /*
278 - (void)stopPreview:(NSNotification*)notification {
279   // NSLog(@"stop preview");
280 }
281
282 - (void)notification:(NSNotification*)notification {
283   // NSLog(@"notification %@", notification.name);
284 }
285 */
286
// Builds the AVFoundation capture pipeline — camera input -> 32BGRA video
// data output (delegated to self on the main queue) -> preview layer — and
// starts the session. Compiles to a no-op without HAS_AVFF.
- (void)initCapture {
#if HAS_AVFF
  NSError *deviceError = nil;
  AVCaptureDeviceInput *captureInput =
    [AVCaptureDeviceInput deviceInputWithDevice:
            [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] 
                                          error:&deviceError];
  // Bail out rather than add a nil input to the session (which would throw)
  // when no camera is available — e.g. the simulator — or it failed to open.
  if (!captureInput) {
    NSLog(@"ZXingWidgetController: could not create capture input: %@", deviceError);
    return;
  }
  AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init]; 
  captureOutput.alwaysDiscardsLateVideoFrames = YES; 
  [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
  // Request 32BGRA frames so the capture callback can wrap the pixel buffer
  // in a CGBitmapContext directly.
  NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; 
  NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
  NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 
  [captureOutput setVideoSettings:videoSettings]; 
  // MRC: transfer ownership to the retaining property via a temporary
  // instead of the alloc-then-release-through-the-property dance.
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
  self.captureSession = session;
  [session release];
  self.captureSession.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on a 4

  [self.captureSession addInput:captureInput];
  [self.captureSession addOutput:captureOutput];

  [captureOutput release];

  if (!self.prevLayer) {
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
  }
  self.prevLayer.frame = self.view.bounds;
  self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  [self.view.layer addSublayer: self.prevLayer];

  [self.captureSession startRunning];
#endif
}
358
#if HAS_AVFF
// AVCaptureVideoDataOutput sample-buffer delegate (runs on the main queue).
// Wraps the BGRA frame in a CGImage, crops it to the overlay's scan
// rectangle (rotating the vertical band first in 1-D mode) and hands the
// result to a Decoder. Frames arriving while a decode is in flight are
// dropped via the `decoding` flag.
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection 
{
  if (!decoding) {
    return;
  }
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
  /*Lock the image buffer*/
  CVPixelBufferLockBaseAddress(imageBuffer,0); 
  /*Get information about the image*/
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
  size_t width = CVPixelBufferGetWidth(imageBuffer); 
  size_t height = CVPixelBufferGetHeight(imageBuffer); 
    
  uint8_t* baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
  void* free_me = 0;
  if (true) { // iOS bug? — work on a private copy of the pixel data
    uint8_t* tmp = baseAddress;
    size_t bytes = bytesPerRow*height;  // size_t: avoid int overflow on large frames
    free_me = baseAddress = (uint8_t*)malloc(bytes);
    baseAddress[0] = 0xdb;
    memcpy(baseAddress,tmp,bytes);
  }

  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
  CGContextRef newContext =
    CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                          kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst); 

  CGImageRef capture = CGBitmapContextCreateImage(newContext); 
  CVPixelBufferUnlockBaseAddress(imageBuffer,0);
  free(free_me);

  CGContextRelease(newContext); 
  CGColorSpaceRelease(colorSpace);

  CGRect cropRect = [overlayView cropRect];
  if (oneDMode) {
    // let's just give the decoder a vertical band right above the red line
    cropRect.origin.x = cropRect.origin.x + (cropRect.size.width / 2) - (ONE_D_BAND_HEIGHT + 1);
    cropRect.size.width = ONE_D_BAND_HEIGHT;
    // do a rotate
    CGImageRef croppedImg = CGImageCreateWithImageInRect(capture, cropRect);
    // Release the full frame now that only the band is needed (was leaked).
    CGImageRelease(capture);
    capture = [self CGImageRotated90:croppedImg];
    capture = [self CGImageRotated180:capture];
    // The rotation helpers return autoreleased images; retain the final one
    // so the unconditional CGImageRelease(capture) below stays balanced
    // (previously this over-released an autoreleased image).
    CGImageRetain(capture);
    //              UIImageWriteToSavedPhotosAlbum([UIImage imageWithCGImage:capture], nil, nil, nil);
    CGImageRelease(croppedImg);
    cropRect.origin.x = 0.0;
    cropRect.origin.y = 0.0;
    cropRect.size.width = CGImageGetWidth(capture);
    cropRect.size.height = CGImageGetHeight(capture);
  }

  // Won't work if the overlay becomes uncentered ...
  // iOS always takes videos in landscape
  // images are always 4x3; device is not
  // iOS uses virtual pixels for non-image stuff

  {
    // Center the overlay's crop rectangle within the captured frame.
    float imgHeight = CGImageGetHeight(capture);
    float imgWidth = CGImageGetWidth(capture);

    cropRect.origin.x = (imgWidth-cropRect.size.width)/2;
    cropRect.origin.y = (imgHeight-cropRect.size.height)/2;
  }
  CGImageRef newImage = CGImageCreateWithImageInRect(capture, cropRect);
  CGImageRelease(capture);
  UIImage *scrn = [[UIImage alloc] initWithCGImage:newImage];
  CGImageRelease(newImage);
  Decoder *d = [[Decoder alloc] init];
  d.readers = readers;
  d.delegate = self;
  cropRect.origin.x = 0.0;
  cropRect.origin.y = 0.0;
  // Pause frame delivery while this image is decoded successfully; the
  // failure callback path leaves decoding enabled for the next frame.
  decoding = ![d decodeImage:scrn cropRect:cropRect];
  [d release];
  [scrn release];
} 
#endif
445
// Stops decoding and tears down the AVFoundation pipeline. Safe to call
// repeatedly: messages to a nil captureSession are no-ops, and the loops
// below simply do nothing once the session is gone.
- (void)stopCapture {
  decoding = NO;
#if HAS_AVFF
  [captureSession stopRunning];
  // Remove every input/output instead of assuming exactly one of each;
  // objectAtIndex:0 would throw NSRangeException if initCapture bailed out
  // early or never ran. Snapshot the arrays before mutating the session.
  for (AVCaptureInput *input in [NSArray arrayWithArray:captureSession.inputs]) {
    [captureSession removeInput:input];
  }
  for (AVCaptureOutput *output in [NSArray arrayWithArray:captureSession.outputs]) {
    [captureSession removeOutput:output];
  }
  [self.prevLayer removeFromSuperlayer];

  self.prevLayer = nil;
  self.captureSession = nil;
#endif
}
472
473 @end