Issue 479
[zxing.git] / iphone / ZXingWidget / Classes / ZXingWidgetController.m
index 3ea90e5..4599e3e 100755 (executable)
 #include <sys/types.h>
 #include <sys/sysctl.h>
 
+#import <AVFoundation/AVFoundation.h>
+
 #define CAMERA_SCALAR 1.12412 // scalar = (480 / (2048 / 480))
 #define FIRST_TAKE_DELAY 1.0
 #define ONE_D_BAND_HEIGHT 10.0
 
-CGImageRef UIGetScreenImage(void);
-
 @interface ZXingWidgetController ()
 
 @property BOOL showCancel;
 @property BOOL oneDMode;
 
-@property (nonatomic, retain) UIImagePickerController* imagePicker;
+- (void)initCapture;
+- (void)stopCapture;
 
 @end
 
-
-
-
-
 @implementation ZXingWidgetController
-@synthesize result, actions, delegate, soundToPlay;
+
+#if HAS_AVFF
+@synthesize captureSession;
+@synthesize prevLayer;
+#endif
+@synthesize result, delegate, soundToPlay;
 @synthesize overlayView;
 @synthesize oneDMode, showCancel;
-@synthesize imagePicker;
 @synthesize readers;
 
 
--(void)loadImagePicker {
-  if (self.imagePicker)
-  {
-    [imagePicker release];
-    imagePicker = nil;
-  }
-  UIImagePickerController* imController = [[UIImagePickerController alloc] init];
-  self.imagePicker = imController;
-  imagePicker.delegate = self;
-  [imController release];
-  imagePicker.wantsFullScreenLayout = YES;
-  if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera])
-    imagePicker.sourceType = UIImagePickerControllerSourceTypeCamera;
-  float zoomFactor = CAMERA_SCALAR;
-  if ([self fixedFocus]) {
-    zoomFactor *= 2.0;
-  }
-  if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera])
-    imagePicker.cameraViewTransform = CGAffineTransformScale(
-                                                             imagePicker.cameraViewTransform, zoomFactor, zoomFactor);
-  if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera])
-  {
-    imagePicker.showsCameraControls = NO;
-    imagePicker.cameraOverlayView = overlayView;
-    imagePicker.allowsEditing = NO;
-  }
-}
-
-- (void)unloadImagePicker {
-  if (self.imagePicker)
-  {
-    [imagePicker release];
-    imagePicker = nil;
-  }
-}
-
 - (id)initWithDelegate:(id<ZXingDelegate>)scanDelegate showCancel:(BOOL)shouldShowCancel OneDMode:(BOOL)shouldUseOneDMode {
   if (self = [super init]) {
     [self setDelegate:scanDelegate];
@@ -93,6 +58,7 @@ CGImageRef UIGetScreenImage(void);
     self.showCancel = shouldShowCancel;
     self.wantsFullScreenLayout = YES;
     beepSound = -1;
+    decoding = NO; // no frames are decoded until the view appears
     OverlayView *theOverLayView = [[OverlayView alloc] initWithFrame:[UIScreen mainScreen].bounds 
                                                        cancelEnabled:showCancel 
                                                             oneDMode:oneDMode];
@@ -108,14 +74,17 @@ CGImageRef UIGetScreenImage(void);
   if (beepSound != -1) {
     AudioServicesDisposeSystemSoundID(beepSound);
   }
-  imagePicker.cameraOverlayView = nil;
-  [imagePicker release];
+  
+  [self stopCapture];
+
+  [soundToPlay release];
   [overlayView release];
   [readers release];
   [super dealloc];
 }
 
 - (void)cancelled {
+  [self stopCapture];
   [[UIApplication sharedApplication] setStatusBarHidden:NO];
   wasCancelled = YES;
   if (delegate != nil) {
@@ -153,32 +122,28 @@ CGImageRef UIGetScreenImage(void);
 }
 
 - (void)viewDidAppear:(BOOL)animated {
-  NSLog(@"View did appear");
   [super viewDidAppear:animated];
   [[UIApplication sharedApplication] setStatusBarHidden:YES];
   //self.wantsFullScreenLayout = YES;
-  [self loadImagePicker];
-  self.view = imagePicker.view;
+
+  decoding = YES; // let captureOutput:didOutputSampleBuffer: feed the decoder
+
+  [self initCapture];
+  [self.view addSubview:overlayView];
   
   [overlayView setPoints:nil];
   wasCancelled = NO;
-  if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
-
-    [NSTimer scheduledTimerWithTimeInterval: FIRST_TAKE_DELAY
-                                     target: self
-                                   selector: @selector(takePicture:)
-                                   userInfo: nil
-                                    repeats: NO];
-  }
 }
 
-- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
-  //self.wantsFullScreenLayout = NO;
-  [UIApplication sharedApplication].statusBarHidden = NO;
-  [self cancelled];
+- (void)viewDidDisappear:(BOOL)animated {
+  [super viewDidDisappear:animated];
+  [[UIApplication sharedApplication] setStatusBarHidden:NO];
+  [self.overlayView removeFromSuperview];
+  [self stopCapture];
 }
 
-
 - (CGImageRef)CGImageRotated90:(CGImageRef)imgRef
 {
   CGFloat angleInRadians = -90 * (M_PI / 180);
@@ -254,41 +219,6 @@ CGImageRef UIGetScreenImage(void);
   return rotatedImage;
 }
 
-- (void)takePicture:(NSTimer*)theTimer {
-  CGImageRef capture = UIGetScreenImage();
-  static int cpt = 0;
-  if (cpt%10 == 0)
-    UIImageWriteToSavedPhotosAlbum([UIImage imageWithCGImage:capture], nil, nil,nil);
-  CGRect cropRect = [overlayView cropRect];
-  if (oneDMode) {
-    // let's just give the decoder a vertical band right above the red line
-    cropRect.origin.x = cropRect.origin.x + (cropRect.size.width / 2) - (ONE_D_BAND_HEIGHT + 1);
-    cropRect.size.width = ONE_D_BAND_HEIGHT;
-    // do a rotate
-    CGImageRef croppedImg = CGImageCreateWithImageInRect(capture, cropRect);
-    capture = [self CGImageRotated90:croppedImg];
-    capture = [self CGImageRotated180:capture];
-    //              UIImageWriteToSavedPhotosAlbum([UIImage imageWithCGImage:capture], nil, nil, nil);
-    CGImageRelease(croppedImg);
-    cropRect.origin.x = 0.0;
-    cropRect.origin.y = 0.0;
-    cropRect.size.width = CGImageGetWidth(capture);
-    cropRect.size.height = CGImageGetHeight(capture);
-  }
-  CGImageRef newImage = CGImageCreateWithImageInRect(capture, cropRect);
-  CGImageRelease(capture);
-  //UIImage *scrn = [UIImage imageWithCGImage:newImage];
-  UIImage *scrn = [[UIImage alloc] initWithCGImage:newImage];
-  CGImageRelease(newImage);
-  Decoder *d = [[Decoder alloc] init];
-  d.readers = readers;
-  d.delegate = self;
-  cropRect.origin.x = 0.0;
-  cropRect.origin.y = 0.0;
-  [d decodeImage:scrn cropRect:cropRect];
-  [scrn release];
-}
-
 // DecoderDelegate methods
 
 - (void)decoder:(Decoder *)decoder willDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset{
@@ -299,24 +229,20 @@ CGImageRef UIGetScreenImage(void);
 
 - (void)decoder:(Decoder *)decoder
   decodingImage:(UIImage *)image
-    usingSubset:(UIImage *)subset
-       progress:(NSString *)message {
+    usingSubset:(UIImage *)subset {
 }
 
 - (void)presentResultForString:(NSString *)resultString {
   self.result = [ResultParser parsedResultForString:resultString];
-  
   if (beepSound != -1) {
     AudioServicesPlaySystemSound(beepSound);
   }
 #ifdef DEBUG
   NSLog(@"result string = %@", resultString);
-  NSLog(@"result has %d actions", actions ? 0 : actions.count);
 #endif
-  //      [self updateToolbar];
 }
 
-- (void)presentResultPoints:(NSArray *)resultPoints
+- (void)presentResultPoints:(NSMutableArray *)resultPoints
                    forImage:(UIImage *)image
                 usingSubset:(UIImage *)subset {
   // simply add the points to the image view
@@ -327,9 +253,8 @@ CGImageRef UIGetScreenImage(void);
   [self presentResultForString:[twoDResult text]];
   [self presentResultPoints:[twoDResult points] forImage:image usingSubset:subset];
   // now, in a selector, call the delegate to give this overlay time to show the points
-  [self performSelector:@selector(alertDelegate:) withObject:[[twoDResult text] copy] afterDelay:1.0];
+  [self performSelector:@selector(alertDelegate:) withObject:[[twoDResult text] copy] afterDelay:0.0];
   decoder.delegate = nil;
-  [decoder release];
 }
 
 - (void)alertDelegate:(id)text {        
@@ -337,16 +262,212 @@ CGImageRef UIGetScreenImage(void);
   if (delegate != nil) {
     [delegate zxingController:self didScanResult:text];
   }
+  [text release]; // balances the -copy passed to performSelector:withObject:afterDelay:
 }
 
 - (void)decoder:(Decoder *)decoder failedToDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset reason:(NSString *)reason {
   decoder.delegate = nil;
-  [decoder release];
   [overlayView setPoints:nil];
-  if (!wasCancelled) {
-    [self takePicture:nil];
+}
+
+- (void)decoder:(Decoder *)decoder foundPossibleResultPoint:(CGPoint)point {
+  [overlayView setPoint:point];
+}
+
+/*
+- (void)stopPreview:(NSNotification*)notification {
+  // NSLog(@"stop preview");
+}
+
+- (void)notification:(NSNotification*)notification {
+  // NSLog(@"notification %@", notification.name);
+}
+*/
+
+- (void)initCapture {
+#if HAS_AVFF
+  NSError *error = nil;
+  AVCaptureDeviceInput *captureInput =
+    [AVCaptureDeviceInput deviceInputWithDevice:
+            [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
+                                          error:&error];
+  if (!captureInput) return; // no camera available (e.g. the simulator)
+  AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init]; 
+  captureOutput.alwaysDiscardsLateVideoFrames = YES; 
+  [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
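+  // Frames arrive on the main queue, so decode work there stalls the UI;
+  // alwaysDiscardsLateVideoFrames keeps capture from queuing up behind it.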
+  NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; 
+  NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
+  NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 
+  [captureOutput setVideoSettings:videoSettings]; 
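+  // 32BGRA matches the little-endian, skip-first bitmap context used when
+  // frames are converted to CGImages below.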
+  AVCaptureSession *session = [[AVCaptureSession alloc] init];
+  self.captureSession = session; // the retain property takes ownership
+  [session release];
+  self.captureSession.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on an iPhone 4
+
+  [self.captureSession addInput:captureInput];
+  [self.captureSession addOutput:captureOutput];
+
+  [captureOutput release];
+
+/*
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(stopPreview:)
+             name:AVCaptureSessionDidStopRunningNotification
+           object:self.captureSession];
+
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(notification:)
+             name:AVCaptureSessionDidStopRunningNotification
+           object:self.captureSession];
+
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(notification:)
+             name:AVCaptureSessionRuntimeErrorNotification
+           object:self.captureSession];
+
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(notification:)
+             name:AVCaptureSessionDidStartRunningNotification
+           object:self.captureSession];
+
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(notification:)
+             name:AVCaptureSessionWasInterruptedNotification
+           object:self.captureSession];
+
+  [[NSNotificationCenter defaultCenter]
+      addObserver:self
+         selector:@selector(notification:)
+             name:AVCaptureSessionInterruptionEndedNotification
+           object:self.captureSession];
+*/
+
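+  // Note: startRunning blocks the calling thread until the session is live.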
+  [self.captureSession startRunning];
+
+  if (!self.prevLayer) {
+    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
   }
-  //[self updateToolbar];
+  // NSLog(@"prev %p %@", self.prevLayer, self.prevLayer);
+  self.prevLayer.frame = self.view.bounds;
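+  // AspectFill scales the 4:3 frame up to fill the screen and crops the
+  // overflow, so the user sees a centered window onto the captured frame.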
+  self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+  [self.view.layer addSublayer: self.prevLayer];
+#endif
+}
+
+#if HAS_AVFF
+- (void)captureOutput:(AVCaptureOutput *)captureOutput 
+didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
+       fromConnection:(AVCaptureConnection *)connection 
+{ 
+  if (!decoding) {
+    return;
+  }
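+  // 'decoding' is switched on in viewDidAppear: and off again once a
+  // frame decodes successfully or capture stops.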
+  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
+  /*Lock the image buffer*/
+  CVPixelBufferLockBaseAddress(imageBuffer,0); 
+  /*Get information about the image*/
+  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
+  size_t width = CVPixelBufferGetWidth(imageBuffer); 
+  size_t height = CVPixelBufferGetHeight(imageBuffer); 
+    
+  uint8_t* baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
+  void* free_me = 0;
+  // Copy the frame out of the CVPixelBuffer before drawing from it;
+  // rendering straight from the buffer's base address has misbehaved
+  // (iOS bug?).
+  if (true) {
+    uint8_t* tmp = baseAddress;
+    size_t bytes = bytesPerRow * height;
+    free_me = baseAddress = (uint8_t*)malloc(bytes);
+    baseAddress[0] = 0xdb; // immediately overwritten by the memcpy below
+    memcpy(baseAddress, tmp, bytes);
+  }
+
+  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
+  CGContextRef newContext =
+    CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
+                          kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst); 
+
+  CGImageRef capture = CGBitmapContextCreateImage(newContext); 
+  CVPixelBufferUnlockBaseAddress(imageBuffer,0);
+  free(free_me);
+
+  CGContextRelease(newContext); 
+  CGColorSpaceRelease(colorSpace);
+
+  CGRect cropRect = [overlayView cropRect];
+  if (oneDMode) {
+    // let's just give the decoder a vertical band right above the red line
+    cropRect.origin.x = cropRect.origin.x + (cropRect.size.width / 2) - (ONE_D_BAND_HEIGHT + 1);
+    cropRect.size.width = ONE_D_BAND_HEIGHT;
+    // do a rotate
+    CGImageRef croppedImg = CGImageCreateWithImageInRect(capture, cropRect);
+    capture = [self CGImageRotated90:croppedImg];
+    capture = [self CGImageRotated180:capture];
+    //              UIImageWriteToSavedPhotosAlbum([UIImage imageWithCGImage:capture], nil, nil, nil);
+    CGImageRelease(croppedImg);
+    cropRect.origin.x = 0.0;
+    cropRect.origin.y = 0.0;
+    cropRect.size.width = CGImageGetWidth(capture);
+    cropRect.size.height = CGImageGetHeight(capture);
+  }
+
+  // This re-centering only works while the overlay's crop rect stays
+  // centered on screen: iOS always captures video in landscape, frames
+  // are 4:3 while the screen is not, and UIKit geometry is in points,
+  // not pixels.
+  {
+    float height = CGImageGetHeight(capture);
+    float width = CGImageGetWidth(capture);
+
+    cropRect.origin.x = (width - cropRect.size.width) / 2;
+    cropRect.origin.y = (height - cropRect.size.height) / 2;
+  }
+  CGImageRef newImage = CGImageCreateWithImageInRect(capture, cropRect);
+  CGImageRelease(capture);
+  UIImage *scrn = [[UIImage alloc] initWithCGImage:newImage];
+  CGImageRelease(newImage);
+  Decoder *d = [[Decoder alloc] init];
+  d.readers = readers;
+  d.delegate = self;
+  cropRect.origin.x = 0.0;
+  cropRect.origin.y = 0.0;
+  decoding = ![d decodeImage:scrn cropRect:cropRect]; // stop grabbing frames once one decodes
+  [d release];
+  [scrn release];
+} 
+#endif
+
+- (void)stopCapture {
+  decoding = NO;
+#if HAS_AVFF
+  [captureSession stopRunning];
+  if (captureSession.inputs.count > 0) {
+    [captureSession removeInput:[captureSession.inputs objectAtIndex:0]];
+  }
+  if (captureSession.outputs.count > 0) {
+    [captureSession removeOutput:[captureSession.outputs objectAtIndex:0]];
+  }
+  [self.prevLayer removeFromSuperlayer];
+
+/*
+  // heebee jeebees here ... is iOS still writing into the layer?
+  if (self.prevLayer) {
+    AVCaptureVideoPreviewLayer* layer = [self.prevLayer retain];
+    layer.session = nil;
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 12000000000), dispatch_get_main_queue(), ^{
+        [layer release];
+    });
+  }
+*/
+
+  self.prevLayer = nil;
+  self.captureSession = nil;
+#endif
 }
 
 @end
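
For reference, a minimal sketch of a host view controller driving the widget
after this change (manual reference counting, as in the rest of the file).
initWithDelegate:showCancel:OneDMode: and zxingController:didScanResult: are
taken from the diff above; the NSSet type of the readers property, the
QRCodeReader class, and the zxingControllerDidCancel: callback are assumptions
about the surrounding project:

  #import "ZXingWidgetController.h"
  #import "QRCodeReader.h" // assumed reader class

  - (void)scanPressed {
    ZXingWidgetController *widController =
      [[ZXingWidgetController alloc] initWithDelegate:self showCancel:YES OneDMode:NO];
    QRCodeReader *qrReader = [[QRCodeReader alloc] init];
    widController.readers = [NSSet setWithObject:qrReader]; // assumed NSSet
    [qrReader release];
    [self presentModalViewController:widController animated:YES];
    [widController release];
  }

  - (void)zxingController:(ZXingWidgetController *)controller didScanResult:(NSString *)result {
    NSLog(@"scanned: %@", result);
    [self dismissModalViewControllerAnimated:YES];
  }

  - (void)zxingControllerDidCancel:(ZXingWidgetController *)controller { // assumed callback
    [self dismissModalViewControllerAnimated:YES];
  }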