Merge pull request #31 from hyperoslo/swift-2.0
Swift 2.0
RamonGilabert committed Sep 23, 2015
2 parents 468fa85 + c3001cd commit 31c8ca5
Showing 15 changed files with 182 additions and 88 deletions.
@@ -180,7 +180,8 @@
29D699D11B70ABFC0021FA73 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0640;
LastSwiftUpdateCheck = 0700;
LastUpgradeCheck = 0700;
ORGANIZATIONNAME = "Ramon Gilabert Llop";
TargetAttributes = {
29D699D81B70ABFC0021FA73 = {
@@ -340,6 +341,7 @@
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
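One addition in the project file is worth a note: ENABLE_TESTABILITY = YES is what allows a Swift 2 test bundle to see the module's internal symbols through @testable import in debug builds. A minimal sketch of a test file relying on it (the test class is illustrative, not part of this commit):

    import XCTest
    // Requires ENABLE_TESTABILITY = YES in the debug configuration of the module under test.
    @testable import ImagePicker

    class ImagePickerInternalTests: XCTestCase {
      func testInternalSymbolsAreVisible() {
        // With @testable, internal types and members of ImagePicker are reachable from here.
        XCTAssertTrue(true)
      }
    }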
2 changes: 1 addition & 1 deletion Demo/ImagePickerDemo/ImagePickerDemo/Info.plist
@@ -7,7 +7,7 @@
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>Hyper.$(PRODUCT_NAME:rfc1034identifier)</string>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
2 changes: 1 addition & 1 deletion Demo/ImagePickerDemo/ImagePickerDemoTests/Info.plist
@@ -7,7 +7,7 @@
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>Hyper.$(PRODUCT_NAME:rfc1034identifier)</string>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
4 changes: 2 additions & 2 deletions Demo/ImagePickerDemo/Podfile.lock
@@ -6,9 +6,9 @@ DEPENDENCIES:

EXTERNAL SOURCES:
ImagePicker:
:path: ../../
:path: "../../"

SPEC CHECKSUMS:
ImagePicker: 32becfa25b8e9179e60c45411b577340d35e3e32

COCOAPODS: 0.37.2
COCOAPODS: 0.38.2
4 changes: 2 additions & 2 deletions Source/BottomView/BottomContainerView.swift
@@ -70,7 +70,7 @@ class BottomContainerView: UIView {

for view in [borderPickerButton, pickerButton, doneButton, stackView, topSeparator] {
addSubview(view)
view.setTranslatesAutoresizingMaskIntoConstraints(false)
view.translatesAutoresizingMaskIntoConstraints = false
}

backgroundColor = configuration.backgroundColor
@@ -79,7 +79,7 @@
setupConstraints()
}

required init(coder aDecoder: NSCoder) {
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

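Two changes in this file repeat across every view in the migration: the setter call setTranslatesAutoresizingMaskIntoConstraints(false) becomes the property assignment translatesAutoresizingMaskIntoConstraints = false, and init(coder:) is failable in Swift 2, so the required override becomes init?(coder:). A minimal Swift 2-style sketch of the pattern (the class and subview names are illustrative):

    import UIKit

    class ExampleContainerView: UIView {
      let button = UIButton()

      override init(frame: CGRect) {
        super.init(frame: frame)
        addSubview(button)
        // Swift 2: a settable property replaces the old setter method.
        button.translatesAutoresizingMaskIntoConstraints = false
      }

      // Swift 2: NSCoding's initializer is failable, hence init?(coder:).
      required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
      }
    }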
12 changes: 6 additions & 6 deletions Source/BottomView/ButtonPicker.swift
@@ -15,7 +15,7 @@ class ButtonPicker: UIButton {

lazy var numberLabel: UILabel = { [unowned self] in
let label = UILabel()
label.setTranslatesAutoresizingMaskIntoConstraints(false)
label.translatesAutoresizingMaskIntoConstraints = false
label.font = self.configuration.numberLabelFont

return label
@@ -33,7 +33,7 @@
override init(frame: CGRect) {
super.init(frame: frame)

[numberLabel].map { self.addSubview($0) }
addSubview(numberLabel)

subscribe()
setupButton()
@@ -61,7 +61,7 @@
object: nil)
}

required init(coder aDecoder: NSCoder) {
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

@@ -79,9 +79,9 @@
func setupConstraints() {
let attributes: [NSLayoutAttribute] = [.CenterX, .CenterY]

attributes.map {
self.addConstraint(NSLayoutConstraint(item: self.numberLabel, attribute: $0,
relatedBy: .Equal, toItem: self, attribute: $0,
for attribute in attributes {
addConstraint(NSLayoutConstraint(item: numberLabel, attribute: attribute,
relatedBy: .Equal, toItem: self, attribute: attribute,
multiplier: 1, constant: 0))
}
}
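This file also drops map used only for its side effects ([numberLabel].map { self.addSubview($0) } and the constraint loop) in favour of a direct call and a for loop, the idiomatic Swift 2 form that avoids discarding map's result. A small sketch of the constraint loop in that style (function and parameter names are illustrative):

    import UIKit

    func centerView(view: UIView, inSuperview superview: UIView) {
      let attributes: [NSLayoutAttribute] = [.CenterX, .CenterY]

      // Swift 2 style: iterate instead of mapping for side effects.
      for attribute in attributes {
        superview.addConstraint(NSLayoutConstraint(item: view, attribute: attribute,
          relatedBy: .Equal, toItem: superview, attribute: attribute,
          multiplier: 1, constant: 0))
      }
    }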
2 changes: 1 addition & 1 deletion Source/BottomView/ImageStack.swift
@@ -27,6 +27,6 @@ public class ImageStack {
}

public func containsImage(image: UIImage) -> Bool {
return contains(images, image)
return images.contains(image)
}
}
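This one-line change reflects a broader Swift 2 shift: free functions such as contains(_:_:) moved onto collections as protocol-extension methods. A small sketch of the method form, assuming an Equatable element type such as UIImage:

    import UIKit

    let images: [UIImage] = []
    let candidate = UIImage()

    // Swift 1.x: contains(images, candidate)
    // Swift 2.x: a method provided by a protocol extension on SequenceType.
    let alreadyPicked = images.contains(candidate)
    print(alreadyPicked)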
21 changes: 12 additions & 9 deletions Source/BottomView/StackView.swift
@@ -31,14 +31,18 @@ class ImageStackView: UIView {

override init(frame: CGRect) {
super.init(frame: frame)

subscribe()
views.map { self.addSubview($0) }

for view in views {
addSubview(view)
}

views[0].alpha = 1
layoutSubviews()
}

required init(coder aDecoder: NSCoder) {
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

@@ -74,10 +78,10 @@
let offset = -step * CGFloat(views.count)
var origin = CGPoint(x: offset, y: offset)

for (i, view) in enumerate(views) {
for (_, view) in views.enumerate() {
origin.x += step
origin.y += step
var frame = CGRect(origin: origin, size: viewSize)
let frame = CGRect(origin: origin, size: viewSize)
view.frame = frame
}
}
@@ -116,9 +120,9 @@ extension ImageStackView {
return
}

let photos = suffix(images, 4)
let photos = Array(images.suffix(4))

for (index, view) in enumerate(views) {
for (index, view) in views.enumerate() {
if index <= photos.count - 1 {
view.image = photos[index]
view.alpha = 1
@@ -132,7 +136,7 @@
private func animateImageView(imageView: UIImageView) {
imageView.transform = CGAffineTransformMakeScale(0, 0)

UIView.animateWithDuration(0.3, animations: { [unowned self] in
UIView.animateWithDuration(0.3, animations: {
imageView.transform = CGAffineTransformMakeScale(1.05, 1.05)
}, completion: { _ in
UIView.animateWithDuration(0.2, animations: { _ in
Expand All @@ -141,4 +145,3 @@ extension ImageStackView {
})
}
}
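The same free-function-to-method shift drives the loop changes in this file: enumerate(views) becomes views.enumerate() and suffix(images, 4) becomes images.suffix(4), wrapped in Array so the slice can be indexed from zero. A Swift 2-style sketch of filling an image stack from the most recent photos (function and parameter names are illustrative):

    import UIKit

    func renderStack(views: [UIImageView], images: [UIImage]) {
      // Keep only the four most recent images; suffix(_:) returns an ArraySlice.
      let photos = Array(images.suffix(4))

      for (index, view) in views.enumerate() {
        if index < photos.count {
          view.image = photos[index]
          view.alpha = 1
        } else {
          view.image = nil
          view.alpha = 0
        }
      }
    }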

124 changes: 92 additions & 32 deletions Source/CameraView/CameraView.swift
@@ -26,14 +26,23 @@ class CameraView: UIViewController {
lazy var focusImageView: UIImageView = { [unowned self] in
let imageView = UIImageView()
imageView.image = self.getImage("focusIcon")
imageView.backgroundColor = UIColor.clearColor()
imageView.backgroundColor = .clearColor()
imageView.frame = CGRectMake(0, 0, 110, 110)
imageView.alpha = 0
self.view.addSubview(imageView)

return imageView
}()

lazy var capturedImageView: UIView = { [unowned self] in
let view = UIView()
view.backgroundColor = .blackColor()
view.alpha = 0
self.view.addSubview(view)

return view
}()

lazy var containerView: UIView = {
let view = UIView()
view.alpha = 0
@@ -62,7 +71,7 @@
// MARK: - Initialize camera

func initializeCamera() {
captureSession.sessionPreset = AVCaptureSessionPreset640x480
captureSession.sessionPreset = AVCaptureSessionPreset1280x720
capturedDevices = NSMutableArray()

let authorizationStatus = AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo)
@@ -84,6 +93,8 @@
}
}

captureDevice = capturedDevices?.firstObject as? AVCaptureDevice

if captureDevice != nil {
beginSession()
}
@@ -111,16 +122,21 @@

delegate?.handleFlashButton(captureDevice?.position == .Front)

var error: NSError? = nil

UIView.animateWithDuration(0.3, animations: { [unowned self] in
self.containerView.alpha = 1
}, completion: { finished in
self.captureSession.beginConfiguration()
self.captureSession.removeInput(currentDeviceInput)
self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &error))

if self.captureDevice!.supportsAVCaptureSessionPreset(AVCaptureSessionPreset1280x720) {
self.captureSession.sessionPreset = AVCaptureSessionPreset1280x720
} else {
self.captureSession.sessionPreset = AVCaptureSessionPreset640x480
}

try! self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice))
self.captureSession.commitConfiguration()
UIView.animateWithDuration(1.3, animations: { [unowned self] in
UIView.animateWithDuration(0.7, animations: { [unowned self] in
self.containerView.alpha = 0
})
})
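Switching cameras now prefers the 1280x720 preset and falls back to 640x480 when the device cannot supply it. A small helper sketch of that fallback logic (the helper name is illustrative, not part of the commit):

    import AVFoundation

    func preferredSessionPreset(device: AVCaptureDevice) -> String {
      // Prefer 720p when the device supports it, otherwise fall back to 640x480.
      if device.supportsAVCaptureSessionPreset(AVCaptureSessionPreset1280x720) {
        return AVCaptureSessionPreset1280x720
      }
      return AVCaptureSessionPreset640x480
    }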
@@ -129,7 +145,10 @@
func flashCamera(title: String) {

if (captureDevice?.hasFlash != nil) {
captureDevice?.lockForConfiguration(nil)
do {
try captureDevice?.lockForConfiguration()
} catch _ {
}
switch title {
case "ON":
captureDevice?.flashMode = .On
@@ -143,22 +162,57 @@
}

func takePicture() {
capturedImageView.frame = view.bounds

UIView.animateWithDuration(0.1, animations: {
self.capturedImageView.alpha = 1
}, completion: { _ in
UIView.animateWithDuration(0.1, animations: {
self.capturedImageView.alpha = 0
})
})

let queue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL)
let videoOrientation = previewLayer?.connection.videoOrientation

stillImageOutput?.connectionWithMediaType(AVMediaTypeVideo).videoOrientation = videoOrientation!

dispatch_async(queue, { [unowned self] in
self.stillImageOutput!.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo), completionHandler: { (buffer, error) -> Void in
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
let image = UIImage(data: imageData)
self.delegate?.imageToLibrary(image!)
let orientation = ALAssetOrientation(rawValue: image!.imageOrientation.rawValue)
let image = self.cropImage(UIImage(data: imageData)!)
let orientation = self.pictureOrientation()
let assetsLibrary = ALAssetsLibrary()
assetsLibrary.writeImageToSavedPhotosAlbum(image!.CGImage, orientation: orientation!, completionBlock: nil)
assetsLibrary.writeImageToSavedPhotosAlbum(image.CGImage, orientation: orientation, completionBlock: nil)

let rotatedImage = UIImage(CGImage: image.CGImage!,
scale: 1.0,
orientation: UIImageOrientation(rawValue: orientation.rawValue)!)
self.delegate?.imageToLibrary(rotatedImage)
})
})
}

func cropImage(image: UIImage) -> UIImage {
let imageReference = CGImageCreateWithImageInRect(image.CGImage, CGRect(x: 0, y: 0, width: image.size.height - 200, height: image.size.width))
let normalizedImage = UIImage(CGImage: imageReference!, scale: 1, orientation: .Right)

return normalizedImage
}

func pictureOrientation() -> ALAssetOrientation {
switch UIDevice.currentDevice().orientation {
case .LandscapeLeft:
return .Up
case .LandscapeRight:
return .Down
case .PortraitUpsideDown:
return .Left
default:
return .Right
}
}

// MARK: - Timer methods

func timerDidFire() {
@@ -173,24 +227,25 @@

func focusTo(point: CGPoint) {
if let device = captureDevice {
if device.lockForConfiguration(nil)
&& device.isFocusModeSupported(AVCaptureFocusMode.Locked) {
device.focusPointOfInterest = CGPointMake(point.x / UIScreen.mainScreen().bounds.width, point.y / UIScreen.mainScreen().bounds.height)
device.unlockForConfiguration()
focusImageView.center = point
UIView.animateWithDuration(0.5, animations: { [unowned self] in
self.focusImageView.alpha = 1
self.focusImageView.transform = CGAffineTransformMakeScale(0.6, 0.6)
}, completion: { _ in
self.animationTimer = NSTimer.scheduledTimerWithTimeInterval(1, target: self,
selector: "timerDidFire", userInfo: nil, repeats: false)
})

do { try device.lockForConfiguration() } catch { }
if device.isFocusModeSupported(AVCaptureFocusMode.Locked) {
device.focusPointOfInterest = CGPointMake(point.x / UIScreen.mainScreen().bounds.width, point.y / UIScreen.mainScreen().bounds.height)
device.unlockForConfiguration()
focusImageView.center = point
UIView.animateWithDuration(0.5, animations: { [unowned self] in
self.focusImageView.alpha = 1
self.focusImageView.transform = CGAffineTransformMakeScale(0.6, 0.6)
}, completion: { _ in
self.animationTimer = NSTimer.scheduledTimerWithTimeInterval(1, target: self,
selector: "timerDidFire", userInfo: nil, repeats: false)
})
}
}
}

override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
let anyTouch = touches.first as! UITouch
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
let anyTouch = touches.first!
let touchX = anyTouch.locationInView(view).x
let touchY = anyTouch.locationInView(view).y
focusImageView.transform = CGAffineTransformIdentity
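The touch-handling override also picks up Swift 2's tighter Objective-C bridging: the touches arrive as Set<UITouch> and the event is optional, so the force cast of touches.first disappears. A hedged sketch using guard instead of force-unwrapping, not the commit's exact code:

    import UIKit

    class FocusController: UIViewController {
      override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        // Swift 2 bridges the NSSet of touches as Set<UITouch>.
        guard let touch = touches.first else { return }
        let point = touch.locationInView(view)
        print("focus at \(point)")
      }
    }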
@@ -200,26 +255,31 @@

func configureDevice() {
if let device = captureDevice {
device.lockForConfiguration(nil)
do {
try device.lockForConfiguration()
} catch _ {
}
device.unlockForConfiguration()
}
}

func beginSession() {
configureDevice()
var error: NSError? = nil
if captureSession.inputs.count == 0 {
captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &error))

if error != nil {
println("error: \(error?.localizedDescription)")
let captureDeviceInput: AVCaptureDeviceInput?
do { try
captureDeviceInput = AVCaptureDeviceInput(device: self.captureDevice)
captureSession.addInput(captureDeviceInput)
} catch {
print("failed to capture device")
}


previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer?.autoreverses = true
previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
view.clipsToBounds = true
view.layer.addSublayer(previewLayer)
view.layer.addSublayer(previewLayer!)
previewLayer?.frame = view.layer.frame
captureSession.startRunning()
delegate?.handleFlashButton(captureDevice?.position == .Front)
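Most of the remaining churn in this file tracks Swift 2's error-handling model: APIs that previously took an NSError out-parameter, such as AVCaptureDeviceInput(device:error:) and lockForConfiguration(_:), now throw, so call sites wrap them in do/try/catch, or use try! where failure is treated as unrecoverable. A hedged sketch of the throwing style, not taken verbatim from the commit (function names are illustrative):

    import AVFoundation

    func attachInput(device: AVCaptureDevice, toSession session: AVCaptureSession) {
      do {
        // Swift 2: the initializer throws instead of filling an NSError pointer.
        let input = try AVCaptureDeviceInput(device: device)
        if session.canAddInput(input) {
          session.addInput(input)
        }
      } catch {
        print("failed to attach camera input: \(error)")
      }
    }

    func lockAndConfigure(device: AVCaptureDevice, configure: () -> Void) {
      do {
        try device.lockForConfiguration()
        configure()
        device.unlockForConfiguration()
      } catch {
        print("could not lock device for configuration: \(error)")
      }
    }

The commit itself uses try! in switchCamera and an empty catch in a couple of places; the sketch above only shows the fully handled form.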