I have a Swift 4 app I'm building, and the saved image shows the overlay on the preview screen moved slightly upwards. Trying to center the overlay image on the preview screen looks good on the preview, but when I take the snapshot, the center point of the overlay circle is shifted slightly upward.
The attached image shows the white dot shifted slightly toward the top of the home button on the preview image of the iphone. The white dot in the white circle. When the image is taken, the dot is dead center in the circle, but the shaved image is this one. Slightly shifted.
Any ideas from any experts her would be much appreciated. Thanks
if the image isn't displaying, please look here. https://robert-chalmers.uk/images/IMG_0055.JPG
The code that generates the image above and saves it to the camera is shown below this output.
The output of the various print statements sprinkled through the code is here. You will note the changing values of screenWidth and screenHeight.
The attached image shows the white dot shifted slightly toward the top of the home button on the preview image of the iphone. The white dot in the white circle. When the image is taken, the dot is dead center in the circle, but the shaved image is this one. Slightly shifted.
Any ideas from any experts her would be much appreciated. Thanks
if the image isn't displaying, please look here. https://robert-chalmers.uk/images/IMG_0055.JPG
The code that generates the image above and saves it to the camera is shown below this output.
The output of the various print statements sprinkled through the code is here. You will note the changing values of screenWidth and screenHeight.
Code:
Device name: Back Camera
previewLayer found
PL: 768.0
Width 1: 768.0 Height 1: 1024.0
ImageOverlay: 768.0
Capture session running - waiting for button press if any
No Device 1
No Device 2
No Device 3
Device name: Front Camera
No Device 2
No Device 3
Device name: iPad Microphone
No Device 3
No Device 4
previewLayer found
PL: 768.0
ScreenWidth 768.0
Width 1: 768.0 Height 1: 1024.0
screenWidth 2: 1080.0 screenHeight 2: 1920.0
Blink Width: 1080.0
Blink Height: 1920.0
Width 1: 1080.0 Height 1: 1920.0
ViewController.swift
import UIKit
import AVFoundation
import Foundation
class ViewController: UIViewController {
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var imgOverlay: UIImageView!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var btnInfo: UIButton!
@IBOutlet weak var btnSocial: UIButton!
@IBOutlet weak var shapeLayer: UIView!
/
@IBOutlet var spinner: UIActivityIndicatorView!
let captureSession = AVCaptureSession()
let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
/
let screenWidth = UIScreen.main.bounds.size.width
let screenHeight = UIScreen.main.bounds.size.height
var aspectRatio: CGFloat = 1.0
var viewFinderHeight: CGFloat = 0.0
var viewFinderWidth: CGFloat = 0.0
var viewFinderMarginLeft: CGFloat = 0.0
var viewFinderMarginTop: CGFloat = 0.0
/
/
var captureDevice : AVCaptureDevice?
override func viewDidLoad() {
super.viewDidLoad()
captureSession.sessionPreset = AVCaptureSession.Preset.high
if #available(iOS 10.0, *) {
if let devices = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: .back) {
print("Device name: \(devices.localizedName)")
}
} else {
}
let devices = AVCaptureDevice.devices()
/
for device in devices {
print("Device name: \(device.localizedName)")
/
if (device.hasMediaType(AVMediaType.video)) {
/
if(device.position == AVCaptureDevice.Position.back) {
captureDevice = device
if captureDevice != nil {
/
/
beginSession()
}
print("No Device 1")
}
print("No Device 2")
}
print("No Device 3")
}
print("No Device 4")
/
/
previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer?.bounds = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
previewLayer?.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
previewLayer?.videoGravity = AVLayerVideoGravity.resize
/
self.view.layer.addSublayer(previewLayer!)
print("previewLayer found")
print("PL: \(self.view.layer.bounds.size.width)")
print("ScreenWidth \(screenWidth)")
/
/
/
imgOverlay.frame = self.view.frame
/
imgOverlay.image = self.drawCirclesOnImage(fromImage: nil, targetSize: imgOverlay.bounds.size)
/
self.view.bringSubview(toFront: navigationBar)
self.navigationItem.title = "Navigation"
self.view.bringSubview(toFront: imgOverlay)
self.view.bringSubview(toFront: btnCapture)
self.view.bringSubview(toFront: btnInfo)
self.view.bringSubview(toFront: btnSocial)
self.view.bringSubview(toFront: shapeLayer)
}
@IBAction func actionCameraCapture(_ sender: AnyObject) {
/
saveToCamera()
}
func beginSession() {
/
do {
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
}
}
catch {
print("error: \(error.localizedDescription)")
}
/
/
/
previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer?.bounds = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
previewLayer?.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
previewLayer?.videoGravity = AVLayerVideoGravity.resize
/
self.view.layer.addSublayer(previewLayer!)
print("previewLayer found")
print("PL: \(self.view.layer.bounds.size.width)")
/
/
/
imgOverlay.frame = self.view.frame
/
imgOverlay.image = self.drawCirclesOnImage(fromImage: nil, targetSize: imgOverlay.bounds.size)
/
self.view.bringSubview(toFront: navigationBar)
self.navigationItem.title = "Draw To Jack"
let iox = imgOverlay.bounds.size.width
print("ImageOverlay: \(iox)")
self.view.bringSubview(toFront: imgOverlay)
self.view.bringSubview(toFront: btnCapture)
self.view.bringSubview(toFront: btnInfo)
self.view.bringSubview(toFront: btnSocial)
self.view.bringSubview(toFront: shapeLayer)
print("Capture session running - waiting for button press if any")
captureSession.startRunning()
/
}
func getImageWithColor(color: UIColor, size: CGSize) -> UIImage {
let rect = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: size.width, height: size.height))
/
/
UIGraphicsBeginImageContextWithOptions(size, false, 0)
color.setFill()
UIRectFill(rect)
let image: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
return image
}
func drawCirclesOnImage(fromImage: UIImage? = nil, targetSize: CGSize? = CGSize.zero) -> UIImage? {
if fromImage == nil && targetSize == CGSize.zero {
return nil
}
var tmpimg: UIImage?
if targetSize == CGSize.zero {
tmpimg = fromImage
} else {
tmpimg = getImageWithColor(color: UIColor.clear, size: targetSize!)
}
guard let img = tmpimg else {
return nil
}
let imageSize = img.size
let scale: CGFloat = 1
UIGraphicsBeginImageContextWithOptions(imageSize, false, scale)
img.draw(at: CGPoint.zero)
let w = imageSize.width
/
let h = imageSize.height
/
print("Width 1: \(w) Height 1: \(h)")
let midX = imageSize.width / 2
let midY = imageSize.height / 2
/
/
/
let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
/
var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.010), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
UIColor.white.setFill()
circlePath.stroke()
circlePath.fill()
/
/
/
/
/
UIColor.red.setStroke()
for pct in circleRads {
let rad = w * CGFloat(pct)
circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
circlePath.lineWidth = 2.5
circlePath.stroke()
}
/
let now = Date()
let formatter = DateFormatter()
formatter.timeZone = TimeZone.current
formatter.dateFormat = "yyyy-MM-dd HH:mm"
let dateString = formatter.string(from: now)
/
let paragraphStyle = NSMutableParagraphStyle()
paragraphStyle.alignment = .center
let attrs = [NSAttributedStringKey.font: UIFont(name: "HelveticaNeue-Thin", size: 26)!, NSAttributedStringKey.paragraphStyle: paragraphStyle]
let string = dateString
string.draw(with: CGRect(x: 12, y: 38, width: 448, height: 448), options: .usesLineFragmentOrigin, attributes: attrs, context: nil)
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
func saveToCamera() {
if let videoConnection = stillImageOutput.connection(with: AVMediaType.video) {
stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer!) {
if let cameraImage = UIImage(data: imageData,scale: UIScreen.main.scale) {
/
let screenWidth = cameraImage.size.width
let screenHeight = cameraImage.size.height
/
let w = screenWidth
let h = screenHeight
print("screenWidth 2: \(w) screenHeight 2: \(h)")
/
/
let blinkWidth = cameraImage.size.width
let blinkHeight = cameraImage.size.height
print("Blink Width: \(blinkWidth)")
print("Blink Height: \(blinkHeight)")
/
blinkWidth(x): 1080.0
blinkHeight(y): 1920.0
*/
/
let rect = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: blinkWidth, height: blinkHeight))
/
/
let shutterView = UIView(frame: rect)
shutterView.backgroundColor = UIColor.black
self.view.addSubview(shutterView)
UIView.animate(withDuration: 0.3, animations: {
shutterView.alpha = 0
}, completion: { (_) in
shutterView.removeFromSuperview()
})
/
let cameraShutterSoundID: SystemSoundID = 1108
AudioServicesPlaySystemSound(cameraShutterSoundID)
if let nImage = self.drawCirclesOnImage(fromImage: cameraImage, targetSize: CGSize.zero) {
UIImageWriteToSavedPhotosAlbum(nImage, nil, nil, nil)
}
}
}
})
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
/
}
}
Last edited by a moderator: