This repository was archived by the owner on Oct 9, 2022. It is now read-only.

Commit 8c2941e
Support device orientation
1 parent 0acff83 commit 8c2941e

File tree
5 files changed: 72 additions & 20 deletions
  • CoreML.playgroundbook/Contents
    • Chapters/CoreML.playgroundchapter/Pages
      • ObjectDetectionCamera.playgroundpage
      • ObjectDetectionCameraVision.playgroundpage
      • ObjectRecognitionCamera.playgroundpage
      • ObjectRecognitionCameraVision.playgroundpage
    • UserModules/UserCode.playgroundmodule/Sources


CoreML.playgroundbook/Contents/Chapters/CoreML.playgroundchapter/Pages/ObjectDetectionCamera.playgroundpage/main.swift

Lines changed: 9 additions & 5 deletions

@@ -1,7 +1,6 @@
 import ARKit
 import PlaygroundSupport
 import UIKit
-import VideoToolbox
 import Vision

 // Parameters
@@ -24,7 +23,7 @@ let imageOptions: [MLFeatureValue.ImageOption: Any] = [
 ]

 // ViewControllers
-class ViewController: PreviewViewController {
+final class ViewController: PreviewViewController {
     let fpsLabel: UILabel = {
         let label = UILabel()
         label.translatesAutoresizingMaskIntoConstraints = false
@@ -121,7 +120,12 @@ extension ViewController: ARSessionDelegate {
     func session(_ session: ARSession, didUpdate frame: ARFrame) {
         let imageBuffer = frame.capturedImage

-        let size = CVImageBufferGetDisplaySize(imageBuffer)
+        let orientation = CGImagePropertyOrientation(interfaceOrientation: UIScreen.main.orientation)
+        let ciImage = CIImage(cvPixelBuffer: imageBuffer).oriented(orientation)
+        let context = CIContext(options: [.useSoftwareRenderer: false])
+        let cgImage = context.createCGImage(ciImage, from: ciImage.extent)!
+
+        let size = CGSize(width: cgImage.width, height: cgImage.height)
         let scale = self.view.bounds.size / size
         let maxScale = fmax(scale.width, scale.height)
         CATransaction.begin()
@@ -131,8 +135,8 @@ extension ViewController: ARSessionDelegate {
         self.bboxLayer.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
         CATransaction.commit()

-        var cgImage: CGImage!
-        VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage)
+        // var cgImage: CGImage!
+        // VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage)
         let featureValue = try! MLFeatureValue(cgImage: cgImage, constraint: imageConstraint, options: imageOptions)
         let input = try! MLDictionaryFeatureProvider(dictionary: [
             inputName: featureValue,
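
The change above stops using VTCreateCGImageFromCVPixelBuffer, which knows nothing about the interface orientation, and instead renders an orientation-corrected CIImage into a CGImage before the Core ML call. A minimal sketch of that conversion as a standalone helper; the name orientedCGImage(from:for:) and the shared context are illustrative and not part of the commit:

import CoreGraphics
import CoreImage
import CoreVideo
import ImageIO

// Sketch: rotate an ARKit camera buffer to match the current interface
// orientation before handing it to Core ML. Reusing one CIContext across
// frames is assumed to be cheaper than creating a new context per frame,
// as the page above does.
let sharedContext = CIContext(options: [.useSoftwareRenderer: false])

func orientedCGImage(from imageBuffer: CVPixelBuffer,
                     for orientation: CGImagePropertyOrientation) -> CGImage? {
    // .oriented(_:) bakes the rotation into the image, so its extent already
    // has width and height swapped for .left/.right orientations.
    let ciImage = CIImage(cvPixelBuffer: imageBuffer).oriented(orientation)
    return sharedContext.createCGImage(ciImage, from: ciImage.extent)
}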

CoreML.playgroundbook/Contents/Chapters/CoreML.playgroundchapter/Pages/ObjectDetectionCameraVision.playgroundpage/main.swift

Lines changed: 11 additions & 5 deletions

@@ -15,7 +15,7 @@ model.featureProvider = try MLDictionaryFeatureProvider(dictionary: [
 ])

 // ViewControllers
-class ViewController: PreviewViewController {
+final class ViewController: PreviewViewController {
     let bboxLayer = CALayer()

     lazy var request: VNCoreMLRequest = {
@@ -37,8 +37,9 @@ class ViewController: PreviewViewController {
         self.bboxLayer.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
     }

-    func detect(imageBuffer: CVImageBuffer) {
-        try! VNImageRequestHandler(cvPixelBuffer: imageBuffer).perform([self.request])
+    func detect(imageBuffer: CVImageBuffer, orientation: CGImagePropertyOrientation) {
+        try! VNImageRequestHandler(cvPixelBuffer: imageBuffer, orientation: orientation)
+            .perform([self.request])
     }

     func processDetections(for request: VNRequest, error: Error?) {
@@ -85,7 +86,12 @@ extension ViewController: ARSessionDelegate {
     func session(_ session: ARSession, didUpdate frame: ARFrame) {
         let imageBuffer = frame.capturedImage

-        let size = CVImageBufferGetDisplaySize(imageBuffer)
+        let orientation = CGImagePropertyOrientation(interfaceOrientation: UIScreen.main.orientation)
+
+        var size = CVImageBufferGetDisplaySize(imageBuffer)
+        if orientation == .right || orientation == .left {
+            size = CGSize(width: size.height, height: size.width)
+        }
         let scale = self.view.bounds.size / size
         let maxScale = fmax(scale.width, scale.height)
         CATransaction.begin()
@@ -95,7 +101,7 @@ extension ViewController: ARSessionDelegate {
         self.bboxLayer.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
         CATransaction.commit()

-        self.detect(imageBuffer: imageBuffer)
+        self.detect(imageBuffer: imageBuffer, orientation: orientation)
     }
 }

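
On the Vision pages no CGImage is built at all: the untouched pixel buffer is passed to VNImageRequestHandler together with the CGImagePropertyOrientation, and only the display size used for the bounding-box layer is swapped when the frame is effectively rotated by a quarter turn (.left or .right). A sketch of that handler call, assuming a VNCoreMLRequest configured as on the page above; the do/catch replaces the page's try! purely for illustration:

import CoreVideo
import ImageIO
import Vision

// Sketch: run a prepared Vision request against an ARKit frame buffer,
// letting Vision account for the rotation instead of re-rendering pixels.
func detect(imageBuffer: CVPixelBuffer,
            orientation: CGImagePropertyOrientation,
            request: VNCoreMLRequest) {
    let handler = VNImageRequestHandler(cvPixelBuffer: imageBuffer, orientation: orientation)
    do {
        try handler.perform([request])
    } catch {
        print("Vision request failed: \(error)")
    }
}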

CoreML.playgroundbook/Contents/Chapters/CoreML.playgroundchapter/Pages/ObjectRecognitionCamera.playgroundpage/main.swift

Lines changed: 6 additions & 4 deletions

@@ -1,7 +1,6 @@
 import ARKit
 import PlaygroundSupport
 import UIKit
-import VideoToolbox
 import Vision

 // Parameters
@@ -21,7 +20,7 @@ let imageOptions: [MLFeatureValue.ImageOption: Any] = [
 ]

 // ViewControllers
-class ViewController: PreviewViewController {
+final class ViewController: PreviewViewController {
     let fpsLabel: UILabel = {
         let label = UILabel()
         label.translatesAutoresizingMaskIntoConstraints = false
@@ -87,8 +86,11 @@ extension ViewController: ARSessionDelegate {
     func session(_ session: ARSession, didUpdate frame: ARFrame) {
         let imageBuffer = frame.capturedImage

-        var cgImage: CGImage!
-        VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage)
+        let orientation = CGImagePropertyOrientation(interfaceOrientation: UIScreen.main.orientation)
+        let ciImage = CIImage(cvPixelBuffer: imageBuffer).oriented(orientation)
+        let context = CIContext(options: [.useSoftwareRenderer: false])
+        let cgImage = context.createCGImage(ciImage, from: ciImage.extent)!
+
         let featureValue = try! MLFeatureValue(cgImage: cgImage, constraint: imageConstraint, options: imageOptions)
         let input = try! MLDictionaryFeatureProvider(dictionary: [inputName: featureValue])

CoreML.playgroundbook/Contents/Chapters/CoreML.playgroundchapter/Pages/ObjectRecognitionCameraVision.playgroundpage/main.swift

Lines changed: 8 additions & 4 deletions

@@ -12,7 +12,7 @@ let model = try! compileModel(at: #fileLiteral(resourceName: "MobileNetV2Int8LUT
 let threshold: Float = 0.5

 // ViewControllers
-class ViewController: PreviewViewController {
+final class ViewController: PreviewViewController {
     let classesLabel: UILabel = {
         let label = UILabel()
         label.translatesAutoresizingMaskIntoConstraints = false
@@ -42,8 +42,9 @@ class ViewController: PreviewViewController {
         ])
     }

-    func detect(imageBuffer: CVImageBuffer) {
-        try! VNImageRequestHandler(cvPixelBuffer: imageBuffer).perform([self.request])
+    func detect(imageBuffer: CVImageBuffer, orientation: CGImagePropertyOrientation) {
+        try! VNImageRequestHandler(cvPixelBuffer: imageBuffer, orientation: orientation)
+            .perform([self.request])
     }

     func processClassifications(for request: VNRequest, error: Error?) {
@@ -61,7 +62,10 @@ class ViewController: PreviewViewController {

 extension ViewController: ARSessionDelegate {
     func session(_ session: ARSession, didUpdate frame: ARFrame) {
-        self.detect(imageBuffer: frame.capturedImage)
+        self.detect(
+            imageBuffer: frame.capturedImage,
+            orientation: CGImagePropertyOrientation(interfaceOrientation: UIScreen.main.orientation)
+        )
     }
 }


CoreML.playgroundbook/Contents/UserModules/UserCode.playgroundmodule/Sources/UserSource.swift

Lines changed: 38 additions & 2 deletions

@@ -1,6 +1,7 @@
-import Vision
-import CoreML
 import Accelerate
+import CoreML
+import UIKit
+import Vision

 public func compileModel(at url: URL) throws -> VNCoreMLModel {
     try compileModel(at: url, configuration: MLModelConfiguration())
@@ -28,6 +29,41 @@ public func argmax(_ array: UnsafePointer<Double>, count: UInt) -> (Int, Double)
     return (Int(maxIndex), maxValue)
 }

+extension CGImagePropertyOrientation {
+    public init(interfaceOrientation: UIInterfaceOrientation) {
+        switch interfaceOrientation {
+        case .portrait:
+            self = .right
+        case .portraitUpsideDown:
+            self = .left
+        case .landscapeLeft:
+            self = .down
+        case .landscapeRight:
+            self = .up
+        default:
+            self = .right
+        }
+    }
+}
+
+extension UIScreen {
+    public var orientation: UIInterfaceOrientation {
+        let point = self.coordinateSpace.convert(CGPoint.zero, to: self.fixedCoordinateSpace)
+        switch (point.x, point.y) {
+        case (0, 0):
+            return .portrait
+        case let (x, y) where x != 0 && y != 0:
+            return .portraitUpsideDown
+        case let (0, y) where y != 0:
+            return .landscapeLeft
+        case let (x, 0) where x != 0:
+            return .landscapeRight
+        default:
+            return .unknown
+        }
+    }
+}
+
 public let coco_classes = [
     "person",
     "bicycle",

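These two extensions are what every camera page combines per frame: UIScreen.orientation recovers the interface orientation by comparing the screen's rotating coordinate space with its fixed one, and the CGImagePropertyOrientation initializer maps that onto the rotation Vision and Core ML should apply. Portrait maps to .right (and serves as the fallback) because ARKit delivers camera buffers in the sensor's native landscape orientation, so a portrait UI needs a quarter-turn correction. A small usage sketch, with illustrative variable names:

import ImageIO
import UIKit

// Relies on the UIScreen.orientation and CGImagePropertyOrientation
// extensions added above in UserSource.swift.
let interfaceOrientation = UIScreen.main.orientation    // e.g. .portrait
let imageOrientation = CGImagePropertyOrientation(interfaceOrientation: interfaceOrientation)
// In portrait this is .right: the sensor image needs a quarter-turn
// rotation before it appears upright on screen.
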