
Commit

Sample app updated for 3.0.0
kadu-v committed May 24, 2024
1 parent 7787e74 commit 4faedaf
Showing 407 changed files with 811 additions and 940 deletions.
92 changes: 0 additions & 92 deletions .gitignore

This file was deleted.

58 changes: 24 additions & 34 deletions EdgeOCRSample.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

@@ -30,7 +30,7 @@
shouldAutocreateTestPlan = "YES">
</TestAction>
<LaunchAction
buildConfiguration = "Release"
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
@@ -51,7 +51,7 @@
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
buildConfiguration = "Debug"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
5 changes: 3 additions & 2 deletions EdgeOCRSample/Models/LoadModel/LoadModel.swift
@@ -12,18 +12,19 @@ import os

func loadModel(
path: String,
uid: String,
modelSettings: EdgeOCRSwift.ModelSettings = ModelSettings()
) async throws -> ModelInformation? {
let modelPath = Bundle.main.path(forResource: path, ofType: "")
guard let modelPath = modelPath else {
throw EdgeError.notFound(description: "Not found models at given the path: \(path)")
throw EdgeError.notFound(description: "Not found models at the given path: \(path)")
}

let edgeOCR = try ModelBuilder().fromPath(modelPath).build()
var model: Model?
for candidate in edgeOCR.availableModels() {
os_log("model candidate: %@", candidate.getUID())
if candidate.getUID() == "model-d320x320" {
if candidate.getUID() == uid {
model = candidate
}
}
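For orientation, here is a minimal sketch of a call site for the new `loadModel(path:uid:)` signature. The resource name "models" is only an illustrative assumption; "model-d320x320" is the UID that the previous version of `loadModel` hard-coded.

```swift
import EdgeOCRSwift

// Hedged sketch of calling the updated helper. "models" is a hypothetical
// bundle resource name; "model-d320x320" is the UID that was hard-coded
// in the old implementation of loadModel.
Task {
    let modelInformation = try await loadModel(path: "models", uid: "model-d320x320")
    print(modelInformation as Any)
}
```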
40 changes: 18 additions & 22 deletions EdgeOCRSample/Views/Barcode/BarcodeViewController.swift
@@ -25,7 +25,7 @@ class BarcodeViewController: ViewController {

// MARK: - Barcode scan options

private let barcodeScanOption = BarcodeScanOption(targetFormats: [BarcodeFormat.AnyFormat])
private let scanOptions = ScanOptions(targetFormats: [BarcodeFormat.AnyFormat])

init(
aspectRatio: Binding<Double>,
@@ -44,19 +44,18 @@

override func setupLayers() {
// Set up the guide that shows the detection area
let width = previewBounds.width
let height = previewBounds.height
let cropRect = barcodeScanOption.getCropRect()
let coropHorizontalBias = cropRect.horizontalBias
let cropVerticalBias = cropRect.verticalBias
let cropWidth = cropRect.width
let cropHeight = cropRect.height
let width = viewBounds.width
let height = viewBounds.height
// Show the guide at the center of the screen, the default detection area
let cropHorizontalBias = 0.5
let cropVerticalBias = 0.5
guideLayer = CALayer()
guideLayer.frame = CGRect(
x: coropHorizontalBias * (previewBounds.width - width),
y: cropVerticalBias * (previewBounds.height - height),
width: cropWidth * width,
height: cropHeight * height)
x: cropHorizontalBias * (viewBounds.width - width),
y: cropVerticalBias * (viewBounds.height - height),
width: width,
height: height)
print(viewBounds.width, viewBounds.height, width, height)

let borderWidth = 3.0
let boxColor = UIColor.green.cgColor
@@ -65,7 +64,7 @@

// Create the layer that displays detection results
detectionLayer = CALayer()
detectionLayer.frame = previewBounds
detectionLayer.frame = viewBounds

DispatchQueue.main.async { [weak self] in
if let layer = self?.previewLayer {
@@ -109,11 +108,10 @@
detectionLayer.addSublayer(boxLayer)
}

func showDialog(detections: [Detection<Barcode>]) {
func showDialog(detections: [Barcode]) {
var messages: [String] = []
for detection in detections {
let text = detection.getScanObject().getText()
messages.append(text)
messages.append(detection.getText())
}
self.messages = messages
showDialog = true
@@ -124,17 +122,15 @@
}

func drawDetections(result: ScanResult) {
var detections: [Detection<Barcode>] = []
var detections: [Barcode] = []

CATransaction.begin()
CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions)
detectionLayer.sublayers = nil
for detection in result.getBarcodeDetections() {
let text = detection.getScanObject().getText()
let status = detection.getStatus()
if status == ScanConfirmationStatus.Confirmed {
if detection.getStatus() == ScanConfirmationStatus.Confirmed {
let bbox = detection.getBoundingBox()
drawDetection(bbox: bbox, text: text)
drawDetection(bbox: bbox, text: detection.getText())
detections.append(detection)
}
}
@@ -150,7 +146,7 @@
do {
// MARK: - Barcode scanning

scanResult = try edgeOCR.scanBracodes(sampleBuffer, barcodeScanOption: barcodeScanOption, previewViewBounds: previewBounds)
scanResult = try edgeOCR.scan(sampleBuffer, scanOptions: scanOptions, viewBounds: viewBounds)

} catch {
os_log("Failed to scan texts: %@", type: .debug, error.localizedDescription)
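Condensed, the 3.0.0 per-frame barcode path above boils down to the following sketch, using only calls that appear in this diff; the `edgeOCR` instance, the incoming `sampleBuffer`, and the controller's `viewBounds` are assumed to be in scope, as they are in `BarcodeViewController`.

```swift
// Sketch of the 3.0.0 frame-scan path: scanBracodes(_:barcodeScanOption:previewViewBounds:)
// is replaced by scan(_:scanOptions:viewBounds:), and text comes from getText()
// on each barcode rather than via getScanObject().
let scanOptions = ScanOptions(targetFormats: [BarcodeFormat.AnyFormat])
let result = try edgeOCR.scan(sampleBuffer, scanOptions: scanOptions, viewBounds: viewBounds)
for barcode in result.getBarcodeDetections()
    where barcode.getStatus() == ScanConfirmationStatus.Confirmed {
    print(barcode.getText(), barcode.getBoundingBox())
}
```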
4 changes: 2 additions & 2 deletions EdgeOCRSample/Views/Barcode/DialogView.swift
@@ -29,8 +29,8 @@ struct DialogView: View {
.cornerRadius(15)
Spacer().frame(height: 20)
VStack(spacing: 10) {
ForEach(messages, id: \.self) { message in
Text(message)
ForEach(0 ..< messages.count, id: \.self) { idx in
Text(messages[idx])
}
}
Spacer().frame(height: 25)
@@ -9,16 +9,19 @@ import Foundation
import os
import SwiftUI

struct BarcodeImageFotter: View {
struct BarcodeImageFooter: View {
@Binding var image: UIImage
var body: some View {
VStack {
Button(action: {
// MARK: - Scan the barcode image

// NOTE: Convert the error to an optional and ignore the error message
if let scannedImage = try? image.scanBarcodeImage() {
image = scannedImage
do {
if let scannedImage = try image.scanBarcodeImage() {
image = scannedImage
}
} catch {
os_log(.error, log: .default, "error: %@", error.localizedDescription)
}

}, label: {
@@ -35,5 +38,5 @@
}

#Preview {
BarcodeImageFotter(image: .constant(UIImage(named: "sample_text")!))
BarcodeImageFooter(image: .constant(UIImage(named: "sample_text")!))
}
14 changes: 6 additions & 8 deletions EdgeOCRSample/Views/BarcodeImage/BarcodeImageScanner.swift
@@ -19,25 +19,23 @@ extension UIImage {

// MARK: - Detect and recognize barcodes in the image

let barcodeScanOption = BarcodeScanOption(targetFormats: [BarcodeFormat.AnyFormat])
let detections = try edgeOCR.scanBarcodes(rotatedImage,
barcodeScanOption: barcodeScanOption)
let barcodeScanOption = ScanOptions(scanMode: ScanOptions.ScanMode.OneShot,
targetFormats: [BarcodeFormat.AnyFormat])
let detections = try edgeOCR.scan(rotatedImage, scanOptions: barcodeScanOption)

// MARK: - Convert bounding box coordinates to absolute image coordinates

var boundingBoxes: [CGRect] = []
var texts: [String] = []
for detection in detections.getBarcodeDetections() {
let bbox = detection.getBoundingBox()
let scanObject = detection.getScanObject()
let text = scanObject.getText()
for barcode in detections.getBarcodeDetections() {
let bbox = barcode.getBoundingBox()
let x = self.size.width * bbox.minX
let y = self.size.height * bbox.minY
let width = self.size.width * bbox.width
let height = self.size.height * bbox.height
let rect = CGRect(x: x, y: y, width: width, height: height)
boundingBoxes.append(rect)
texts.append(text)
texts.append(barcode.getText())
}

// MARK: - Generate a new image with the detection and recognition results drawn on it
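The still-image path follows the same pattern: build a one-shot `ScanOptions` and call `scan` on the image. A minimal sketch mirroring the extension above; `edgeOCR` and `image` are assumed to already be in scope.

```swift
// One-shot scan of a still UIImage, mirroring the updated extension above.
// edgeOCR (an EdgeOCR instance) and image (a UIImage) are assumed in scope.
let options = ScanOptions(scanMode: ScanOptions.ScanMode.OneShot,
                          targetFormats: [BarcodeFormat.AnyFormat])
let result = try edgeOCR.scan(image, scanOptions: options)
for barcode in result.getBarcodeDetections() {
    print(barcode.getText(), barcode.getBoundingBox())
}
```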
2 changes: 1 addition & 1 deletion EdgeOCRSample/Views/BarcodeImage/BarcodeImageView.swift
@@ -20,7 +20,7 @@ struct BarcodeImageView: View {
width: UIScreen.main.bounds.width * 0.8,
height: UIScreen.main.bounds.height * 0.5)
Spacer().frame(height: 30)
BarcodeImageFotter(image: $image)
BarcodeImageFooter(image: $image)
}
}
}
26 changes: 12 additions & 14 deletions EdgeOCRSample/Views/BoxesOverlay/BoxesOverlayViewController.swift
@@ -32,19 +32,17 @@ class BoxesOverlayViewController: ViewController {

override func setupLayers() {
// Set up the guide that shows the detection area
let width = previewBounds.width
let height = previewBounds.width * CGFloat(aspectRatio)
let defaultCropRect = CropRect()
let coropHorizontalBias = defaultCropRect.horizontalBias
let cropVerticalBias = defaultCropRect.verticalBias
let cropWidth = defaultCropRect.width
let cropHeight = defaultCropRect.height
let width = viewBounds.width
let height = viewBounds.width * CGFloat(aspectRatio)
// Show the guide at the center of the screen, the default detection area
let cropHorizontalBias = 0.5
let cropVerticalBias = 0.5
guideLayer = CALayer()
guideLayer.frame = CGRect(
x: coropHorizontalBias * (previewBounds.width - width),
y: cropVerticalBias * (previewBounds.height - height),
width: cropWidth * width,
height: cropHeight * height)
x: cropHorizontalBias * (viewBounds.width - width),
y: cropVerticalBias * (viewBounds.height - height),
width: width,
height: height)

let borderWidth = 3.0
let boxColor = UIColor.green.cgColor
@@ -53,7 +51,7 @@

// Create the layer that displays detection results
detectionLayer = CALayer()
detectionLayer.frame = previewBounds
detectionLayer.frame = viewBounds

DispatchQueue.main.async { [weak self] in
if let layer = self?.previewLayer {
@@ -108,7 +106,7 @@
CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions)
detectionLayer.sublayers = nil
for detection in result.getTextDetections() {
let text = detection.getScanObject().getText()
let text = detection.getText()
if !text.isEmpty {
let bbox = detection.getBoundingBox()
drawDetection(bbox: bbox, text: text)
@@ -120,7 +118,7 @@
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
let scanResult: ScanResult
do {
scanResult = try edgeOCR.scanTexts(sampleBuffer, previewViewBounds: previewBounds)
scanResult = try edgeOCR.scan(sampleBuffer, viewBounds: viewBounds)
} catch {
os_log("Failed to scan texts: %@", type: .debug, error.localizedDescription)
return
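For text scanning the migration has the same shape: `scanTexts(_:previewViewBounds:)` becomes `scan(_:viewBounds:)`, and each detection exposes `getText()` directly instead of going through `getScanObject()`. A short sketch, again assuming `edgeOCR`, `sampleBuffer`, and `viewBounds` are in scope as in the controller above.

```swift
// 3.0.0-style text scan of a camera frame, as used in BoxesOverlayViewController.
let result = try edgeOCR.scan(sampleBuffer, viewBounds: viewBounds)
for detection in result.getTextDetections() where !detection.getText().isEmpty {
    print(detection.getText(), detection.getBoundingBox())
}
```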
6 changes: 3 additions & 3 deletions EdgeOCRSample/Views/Common/ViewController.swift
@@ -18,7 +18,7 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele
private let sessionQueue = DispatchQueue(label: "sessionQueue")
private lazy var videoOutput = AVCaptureVideoDataOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
var previewBounds: CGRect! = nil // for view dimensions
var viewBounds: CGRect! = nil // for view dimensions

public init() {
super.init(nibName: nil, bundle: nil)
@@ -146,7 +146,7 @@
y: UIScreen.main.bounds.height * 0.1,
width: UIScreen.main.bounds.width,
height: UIScreen.main.bounds.height * 0.75)
previewBounds = CGRect(
viewBounds = CGRect(
x: 0,
y: 0,
width: previewFrame.width,
@@ -163,7 +163,7 @@
DispatchQueue.main.async { [weak self] in
self!.previewLayer = AVCaptureVideoPreviewLayer(session: self!.captureSession)
self!.previewLayer.frame = previewFrame
self!.previewLayer.bounds = self!.previewBounds
self!.previewLayer.bounds = self!.viewBounds
self!.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
self!.view.layer.addSublayer(self!.previewLayer)
}