//
// ContentView.swift
// VideoCapture
//
// https://zenn.dev/yorifuji/articles/swiftui-avfoundation
//
import SwiftUI
import AVFoundation
import CoreMotion
/// Live camera preview with on-device image classification and a
/// device-motion (attitude) readout.
///
/// Based on: https://zenn.dev/yorifuji/articles/swiftui-avfoundation
struct ContentView: View {
    /// Wraps the AVFoundation capture session; delivers frames via `run`.
    let videoCapture = VideoCapture()
    /// Supplies device-motion samples while `start()` is active.
    let motionManager = CMMotionManager()
    /// Shared rendering context for frame conversion. CIContext creation is
    /// expensive, so build it once instead of once per captured frame.
    let ciContext = CIContext()

    // Current attitude sample. NOTE(review): the UI labels these "accX/Y/Z",
    // but `updateMotionData` stores attitude angles (pitch/roll/yaw, radians),
    // not acceleration — confirm which is intended and relabel accordingly.
    @State var x = 0.0
    @State var y = 0.0
    @State var z = 0.0
    // Previous attitude sample (kept for frame-to-frame comparison).
    @State var px = 5.0
    @State var py = 5.0
    @State var pz = 5.0
    /// Most recent camera frame, converted for display.
    @State var image: UIImage? = nil
    /// Publishes `imageClass` with the latest classification result.
    /// NOTE(review): this view creates the object itself, so `@StateObject`
    /// is the correct wrapper on iOS 14+; kept as `@ObservedObject` here to
    /// avoid raising the deployment target.
    @ObservedObject var classifier: ImageClassifier = ImageClassifier()

    // TODO: once a classification result is shown, add "Retry"/confirm
    // buttons in place of the always-visible run/stop pair (earlier draft
    // removed from this file).
    var body: some View {
        VStack {
            if let image = image {
                Image(uiImage: image)
                    .resizable()
                    .scaledToFit()
            }
            HStack {
                Button("run") {
                    start()
                    videoCapture.run { sampleBuffer in
                        guard let convertImage = UIImageFromSampleBuffer(sampleBuffer) else { return }
                        // Publish the frame AND classify it on the main queue:
                        // `image` is @State and `classifier` drives the UI, so
                        // neither may be touched from the capture thread.
                        // Classifying `convertImage` (not the stored `image`)
                        // avoids the original stale-frame / force-unwrap bug.
                        DispatchQueue.main.async {
                            self.image = convertImage
                            classifier.detect(uiImage: convertImage)
                        }
                    }
                }
                // Stop delivering capture frames; the last frame stays on screen.
                Button("stop") {
                    videoCapture.stop()
                }
            }
            .font(.largeTitle)
            // Latest classification result (detected apparatus), if any.
            if let imageClass = classifier.imageClass {
                Text(imageClass).font(.largeTitle)
            }
            Text("accX: \(x)")
            Text("accY: \(y)")
            Text("accZ: \(z)")
        }
    }

    /// Converts a captured `CMSampleBuffer` into a `UIImage`.
    /// - Returns: the rendered frame, or `nil` when the buffer carries no
    ///   pixel data or CGImage creation fails.
    func UIImageFromSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let imageRect = CGRect(x: 0, y: 0,
                               width: CVPixelBufferGetWidth(pixelBuffer),
                               height: CVPixelBufferGetHeight(pixelBuffer))
        guard let cgImage = ciContext.createCGImage(ciImage, from: imageRect) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    /// Begins device-motion updates (one sample per second); each sample is
    /// forwarded to `updateMotionData`. No-op when device motion is unavailable.
    func start() {
        guard motionManager.isDeviceMotionAvailable else { return }
        motionManager.deviceMotionUpdateInterval = 1.0
        // Deliver on the main queue: the handler mutates @State properties.
        // (The original `OperationQueue.current!` crashes when no current
        // queue exists and is not guaranteed to be the main queue.)
        motionManager.startDeviceMotionUpdates(to: .main) { motion, _ in
            guard let motion = motion else { return }
            self.updateMotionData(deviceMotion: motion)
        }
    }

    /// Records the latest attitude sample, preserving the prior one in
    /// `px`/`py`/`pz` before overwriting.
    func updateMotionData(deviceMotion: CMDeviceMotion) {
        px = x
        py = y
        pz = z
        // Attitude angles in radians (despite the "acc" labels in the UI).
        x = deviceMotion.attitude.pitch
        y = deviceMotion.attitude.roll
        z = deviceMotion.attitude.yaw
    }
}
/// Xcode canvas preview provider for `ContentView`.
struct ContentView_Previews: PreviewProvider {
    static var previews: some View { ContentView() }
}