
Implementing Spatial Effects and Immersive Video Playback on visionOS / ai-immersive-visionos

CyberAgent
October 29, 2024


On Apple Vision Pro, playback of a video can drive the surrounding space: the environment can change and effects can appear in sync with the progress of the video.
Apple also distributes Immersive Video, highly immersive 180° stereoscopic footage.

Both leave a strong impression, and both could spread widely as techniques for advertising and artistic expression.

Alongside introducing these examples, this session shares what we investigated and built around how to implement and produce such content.

- Attaching metadata to HLS and triggering effects from it
- Characteristics of Immersive Video
- Creating and playing Immersive Video

https://cadc.cyberagent.co.jp/2024/sessions/ai-immersive-visionos/


Transcript

  1. Examples of spatial expression on visionOS (01)
     - Displaying 3D CG characters in the real-world space
     - Portal display and VR display
     - Hand tracking, etc.
     Example: "What If…? An Immersive Story"
  2. Examples of spatial expression on visionOS (01)
     - 3D CG characters
     - Portal display plus 3D objects in front of it
     - Interaction driven by hand movement
     Example: "Kung Fu Panda: School of Chi"
  3. $ id3taggenerator -o reset.id3 -t "c_reset"

     -o | -output-file <file>   Specifies the path where the generated ID3 tag is written.
     -t | -text <string>        Inserts a text frame with the given string.
  4. $ id3taggenerator -o reset.id3 -t "c_reset"
     $ id3taggenerator -o line_on.id3 -t "c_on_line_particle"
     $ id3taggenerator -o line_off.id3 -t "c_off_line_particle"
     $ id3taggenerator -o rain_on.id3 -t "c_on_rain_particle"
     $ id3taggenerator -o rain_off.id3 -t "c_off_rain_particle"
     $ id3taggenerator -o fireworks_on.id3 -t "c_on_fireworks_particle"
     $ id3taggenerator -o fireworks_off.id3 -t "c_off_fireworks_particle"
     $ id3taggenerator -o env_01_on.id3 -t "c_on_env_01"
     $ id3taggenerator -o env_01_off.id3 -t "c_off_env_01"
  5. Macro.txt (each line pairs a playback offset in seconds with the ID3 tag file to insert at that point):

     0    id3 ./reset.id3
     2    id3 ./line_on.id3
     10   id3 ./line_off.id3
     11.5 id3 ./env_01_on.id3
     20.5 id3 ./env_01_off.id3
     21   id3 ./rain_on.id3
     30   id3 ./rain_off.id3
     32   id3 ./fireworks_on.id3
     40   id3 ./fireworks_off.id3
     44   id3 ./reset.id3
  6. $ mediafilesegmenter -f ./output/ -i index.m3u8 -B media- -t 1 \
         -M ./macro.txt ./SpatialEffects001.mov

     -f | -file-base path              Directory to store the media and index files.
     -i | -index-file fileName         This option defines the index file name. The default is prog_index.m3u8. It is recommended that the index file have an extension of .m3u8 or .m3u.
     -B | -base-media-file-name name   This option defines the base name of the media files. The default is fileSequence. The current sequence number of the file is appended, and an extension added. For example, specifying name as AppleMediaFile will generate file names that look like AppleMediaFile12.ts.
     -t | -target-duration duration    Specifies a target duration for the media files. The default duration is 10 seconds. The duration is calculated by looking at the PTS/DTS in the source file.
     -M | -meta-macro-file file        Specifies the macro file to be used to insert timed metadata into the stream.
  7. SpatialEffectsVideoPlayerApp.swift

      @main
      struct SpatialEffectsVideoPlayerApp: App {
          @State private var appModel = AppModel()
          @State private var playerViewModel = AVPlayerViewModel()
          @State private var surroundingsEffect: SurroundingsEffect? = .semiDark

          var body: some Scene {
              WindowGroup {
                  if playerViewModel.isPlaying {
                      AVPlayerView(viewModel: playerViewModel)
                  } else {
                      ContentView()
                          .environment(appModel)
                  }
              }
              .windowResizability(.contentSize)
              .windowStyle(.plain)

              ImmersiveSpace(id: appModel.immersiveSpaceID) {
                  ImmersiveView()
                      .environment(appModel)
                      .environment(playerViewModel)
                      .onAppear { appModel.immersiveSpaceState = .open }
                      .onDisappear { appModel.immersiveSpaceState = .closed }
                      .preferredSurroundingsEffect(surroundingsEffect)
              }
              .immersionStyle(selection: .constant(.mixed), in: .mixed)
          }
      }
  9. AVPlayerView.swift

      import SwiftUI

      struct AVPlayerView: UIViewControllerRepresentable {
          let viewModel: AVPlayerViewModel

          func makeUIViewController(context: Context) -> some UIViewController {
              return viewModel.makePlayerViewController()
          }

          func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {
              // Update the AVPlayerViewController as needed
          }
      }
  10. AVPlayerViewModel.swift

      @Observable
      final class AVPlayerViewModel: NSObject {
          private(set) var isPlaying: Bool = false

          private var avPlayerViewController: AVPlayerViewController?
          private var avPlayer = AVPlayer()
          private let videoURL: URL? = {
              URL(string: "https://satoshi0212.github.io/hls/resources/index.m3u8")
          }()

          func makePlayerViewController() -> AVPlayerViewController {
              let controller = AVPlayerViewController()
              controller.player = avPlayer
              controller.delegate = self
              self.avPlayerViewController = controller
              self.avPlayerViewController?.delegate = self
              controller.modalPresentationStyle = .fullScreen
              return controller
          }

          func play() {
              guard !isPlaying, let videoURL else { return }
              isPlaying = true
              let item = AVPlayerItem(url: videoURL)
              let metadataOutput = AVPlayerItemMetadataOutput(identifiers: nil)
              metadataOutput.setDelegate(self, queue: DispatchQueue.main)
              item.add(metadataOutput)
              avPlayer.replaceCurrentItem(with: item)
              avPlayer.play()
          }

          func reset() {
              guard isPlaying else { return }
              isPlaying = false
              avPlayer.replaceCurrentItem(with: nil)
          }
      }
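     makePlayerViewController() assigns the view model as the AVPlayerViewController delegate, but the conformance itself is not on any slide. Something like the following extension is assumed to exist elsewhere in the project; which delegate callbacks it implements, if any, is not shown in the deck:

      // Assumed, not shown in the deck: the conformance required by `controller.delegate = self`.
      extension AVPlayerViewModel: AVPlayerViewControllerDelegate {}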
  13. ImmersiveView.swift

      struct ImmersiveView: View {
          @Environment(AVPlayerViewModel.self) private var playerViewModel
          @State var immersiveViewModel = ImmersiveViewModel()

          var body: some View {
              ZStack {
                  RealityView { content in
                      let entity = Entity()
                      content.add(entity)
                      immersiveViewModel.setup(entity: entity)
                  }
                  .gesture(SpatialTapGesture().targetedToAnyEntity()
                      .onEnded { value in
                          if value.entity.name == "StartButton" {
                              playerViewModel.play()
                          }
                      }
                  )
                  .onChange(of: playerViewModel.isPlaying, initial: false) { _, newValue in
                      immersiveViewModel.rootEntity?.getFirstChildByName(name: "StartButton")?.isEnabled = !newValue
                  }
                  .onDisappear {
                      playerViewModel.reset()
                  }
                  .transition(.opacity)
                  ...
              }
          }
      }
  15. AVPlayerViewModel.swift

      extension AVPlayerViewModel: AVPlayerItemMetadataOutputPushDelegate {
          func metadataOutput(_ output: AVPlayerItemMetadataOutput,
                              didOutputTimedMetadataGroups groups: [AVTimedMetadataGroup],
                              from track: AVPlayerItemTrack?) {
              if let item = groups.first?.items.first,
                 let metadataValue = item.value(forKey: "value") as? String {
                  print("Metadata value: \(metadataValue)")
                  // videoAction = VideoAction(rawValue: metadataValue) ?? .none
              }
          }
      }
  16. LineParticleView.swift

      import SwiftUI
      import RealityKit

      struct LineParticleView: View {
          static let viewName = "LineParticleView"
          @State var viewModel = LineParticleViewModel()

          var body: some View {
              RealityView { content in
                  let entity = Entity()
                  content.add(entity)
                  viewModel.setup(entity: entity)
              }
          }
      }
  17. RainParticleView.swift

      import SwiftUI
      import RealityKit

      struct RainParticleView: View {
          static let viewName = "RainParticleView"
          @State var viewModel = RainParticleViewModel()

          var body: some View {
              RealityView { content in
                  let entity = Entity()
                  content.add(entity)
                  viewModel.setup(entity: entity)
              }
          }
      }
  18. FireworksParticleView.swift

      import SwiftUI
      import RealityKit

      struct FireworksParticleView: View {
          static let viewName = "FireworksParticleView"
          @State var viewModel = FireworksParticleViewModel()

          var body: some View {
              RealityView { content in
                  let entity = Entity()
                  content.add(entity)
                  viewModel.setup(entity: entity)
              }
          }
      }
  19. Env01View.swift

      import SwiftUI
      import RealityKit

      struct Env01View: View {
          static let viewName = "Env01View"
          @State var viewModel = Env01ViewModel()

          var body: some View {
              RealityView { content in
                  let entity = Entity()
                  content.add(entity)
                  viewModel.setup(entity: entity)
              }
          }
      }
  20. LineParticleViewModel.swift

      import RealityKit
      import Observation
      import RealityKitContent

      @MainActor
      @Observable
      final class LineParticleViewModel: LiveSequenceOperation {
          private var rootEntity: Entity?

          func setup(entity: Entity) {
              rootEntity = entity
              rootEntity?.opacity = 0.0
              Task {
                  guard let scene = try? await Entity(named: "LineParticle", in: realityKitContentBundle),
                        let particleEntity = scene.findEntity(named: "ParticleEmitter") else { return }
                  particleEntity.name = "lineParticle"
                  particleEntity.position = [0.0, 1.2, -0.8]
                  rootEntity?.addChild(particleEntity)
              }
          }
          ...
  24. LineParticleViewModel.swift

          ...
          func reset() {
              rootEntity?.opacity = 0.0
          }

          func play() {
              rootEntity?.getFirstChildByName(name: "lineParticle")?.isEnabled = true
          }

          func fadeIn() {
              Task { await rootEntity?.setOpacity(1.0, animated: true, duration: 0.4) }
          }

          func fadeOut() {
              Task { await rootEntity?.setOpacity(0.0, animated: true, duration: 0.4) }
          }
      }
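     Each effect view model declares conformance to LiveSequenceOperation, a protocol the deck references but never shows. Judging from the members implemented above and from the [String: LiveSequenceOperation] dictionary used later in ImmersiveViewModel, a plausible reconstruction (an assumption, not the author's definition) is:

      import RealityKit

      // Hypothetical reconstruction of the protocol referenced by the slides;
      // the actual definition is not included in the deck.
      @MainActor
      protocol LiveSequenceOperation {
          func setup(entity: Entity)
          func play()
          func reset()
          func fadeIn()
          func fadeOut()
      }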
  26. Entity extension

      @MainActor
      func setOpacity(_ opacity: Float,
                      animated: Bool,
                      duration: TimeInterval = 0.2,
                      delay: TimeInterval = 0,
                      completion: (() -> Void) = {}) async {
          guard animated, let scene else {
              self.opacity = opacity
              return
          }
          if !components.has(OpacityComponent.self) {
              components[OpacityComponent.self] = OpacityComponent(opacity: 1.0)
          }
          let animation = FromToByAnimation(name: "Entity/setOpacity",
                                            to: opacity,
                                            duration: duration,
                                            timing: .linear,
                                            isAdditive: false,
                                            bindTarget: .opacity,
                                            delay: delay)
          do {
              let animationResource: AnimationResource = try .generate(with: animation)
              let animationPlaybackController = playAnimation(animationResource)
              let filtered = scene.publisher(for: AnimationEvents.PlaybackTerminated.self)
                  .filter { $0.playbackController == animationPlaybackController }
              _ = filtered.values.filter { await $0.playbackController.isComplete }
              completion()
          } catch {
              print("Could not generate animation: \(error.localizedDescription)")
          }
      }
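     setOpacity(_:animated:duration:delay:completion:) lives in an Entity extension, and the view models also rely on an opacity accessor and a getFirstChildByName(name:) lookup that the deck never shows. A minimal sketch of those two helpers, assuming opacity is backed by OpacityComponent and the lookup is a recursive child search:

      import RealityKit

      // Hypothetical helpers assumed by the slides; their definitions are not in the deck.
      extension Entity {
          // Read/write the entity's opacity via OpacityComponent.
          var opacity: Float {
              get { components[OpacityComponent.self]?.opacity ?? 1.0 }
              set { components[OpacityComponent.self] = OpacityComponent(opacity: newValue) }
          }

          // Depth-first search for a descendant entity with the given name.
          func getFirstChildByName(name: String) -> Entity? {
              for child in children {
                  if child.name == name { return child }
                  if let match = child.getFirstChildByName(name: name) { return match }
              }
              return nil
          }
      }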
  28. RainParticleViewModel.swift

      @MainActor
      @Observable
      final class RainParticleViewModel: LiveSequenceOperation {
          private var rootEntity: Entity?

          func setup(entity: Entity) {
              rootEntity = entity
              rootEntity?.opacity = 0.0

              let skyBoxEntity = Entity()
              skyBoxEntity.components.set(ModelComponent(
                  mesh: .generateSphere(radius: 1000),
                  materials: [UnlitMaterial(color: .black)]
              ))
              skyBoxEntity.scale *= .init(x: -1, y: 1, z: 1)
              rootEntity?.addChild(skyBoxEntity)

              Task {
                  if let scene = try? await Entity(named: "RainParticle", in: realityKitContentBundle) {
                      let particleEntity = scene.findEntity(named: "ParticleEmitter")!
                      particleEntity.name = "rainParticle"
                      particleEntity.position = [0.0, 3.0, -2.0]
                      rootEntity?.addChild(particleEntity)
                  }
              }
          }
          ...
  30. RainParticleViewModel.swift

          ...
          func reset() {
              rootEntity?.opacity = 0.0
          }

          func play() {
              rootEntity?.getFirstChildByName(name: "rainParticle")?.isEnabled = true
          }

          func fadeIn() {
              Task { await rootEntity?.setOpacity(1.0, animated: true, duration: 1.4) }
          }

          func fadeOut() {
              Task { await rootEntity?.setOpacity(0.0, animated: true, duration: 1.4) }
          }
      }
  31. FireworksParticleViewModel.swift

      @MainActor
      @Observable
      final class FireworksParticleViewModel: LiveSequenceOperation {
          private var rootEntity: Entity?

          func setup(entity: Entity) {
              rootEntity = entity
              rootEntity?.opacity = 0.0
              Task {
                  guard let scene = try? await Entity(named: "Fireworks", in: realityKitContentBundle) else { return }
                  rootEntity?.addChild(scene)
              }
          }
          ...
      }
  32. Env01ViewModel.swift

      @MainActor
      @Observable
      final class Env01ViewModel: LiveSequenceOperation {
          private var rootEntity: Entity?

          func setup(entity: Entity) {
              rootEntity = entity
              rootEntity?.opacity = 0.0
              Task {
                  guard let scene = try? await Entity(named: "Env_01", in: realityKitContentBundle) else { return }
                  rootEntity?.addChild(scene)
              }
          }
          ...
      }
  33. ImmersiveViewModel.swift

      @MainActor
      @Observable
      class ImmersiveViewModel {
          private(set) var rootEntity: Entity?

          let lineParticleView: LineParticleView = .init()
          let rainParticleView: RainParticleView = .init()
          let fireworksParticleView: FireworksParticleView = .init()
          let env01View: Env01View = .init()

          @ObservationIgnored
          private lazy var effectViewModels: [String: LiveSequenceOperation] = {
              return [
                  LineParticleView.viewName: self.lineParticleView.viewModel,
                  RainParticleView.viewName: self.rainParticleView.viewModel,
                  FireworksParticleView.viewName: self.fireworksParticleView.viewModel,
                  Env01View.viewName: self.env01View.viewModel,
              ]
          }()
          ...
  34. ImmersiveView.swift

      struct ImmersiveView: View {
          @Environment(AVPlayerViewModel.self) private var playerViewModel
          @State var immersiveViewModel = ImmersiveViewModel()

          var body: some View {
              ZStack {
                  RealityView { content in
                      let entity = Entity()
                      content.add(entity)
                      immersiveViewModel.setup(entity: entity)
                  }
                  .gesture(SpatialTapGesture().targetedToAnyEntity()
                      .onEnded { value in
                          if value.entity.name == "StartButton" {
                              playerViewModel.play()
                          }
                      }
                  )
                  .onChange(of: playerViewModel.videoAction, initial: true) { oldValue, newValue in
                      immersiveViewModel.processVideoAction(oldValue: oldValue, newValue: newValue)
                  }
                  .onChange(of: playerViewModel.isPlaying, initial: false) { _, newValue in
                      immersiveViewModel.rootEntity?.getFirstChildByName(name: "StartButton")?.isEnabled = !newValue
                  }
                  .onDisappear {
                      playerViewModel.reset()
                  }
                  .transition(.opacity)

                  // place effect views
                  immersiveViewModel.lineParticleView
                  immersiveViewModel.rainParticleView
                  immersiveViewModel.fireworksParticleView
                  immersiveViewModel.env01View
              }
          }
      }
  36. enum VideoAction: String {
          case none
          case c_reset
          case c_on_line_particle
          case c_off_line_particle
          case c_on_rain_particle
          case c_off_rain_particle
          case c_on_fireworks_particle
          case c_off_fireworks_particle
          case c_on_env_01
          case c_off_env_01
      }
  37. AVPlayerViewModel.swift

      extension AVPlayerViewModel: AVPlayerItemMetadataOutputPushDelegate {
          func metadataOutput(_ output: AVPlayerItemMetadataOutput,
                              didOutputTimedMetadataGroups groups: [AVTimedMetadataGroup],
                              from track: AVPlayerItemTrack?) {
              if let item = groups.first?.items.first,
                 let metadataValue = item.value(forKey: "value") as? String {
                  print("Metadata value: \(metadataValue)")
                  videoAction = VideoAction(rawValue: metadataValue) ?? .none
              }
          }
      }
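     The delegate now assigns to videoAction, a property that is not part of the AVPlayerViewModel class shown earlier, and ImmersiveView observes it via onChange. Presumably the class gains a declaration along these lines (assumed, not shown in the deck):

      // Assumed addition to AVPlayerViewModel.swift: the observable property the
      // metadata delegate writes and ImmersiveView watches.
      var videoAction: VideoAction = .none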
  40. ImmersiveViewModel.swift

      func processVideoAction(oldValue: VideoAction = .none, newValue: VideoAction = .none) {
          // avoid continuous firing of actions other than reset action
          if newValue != .c_reset && oldValue == newValue { return }
          switch newValue {
          case .none:
              break
          case .c_reset:
              resetAction()
          case .c_on_line_particle:
              Task {
                  await play(viewName: LineParticleView.viewName)
                  await fadeIn(viewName: LineParticleView.viewName)
              }
          case .c_off_line_particle:
              Task {
                  await fadeOut(viewName: LineParticleView.viewName)
              }
          case .c_on_rain_particle:
              ...
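     processVideoAction drives the effects through play(viewName:), fadeIn(viewName:), fadeOut(viewName:), and resetAction(), none of which appear in the deck. Given the effectViewModels dictionary on the ImmersiveViewModel slide, they are presumably thin dispatch helpers roughly like the following sketch (names taken from the call sites, bodies assumed):

      // Hypothetical dispatch helpers on ImmersiveViewModel; the deck shows only their call sites.
      func play(viewName: String) async {
          effectViewModels[viewName]?.play()
      }

      func fadeIn(viewName: String) async {
          effectViewModels[viewName]?.fadeIn()
      }

      func fadeOut(viewName: String) async {
          effectViewModels[viewName]?.fadeOut()
      }

      private func resetAction() {
          // Return every effect to its initial, invisible state.
          effectViewModels.values.forEach { $0.reset() }
      }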
  42. Implementing an Immersive Video viewer (03)
      Apple Immersive Video: 8K 3D video content with a 180-degree field of view and Spatial Audio, viewable on Apple Vision Pro.
      https://www.apple.com/jp/apple-vision-pro/
  43. Implementing an Immersive Video viewer (03)
      "Submerged" is the first scripted short film shot in Apple Immersive Video. Viewers ride along in a submarine as its crew fights desperately to survive a fierce torpedo attack.
      https://www.apple.com/jp/newsroom/2024/07/new-apple-immersive-video-series-and-films-premiere-on-vision-pro/
      (Released on 2024/10/11.)
  44. static func getVideoInfo(asset: AVAsset) async -> VideoInfo? {
          let videoInfo = VideoInfo()

          guard let videoTrack = try? await asset.loadTracks(withMediaType: .video).first else {
              print("No video track found")
              return nil
          }
          guard let (naturalSize, formatDescriptions, mediaCharacteristics) =
                  try? await videoTrack.load(.naturalSize, .formatDescriptions, .mediaCharacteristics),
                let formatDescription = formatDescriptions.first else {
              print("Failed to load video properties")
              return nil
          }

          videoInfo.size = naturalSize
          videoInfo.isSpatial = mediaCharacteristics.contains(.containsStereoMultiviewVideo)

          let projection = VideoTools.getProjection(formatDescription: formatDescription)
          videoInfo.projectionType = projection.projectionType
          videoInfo.horizontalFieldOfView = projection.horizontalFieldOfView

          return videoInfo
      }
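     getVideoInfo(asset:) fills a VideoInfo object whose definition is not in the deck. From the properties assigned here and read on the later slides, a minimal reconstruction might look like this (an assumption):

      import CoreGraphics
      import CoreMedia

      // Hypothetical container matching the properties the slides assign and read;
      // the actual definition is not included in the deck.
      final class VideoInfo {
          var size: CGSize = .zero
          var isSpatial: Bool = false
          var projectionType: CMProjectionType?
          var horizontalFieldOfView: Float?
      }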
  48. static func getProjection(formatDescription: CMFormatDescription)
          -> (projectionType: CMProjectionType?, horizontalFieldOfView: Float?) {
          var projectionType: CMProjectionType?
          var horizontalFieldOfView: Float?

          if let extensions = CMFormatDescriptionGetExtensions(formatDescription) as Dictionary? {
              if let projectionKind = extensions["ProjectionKind" as CFString] as? String {
                  projectionType = CMProjectionType(fromString: projectionKind) ?? .rectangular
              }
              if let horizontalFieldOfViewValue = extensions[kCMFormatDescriptionExtension_HorizontalFieldOfView] as? UInt32 {
                  horizontalFieldOfView = Float(horizontalFieldOfViewValue) / 1000.0
              }
          }
          return (projectionType, horizontalFieldOfView)
      }
  49. With the sample footage, ProjectionKind is HalfEquirectangular, as expected.
  50. HorizontalFieldOfView also comes back as the expected value.
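     CMProjectionType(fromString:) is a custom initializer rather than a CoreMedia API, and the deck does not show it. A sketch of what it might look like, assuming CoreMedia's CMProjectionType exposes the matching cases; only the "HalfEquirectangular" string is confirmed by the slides, the other spellings are guesses:

      import CoreMedia

      // Hypothetical mapping from the ProjectionKind string in the format description
      // extensions to CoreMedia's CMProjectionType.
      extension CMProjectionType {
          init?(fromString string: String) {
              switch string {
              case "HalfEquirectangular":          // value observed in the sample footage (slide 49)
                  self = .halfEquirectangular
              case "Equirectangular":              // assumed spelling
                  self = .equirectangular
              case "Fisheye":                      // assumed spelling
                  self = .fisheye
              case "Rectangular", "Rectilinear":   // assumed spellings
                  self = .rectangular
              default:
                  return nil
              }
          }
      }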
  51. static func makeVideoMesh(videoInfo: VideoInfo) async -> (mesh: MeshResource, transform: Transform)? {
          let horizontalFieldOfView = videoInfo.horizontalFieldOfView ?? 65.0

          let mesh = VideoTools.generateVideoSphere(
              radius: 10000.0,
              sourceHorizontalFov: horizontalFieldOfView,
              sourceVerticalFov: 180.0,
              clipHorizontalFov: horizontalFieldOfView,
              clipVerticalFov: 180.0,
              verticalSlices: 60,
              horizontalSlices: Int(horizontalFieldOfView) / 3)

          let transform = Transform(
              scale: .init(x: 1, y: 1, z: 1),
              rotation: .init(angle: -Float.pi / 2, axis: .init(x: 0, y: 1, z: 0)),
              translation: .init(x: 0, y: 0, z: 0))

          return (mesh: mesh!, transform: transform)
      }
  52. static func generateVideoSphere(
          radius: Float,
          sourceHorizontalFov: Float,
          sourceVerticalFov: Float,
          clipHorizontalFov: Float,
          clipVerticalFov: Float,
          verticalSlices: Int,
          horizontalSlices: Int
      ) -> MeshResource? {
          // Vertices
          ...
          // Normals
          ...
          // UVs
          ...
          // Indices
          ...
          var meshDescriptor = MeshDescriptor(name: "proceduralMesh")
          meshDescriptor.positions = MeshBuffer(vertices)
          meshDescriptor.normals = MeshBuffer(normals)
          meshDescriptor.primitives = .triangles(indices)
          meshDescriptor.textureCoordinates = MeshBuffer(uvCoordinates)

          let mesh = try? MeshResource.generate(from: [meshDescriptor])
          return mesh
      }
  53. // Vertices
      var vertices: [simd_float3] = Array(
          repeating: simd_float3(),
          count: (verticalSlices + 1) * (horizontalSlices + 1))

      let verticalScale: Float = clipVerticalFov / 180.0
      let verticalOffset: Float = (1.0 - verticalScale) / 2.0
      let horizontalScale: Float = clipHorizontalFov / 360.0
      let horizontalOffset: Float = (1.0 - horizontalScale) / 2.0

      for y: Int in 0...horizontalSlices {
          let angle1 = ((Float.pi * (Float(y) / Float(horizontalSlices))) * verticalScale) + (verticalOffset * Float.pi)
          let sin1 = sin(angle1)
          let cos1 = cos(angle1)
          for x: Int in 0...verticalSlices {
              let angle2 = ((Float.pi * 2 * (Float(x) / Float(verticalSlices))) * horizontalScale) + (horizontalOffset * Float.pi * 2)
              let sin2 = sin(angle2)
              let cos2 = cos(angle2)
              vertices[x + (y * (verticalSlices + 1))] = SIMD3<Float>(sin1 * cos2 * radius, cos1 * radius, sin1 * sin2 * radius)
          }
      }
  54. // Normals
      var normals: [SIMD3<Float>] = []
      for vertex in vertices {
          normals.append(-normalize(vertex)) // Invert to show on inside of sphere
      }

      // UVs
      var uvCoordinates: [simd_float2] = Array(repeating: simd_float2(), count: vertices.count)
      let uvHorizontalScale = clipHorizontalFov / sourceHorizontalFov
      let uvHorizontalOffset = (1.0 - uvHorizontalScale) / 2.0
      let uvVerticalScale = clipVerticalFov / sourceVerticalFov
      let uvVerticalOffset = (1.0 - uvVerticalScale) / 2.0

      for y in 0...horizontalSlices {
          for x in 0...verticalSlices {
              var uv: simd_float2 = [
                  (Float(x) / Float(verticalSlices)),
                  1.0 - (Float(y) / Float(horizontalSlices)),
              ]
              uv.x = (uv.x * uvHorizontalScale) + uvHorizontalOffset
              uv.y = (uv.y * uvVerticalScale) + uvVerticalOffset
              uvCoordinates[x + (y * (verticalSlices + 1))] = uv
          }
      }
  56. // Indices
      var indices: [UInt32] = []
      for y in 0..<horizontalSlices {
          for x in 0..<verticalSlices {
              let current: UInt32 = UInt32(x) + (UInt32(y) * UInt32(verticalSlices + 1))
              let next: UInt32 = current + UInt32(verticalSlices + 1)
              indices.append(current + 1)
              indices.append(current)
              indices.append(next + 1)
              indices.append(next + 1)
              indices.append(current)
              indices.append(next)
          }
      }
  58. @State private var player: AVPlayer = AVPlayer()
      @State private var videoMaterial: VideoMaterial?

      RealityView { content in
          guard let url = viewModel.videoURL else { return }
          let asset = AVURLAsset(url: url)
          let playerItem = AVPlayerItem(asset: asset)

          guard let videoInfo = await VideoTools.getVideoInfo(asset: asset) else { return }
          viewModel.videoInfo = videoInfo
          viewModel.isSpatialVideoAvailable = videoInfo.isSpatial

          guard let (mesh, transform) = await VideoTools.makeVideoMesh(videoInfo: videoInfo) else { return }

          videoMaterial = VideoMaterial(avPlayer: player)
          guard let videoMaterial else { return }

          let videoEntity = Entity()
          videoEntity.components.set(ModelComponent(mesh: mesh, materials: [videoMaterial]))
          videoEntity.transform = transform
          content.add(videoEntity)

          player.replaceCurrentItem(with: playerItem)
          player.play()
      }
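     The RealityView above reads videoURL, videoInfo, and isSpatialVideoAvailable from a viewModel that the deck never defines. A minimal sketch of such a state holder; only the property names come from the slides, the type name and everything else are assumptions:

      import Foundation
      import Observation

      // Hypothetical view model backing the immersive video player view;
      // only the property names are taken from the slides.
      @Observable
      final class ImmersiveVideoPlayerViewModel {
          var videoURL: URL?                        // location of the half-equirectangular video to play
          var videoInfo: VideoInfo?                 // filled from VideoTools.getVideoInfo(asset:)
          var isSpatialVideoAvailable: Bool = false
      }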