iOS 15 Person Segmentation test #99

Draft: wants to merge 2 commits into main
10 changes: 6 additions & 4 deletions RealityMixer.xcodeproj/project.pbxproj
@@ -752,16 +752,17 @@
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_STYLE = Automatic;
-CURRENT_PROJECT_VERSION = 18;
+CURRENT_PROJECT_VERSION = 19;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
HEADER_SEARCH_PATHS = "";
INFOPLIST_FILE = "$(SRCROOT)/RealityMixer/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
LIBRARY_SEARCH_PATHS = "$(inherited)";
-MARKETING_VERSION = 0.6.0;
+MARKETING_VERSION = 0.7.0;
PRODUCT_BUNDLE_IDENTIFIER = "$(ORGANIZATION_IDENTIFIER).reality.mixer";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "RealityMixer/RealityMixer-Bridging-Header.h";
@@ -779,16 +780,17 @@
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_STYLE = Automatic;
-CURRENT_PROJECT_VERSION = 18;
+CURRENT_PROJECT_VERSION = 19;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
HEADER_SEARCH_PATHS = "";
INFOPLIST_FILE = "$(SRCROOT)/RealityMixer/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
LIBRARY_SEARCH_PATHS = "$(inherited)";
-MARKETING_VERSION = 0.6.0;
+MARKETING_VERSION = 0.7.0;
PRODUCT_BUNDLE_IDENTIFIER = "$(ORGANIZATION_IDENTIFIER).reality.mixer";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "RealityMixer/RealityMixer-Bridging-Header.h";
10 changes: 5 additions & 5 deletions RealityMixer/Capture/Misc/ARConfigurationFactory.swift
@@ -38,11 +38,11 @@ final class ARConfigurationFactory {
private func buildPersonSegmentationConfiguration() -> ARConfiguration {
let configuration = buildWorldTrackingConfiguration()

-if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth) {
-configuration.frameSemantics.insert(.personSegmentationWithDepth)
-} else if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentation) {
-configuration.frameSemantics.insert(.personSegmentation)
-}
+// if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth) {
+// configuration.frameSemantics.insert(.personSegmentationWithDepth)
+// } else if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentation) {
+// configuration.frameSemantics.insert(.personSegmentation)
+// }

return configuration
}
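Note: with these lines commented out, the AR configuration no longer requests ARKit's built-in person-segmentation frame semantics; the mask is instead produced per frame with Vision in MixedRealityViewController (see below). A minimal sketch of how the ARKit path could be kept around for comparison, assuming a hypothetical useARKitSegmentation flag that is not part of this PR:

// Sketch (not part of this PR): keep the ARKit frame-semantics request behind a flag
// so the Vision-based path and the ARKit path can be compared on device.
if useARKitSegmentation {
    if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth) {
        configuration.frameSemantics.insert(.personSegmentationWithDepth)
    } else if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentation) {
        configuration.frameSemantics.insert(.personSegmentation)
    }
}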
2 changes: 1 addition & 1 deletion RealityMixer/Capture/Misc/ARKitHelpers.swift
@@ -63,7 +63,7 @@ struct ARKitHelpers {
textureCache: CVMetalTextureCache?
) -> MTLTexture? {
guard let textureCache = textureCache,
-planeIndex >= 0, planeIndex < CVPixelBufferGetPlaneCount(pixelBuffer) //,
+planeIndex >= 0/*, planeIndex < CVPixelBufferGetPlaneCount(pixelBuffer)*/ //,
// CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
else {
return nil
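Note: the plane-count check is disabled here, presumably because the one-component mask buffers produced by Vision (kCVPixelFormatType_OneComponent8) are non-planar, so CVPixelBufferGetPlaneCount returns 0 and the old guard would reject them. A sketch of a guard that keeps validation for both cases, offered as an assumption rather than the PR's approach:

// Sketch: accept planar camera buffers (bi-planar YCbCr, plane count 2) as well as the
// non-planar one-component mask buffers Vision returns (plane count 0).
let planeCount = CVPixelBufferGetPlaneCount(pixelBuffer)
guard let textureCache = textureCache,
      planeIndex >= 0,
      (planeCount == 0 ? planeIndex == 0 : planeIndex < planeCount)
else {
    return nil
}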
23 changes: 23 additions & 0 deletions RealityMixer/Capture/Shaders/Shaders.swift
@@ -141,6 +141,29 @@ struct Shaders {
"""
}

+static func surfaceSegmentation() -> String {
+"""
+\(yCrCbToRGB)
+
+#pragma body
+
+float luma = texture2D(u_transparentTexture, _surface.diffuseTexcoord).r;
+vec2 chroma = texture2D(u_diffuseTexture, _surface.diffuseTexcoord).rg;
+
+vec4 textureColor = yCbCrToRGB(luma, chroma);
+_surface.diffuse = textureColor;
+
+float maskTextureValue = texture2D(u_ambientTexture, _surface.diffuseTexcoord).r;
+_surface.ambient = vec4(1.0, 1.0, 1.0, 1.0);
+
+if (maskTextureValue > 0.5) {
+_surface.transparent = vec4(0.0, 0.0, 0.0, 1.0);
+} else {
+_surface.transparent = vec4(1.0, 1.0, 1.0, 1.0);
+}
+"""
+}

static func surfaceChromaKeyConfiguration() -> String {
"""
\(yCrCbToRGB)
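Note: the new surfaceSegmentation shader modifier rebuilds the camera image from the luma (u_transparentTexture) and chroma (u_diffuseTexture) planes and uses the segmentation mask bound to u_ambientTexture to drive SceneKit's .rgbZero transparency mode: mask values above 0.5 (person) write black into _surface.transparent, which is opaque under .rgbZero, while everything else writes white and becomes fully transparent. The interpolated yCrCbToRGB snippet already exists elsewhere in Shaders.swift and is not shown in this diff; a rough sketch of what such a helper typically looks like (full-range BT.601 conversion, an assumption about the real implementation):

// Sketch only: the actual yCrCbToRGB string in Shaders.swift may differ.
static let yCrCbToRGB = """
vec4 yCbCrToRGB(float luma, vec2 chroma) {
    float cb = chroma.r - 0.5;
    float cr = chroma.g - 0.5;
    return vec4(luma + 1.402 * cr,
                luma - 0.344 * cb - 0.714 * cr,
                luma + 1.772 * cb,
                1.0);
}
"""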
MixedRealityViewController.swift
@@ -8,6 +8,7 @@
import UIKit
import ARKit
import AVFoundation
+import Vision
import SwiftSocket

final class MixedRealityViewController: UIViewController {
@@ -162,9 +163,16 @@ final class MixedRealityViewController {
}

private func configureMiddle(with frame: ARFrame) {
-guard case .greenScreen = configuration.captureMode,
-let chromaConfiguration = chromaConfiguration
-else { return }

+if case .greenScreen = configuration.captureMode,
+let chromaConfiguration = chromaConfiguration {
+configureGreenScreenMiddle(with: frame, chromaConfiguration: chromaConfiguration)
+} else if case .personSegmentation = configuration.captureMode {
+configurePersonSegmentationMiddle(with: frame)
+}
+}
+
+private func configureGreenScreenMiddle(with frame: ARFrame, chromaConfiguration: ChromaKeyConfiguration) {
let middlePlaneNode = ARKitHelpers.makePlaneNodeForDistance(0.02, frame: frame)

middlePlaneNode.geometry?.firstMaterial?.transparencyMode = .rgbZero
@@ -202,6 +210,21 @@
self.middlePlaneNode = middlePlaneNode
}

+private func configurePersonSegmentationMiddle(with frame: ARFrame) {
+let middlePlaneNode = ARKitHelpers.makePlaneNodeForDistance(0.02, frame: frame)
+
+middlePlaneNode.geometry?.firstMaterial?.transparencyMode = .rgbZero
+
+middlePlaneNode.geometry?.firstMaterial?.shaderModifiers = [
+.surface: Shaders.surfaceSegmentation()
+]
+
+middlePlaneNode.geometry?.firstMaterial?.ambient.contents = UIColor.white
+
+sceneView.pointOfView?.addChildNode(middlePlaneNode)
+self.middlePlaneNode = middlePlaneNode
+}
+
private func configureForeground(with frame: ARFrame) {
guard case .visible(let useMagentaAsTransparency) = configuration.foregroundLayerOptions.visibility else { return }
let foregroundPlaneNode = ARKitHelpers.makePlaneNodeForDistance(0.01, frame: frame)
@@ -243,14 +266,51 @@
}

private func updateMiddle(with pixelBuffer: CVPixelBuffer) {
-guard case .greenScreen = configuration.captureMode else { return }
+switch configuration.captureMode {
+case .greenScreen:
+updateChromaKeyMiddle(with: pixelBuffer)
+case .personSegmentation:
+updatePersonSegmentationMiddle(with: pixelBuffer)
+default:
+break
+}
+}
+
+private func updateChromaKeyMiddle(with pixelBuffer: CVPixelBuffer) {
let luma = ARKitHelpers.texture(from: pixelBuffer, format: .r8Unorm, planeIndex: 0, textureCache: textureCache)
let chroma = ARKitHelpers.texture(from: pixelBuffer, format: .rg8Unorm, planeIndex: 1, textureCache: textureCache)

middlePlaneNode?.geometry?.firstMaterial?.transparent.contents = luma
middlePlaneNode?.geometry?.firstMaterial?.diffuse.contents = chroma
}

+private func updatePersonSegmentationMiddle(with pixelBuffer: CVPixelBuffer) {
+let request = VNGeneratePersonSegmentationRequest()
+request.qualityLevel = .fast
+request.outputPixelFormat = kCVPixelFormatType_OneComponent8
+
+let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
+
+do {
+try handler.perform([request])
+
+guard let result = request.results?.first?.pixelBuffer else {
+return
+}
+
+let mask = ARKitHelpers.texture(from: result, format: .r8Unorm, planeIndex: 0, textureCache: textureCache)
+middlePlaneNode?.geometry?.firstMaterial?.ambient.contents = mask
+
+let luma = ARKitHelpers.texture(from: pixelBuffer, format: .r8Unorm, planeIndex: 0, textureCache: textureCache)
+let chroma = ARKitHelpers.texture(from: pixelBuffer, format: .rg8Unorm, planeIndex: 1, textureCache: textureCache)
+
+middlePlaneNode?.geometry?.firstMaterial?.transparent.contents = luma
+middlePlaneNode?.geometry?.firstMaterial?.diffuse.contents = chroma
+} catch {
+print("Error: \(error)")
+}
+}

override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
prepareARConfiguration()
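Note: updatePersonSegmentationMiddle allocates a new VNGeneratePersonSegmentationRequest and VNImageRequestHandler and performs them synchronously on every frame, on whatever thread calls updateMiddle. A small sketch, assuming the same class context and the import Vision added above, of reusing a single request object; the property and method names are assumptions, not part of this PR:

// Sketch: one reusable request instead of a per-frame allocation.
private lazy var segmentationRequest: VNGeneratePersonSegmentationRequest = {
    let request = VNGeneratePersonSegmentationRequest()
    request.qualityLevel = .fast                                   // lowest-latency quality level
    request.outputPixelFormat = kCVPixelFormatType_OneComponent8   // 8-bit single-channel mask
    return request
}()

// Returns the person mask for a camera frame, or nil if the request fails.
private func personSegmentationMask(for pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    do {
        try handler.perform([segmentationRequest])
        return segmentationRequest.results?.first?.pixelBuffer
    } catch {
        print("Segmentation error: \(error)")
        return nil
    }
}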