Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
6d9a192
Add legacy attributes for future comparison with older implementations
himanshunaidu Jan 24, 2026
1131ffc
Start adding logic for plane fitting
himanshunaidu Jan 25, 2026
97aea75
Move the delta flip logic in LocalizationProcessor outside the delta …
himanshunaidu Jan 25, 2026
5494cf8
Add kernel for computing plane 3d world points
himanshunaidu Jan 25, 2026
2ab0aeb
Add Swift struct WorldPoints to utilize world points kernel
himanshunaidu Jan 26, 2026
954299a
Start incorporating world points calculation in planefit
himanshunaidu Jan 26, 2026
3178dc8
Separate out attribute estimation pipeline and create distinct legacy…
himanshunaidu Jan 26, 2026
77c6840
Extend usage of plane fitting and add debugging to Metal-based world …
himanshunaidu Jan 26, 2026
c9ed7cf
Add CPU based plane detection method for testing
himanshunaidu Jan 26, 2026
1a6f0f7
Add ways to compare CPU results with the GPU results for plane detection
himanshunaidu Jan 26, 2026
c6c6b52
Bring plane detection results onto the Annotation View
himanshunaidu Jan 26, 2026
319915d
Add plane projection logic for visualization and debugging
himanshunaidu Jan 27, 2026
4e687aa
Make logic for plane projection more robust to edge cases
himanshunaidu Jan 27, 2026
25929e9
Update plane projection to get corners of the image
himanshunaidu Jan 27, 2026
35eccc4
Rasterize projected plane for visualization and debugging
himanshunaidu Jan 28, 2026
781a1eb
Add trapezoid points as additional legacy property and remove image-d…
himanshunaidu Jan 28, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 66 additions & 4 deletions IOSAccessAssessment.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,13 @@ enum AccessibilityFeatureAttribute: String, Identifiable, CaseIterable, Codable,
case lidarDepth
case latitudeDelta
case longitudeDelta
/**
- NOTE:
Legacy attributes for comparison with older data
*/
case widthLegacy
case runningSlopeLegacy
case crossSlopeLegacy

enum Value: Sendable, Codable, Equatable {
case length(Measurement<UnitLength>)
Expand Down Expand Up @@ -97,6 +104,24 @@ enum AccessibilityFeatureAttribute: String, Identifiable, CaseIterable, Codable,
valueType: .length(Measurement(value: 0, unit: .meters)),
osmTagKey: APIConstants.TagKeys.longitudeDeltaKey
)
case .widthLegacy:
return Metadata(
id: 10, name: "Width Legacy", unit: UnitLength.meters,
valueType: .length(Measurement(value: 0, unit: .meters)),
osmTagKey: "width_legacy"
)
case .runningSlopeLegacy:
return Metadata(
id: 20, name: "Running Slope Legacy", unit: UnitAngle.degrees,
valueType: .angle(Measurement(value: 0, unit: .degrees)),
osmTagKey: "incline_legacy"
)
case .crossSlopeLegacy:
return Metadata(
id: 30, name: "Cross Slope Legacy", unit: UnitAngle.degrees,
valueType: .angle(Measurement(value: 0, unit: .degrees)),
osmTagKey: "cross_slope_legacy"
)
}
}

Expand Down Expand Up @@ -150,6 +175,12 @@ extension AccessibilityFeatureAttribute {
return true
case (.longitudeDelta, .length):
return true
case (.widthLegacy, .length):
return true
case (.runningSlopeLegacy, .angle):
return true
case (.crossSlopeLegacy, .angle):
return true
default:
return false
}
Expand Down Expand Up @@ -198,6 +229,12 @@ extension AccessibilityFeatureAttribute {
return .length(Measurement(value: value, unit: .meters))
case .longitudeDelta:
return .length(Measurement(value: value, unit: .meters))
case .widthLegacy:
return .length(Measurement(value: value, unit: .meters))
case .runningSlopeLegacy:
return .angle(Measurement(value: value, unit: .degrees))
case .crossSlopeLegacy:
return .angle(Measurement(value: value, unit: .degrees))
}
}

Expand Down Expand Up @@ -229,6 +266,12 @@ extension AccessibilityFeatureAttribute {
return String(format: "%.2f", measurement.converted(to: .meters).value)
case (.longitudeDelta, .length(let measurement)):
return String(format: "%.2f", measurement.converted(to: .meters).value)
case (.widthLegacy, .length(let measurement)):
return String(format: "%.2f", measurement.converted(to: .meters).value)
case (.runningSlopeLegacy, .angle(let measurement)):
return String(format: "%.2f", measurement.converted(to: .degrees).value)
case (.crossSlopeLegacy, .angle(let measurement)):
return String(format: "%.2f", measurement.converted(to: .degrees).value)
default:
return nil
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@ extension AccessibilityFeatureConfig {
AccessibilityFeatureClass(
id: "sidewalk", name: "Sidewalk", grayscaleValue: 1.0 / 255.0, labelValue: 1,
color: CIColor(red: 0.957, green: 0.137, blue: 0.910),
bounds: CGRect(
x: 0.0, y: 0.5, width: 1.0, height: 0.4
),
meshClassification: [.floor],
attributes: [.width, .runningSlope, .crossSlope, .surfaceIntegrity],
attributes: [
.width, .runningSlope, .crossSlope, .surfaceIntegrity,
.widthLegacy, .runningSlopeLegacy, .crossSlopeLegacy
],
oswPolicy: OSWPolicy(oswElementClass: .Sidewalk)
),

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,37 @@ struct ContourDetails: Sendable, Codable, Equatable, Hashable {
let area: Float
let perimeter: Float

init(centroid: CGPoint, boundingBox: CGRect, normalizedPoints: [SIMD2<Float>], area: Float, perimeter: Float) {
let trapezoidPoints: [SIMD2<Float>]?

/// Memberwise initializer.
/// - Parameter trapezoidPoints: Optional trapezoid corner points kept for legacy
///   comparison; defaults to `nil`.
init(
    centroid: CGPoint, boundingBox: CGRect, normalizedPoints: [SIMD2<Float>], area: Float, perimeter: Float,
    trapezoidPoints: [SIMD2<Float>]? = nil
) {
    // Geometry first, then scalar metrics, then the optional legacy payload.
    self.normalizedPoints = normalizedPoints
    self.boundingBox = boundingBox
    self.centroid = centroid
    self.perimeter = perimeter
    self.area = area
    self.trapezoidPoints = trapezoidPoints
}

init(normalizedPoints: [SIMD2<Float>]) {
/// Convenience initializer that derives centroid, bounds, area, and perimeter
/// from the given contour points via `ContourUtils`.
/// - Parameter trapezoidPoints: Optional trapezoid corner points; defaults to `nil`.
init(normalizedPoints: [SIMD2<Float>], trapezoidPoints: [SIMD2<Float>]? = nil) {
    let derived = ContourUtils.getCentroidAreaBounds(normalizedPoints: normalizedPoints)
    self.normalizedPoints = normalizedPoints
    self.trapezoidPoints = trapezoidPoints
    self.centroid = derived.centroid
    self.boundingBox = derived.boundingBox
    self.perimeter = derived.perimeter
    self.area = derived.area
}

/// Copying initializer: duplicates an existing `ContourDetails`, attaching the
/// given trapezoid points (or `nil` to drop them).
init(contourDetails: ContourDetails, trapezoidPoints: [SIMD2<Float>]? = nil) {
    // Delegate to the memberwise initializer instead of assigning field by field.
    self.init(
        centroid: contourDetails.centroid,
        boundingBox: contourDetails.boundingBox,
        normalizedPoints: contourDetails.normalizedPoints,
        area: contourDetails.area,
        perimeter: contourDetails.perimeter,
        trapezoidPoints: trapezoidPoints
    )
}
}

Expand Down
65 changes: 57 additions & 8 deletions IOSAccessAssessment/Annotation/AnnotationImageManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ struct AnnotationImageResults {
let cameraImage: CIImage
let segmentationLabelImage: CIImage

var alignedSegmentationLabalImages: [CIImage]?
var alignedSegmentationLabelImages: [CIImage]?
var processedSegmentationLabelImage: CIImage? = nil
var featuresSourceCGImage: CGImage? = nil

Expand All @@ -61,6 +61,11 @@ struct AnnotationImageResults {
var featuresOverlayOutputImage: CIImage? = nil
}

/**
 Results produced by a feature update that are consumed for plane-related
 debug visualization (see `getPlaneImage`).
 */
struct AnnotationImageFeatureUpdateResults: Sendable {
    // Plane produced by the feature-update pipeline — presumably the fitted
    // plane from plane detection; confirm against the producer.
    let plane: Plane
    // Image-space projection of `plane`, rasterized for the debug overlay.
    let projectedPlane: ProjectedPlane
}

/**
A class to manage annotation image processing including segmentation mask post-processing and feature detection.
*/
Expand Down Expand Up @@ -109,7 +114,7 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
Task {
await MainActor.run {
self.outputConsumer?.annotationOutputImage(
self, image: cameraOutputImage, overlayImage: nil, overlay2Image: nil
self, image: cameraOutputImage, overlayImage: nil, overlay2Image: nil, overlay3Image: nil
)
}
}
Expand All @@ -127,7 +132,7 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
return
}
let alignedSegmentationLabelImages = getAlignedCaptureDataHistory(captureDataHistory: captureDataHistory)
annotationImageResults.alignedSegmentationLabalImages = alignedSegmentationLabelImages
annotationImageResults.alignedSegmentationLabelImages = alignedSegmentationLabelImages
self.annotationImageResults = annotationImageResults
}

Expand All @@ -142,7 +147,9 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
/**
Updates the camera image, and recreates the overlay image.
*/
func updateFeatureClass(accessibilityFeatureClass: AccessibilityFeatureClass) throws -> [EditableAccessibilityFeature] {
func updateFeatureClass(
accessibilityFeatureClass: AccessibilityFeatureClass
) throws -> [EditableAccessibilityFeature] {
guard isConfigured else {
throw AnnotationImageManagerError.notConfigured
}
Expand Down Expand Up @@ -199,7 +206,8 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
self,
image: cameraOutputImage,
overlayImage: segmentationOverlayOutputImage,
overlay2Image: featuresOverlayOutputImage
overlay2Image: featuresOverlayOutputImage,
overlay3Image: nil
)
}
}
Expand All @@ -209,15 +217,17 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
func updateFeature(
accessibilityFeatureClass: AccessibilityFeatureClass,
accessibilityFeatures: [EditableAccessibilityFeature],
featureSelectedStatus: [UUID: Bool]
featureSelectedStatus: [UUID: Bool],
updateFeatureResults: AnnotationImageFeatureUpdateResults?
) throws {
guard isConfigured else {
throw AnnotationImageManagerError.notConfigured
}
guard var annotationImageResults = self.annotationImageResults,
let cameraOutputImage = annotationImageResults.cameraOutputImage,
let segmentationOverlayOutputImage = annotationImageResults.segmentationOverlayOutputImage,
let featuresSourceCGImage = annotationImageResults.featuresSourceCGImage else {
let featuresSourceCGImage = annotationImageResults.featuresSourceCGImage,
let captureImageData = self.captureImageData else {
throw AnnotationImageManagerError.imageResultCacheFailed
}
let updatedFeaturesOverlayResults = try updateFeaturesOverlayOutputImageWithSource(
Expand All @@ -228,13 +238,20 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP
annotationImageResults.featuresSourceCGImage = updatedFeaturesOverlayResults.sourceCGImage
annotationImageResults.featuresOverlayOutputImage = updatedFeaturesOverlayResults.overlayImage
self.annotationImageResults = annotationImageResults
/// Additional images for debugging
let overlay3Image: CIImage? = self.getPlaneImage(
captureImageData: captureImageData,
size: captureImageData.originalSize,
updateFeatureResults: updateFeatureResults
)
Task {
await MainActor.run {
self.outputConsumer?.annotationOutputImage(
self,
image: cameraOutputImage,
overlayImage: segmentationOverlayOutputImage,
overlay2Image: updatedFeaturesOverlayResults.overlayImage
overlay2Image: updatedFeaturesOverlayResults.overlayImage,
overlay3Image: overlay3Image
)
}
}
Expand Down Expand Up @@ -534,3 +551,35 @@ extension AnnotationImageManager {
return polygonsNormalizedCoordinates
}
}

/**
Additional images for debugging
*/
extension AnnotationImageManager {
    /// Builds a debug overlay image of the projected plane, oriented and
    /// center-cropped to match the annotation view's camera image.
    ///
    /// - Parameters:
    ///   - captureImageData: Capture metadata; only `interfaceOrientation` is read here.
    ///   - size: Target size at which the projected plane is rasterized.
    ///   - updateFeatureResults: Plane-fitting results; `nil` yields no overlay.
    /// - Returns: The rasterized, oriented, cropped plane image, or `nil` when results
    ///   are missing or any rendering step fails.
    private func getPlaneImage(
        captureImageData: (any CaptureImageDataProtocol),
        size: CGSize,
        updateFeatureResults: AnnotationImageFeatureUpdateResults?
    ) -> CIImage? {
        // Both stored properties of the results struct are non-optional, so a single
        // unwrap is equivalent to the previous per-field guard — and it avoids binding
        // a `plane` constant that was never used (compiler warning).
        guard let results = updateFeatureResults else {
            return nil
        }
        guard let rasterizedPlaneCGImage = PlaneRasterizer.rasterizePlane(
            projectedPlane: results.projectedPlane, size: size
        ) else { return nil }
        let rasterizedPlaneCIImage = CIImage(cgImage: rasterizedPlaneCGImage)
        let interfaceOrientation = captureImageData.interfaceOrientation
        let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize
        let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface(
            currentInterfaceOrientation: interfaceOrientation
        )
        let orientedImage = rasterizedPlaneCIImage.oriented(imageOrientation)
        let overlayOutputImage = CenterCropTransformUtils.centerCropAspectFit(orientedImage, to: croppedSize)

        // Round-trip through a CGImage to render the CI filter chain into a concrete
        // bitmap — presumably so the consuming view gets a flattened image; confirm
        // the extra render pass is required.
        guard let overlayCgImage = context.createCGImage(overlayOutputImage, from: overlayOutputImage.extent) else {
            return nil
        }
        return CIImage(cgImage: overlayCgImage)
    }
}
22 changes: 20 additions & 2 deletions IOSAccessAssessment/Annotation/AnnotationImageViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import SwiftUI
/// Receives rendered annotation output images from an image-processing delegate.
/// Class-bound so the producer can hold it weakly.
protocol AnnotationImageProcessingOutputConsumer: AnyObject {
    /// Delivers the latest camera image plus up to three overlay layers.
    /// All images are optional; `overlay3Image` carries the plane-debug overlay
    /// (see `AnnotationImageManager.getPlaneImage`) — `nil` when unavailable.
    func annotationOutputImage(
        _ delegate: AnnotationImageProcessingDelegate,
        image: CIImage?, overlayImage: CIImage?, overlay2Image: CIImage?, overlay3Image: CIImage?
    )
}

Expand Down Expand Up @@ -57,6 +57,16 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing
iv.translatesAutoresizingMaskIntoConstraints = false
return iv
}()
/// Third overlay layer: hosts the plane-projection debug image. Configured
/// identically to the sibling overlay views (aspect-fit, rounded corners,
/// transparent background) and non-interactive so touches pass through.
private let overlay3View: UIImageView = {
    let iv = UIImageView()
    iv.contentMode = .scaleAspectFit
    iv.clipsToBounds = true
    iv.layer.cornerRadius = 12
    iv.backgroundColor = UIColor(white: 0, alpha: 0.0)
    iv.isUserInteractionEnabled = false
    // Laid out with Auto Layout constraints (see constraintChildViewToParent).
    iv.translatesAutoresizingMaskIntoConstraints = false
    return iv
}()

init(annotationImageManager: AnnotationImageManager) {
self.annotationImageManager = annotationImageManager
Expand Down Expand Up @@ -87,6 +97,8 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing
constraintChildViewToParent(childView: overlayView, parentView: subView)
subView.addSubview(overlay2View)
constraintChildViewToParent(childView: overlay2View, parentView: subView)
subView.addSubview(overlay3View)
constraintChildViewToParent(childView: overlay3View, parentView: subView)

annotationImageManager.outputConsumer = self
annotationImageManager.setOrientation(getOrientation())
Expand Down Expand Up @@ -118,7 +130,7 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing

func annotationOutputImage(
_ delegate: AnnotationImageProcessingDelegate,
image: CIImage?, overlayImage: CIImage?, overlay2Image: CIImage?
image: CIImage?, overlayImage: CIImage?, overlay2Image: CIImage?, overlay3Image: CIImage?
) {
if let img = image {
let uiImage = UIImage(ciImage: img)
Expand All @@ -132,6 +144,12 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing
let uiOverlay2Image = UIImage(ciImage: overlay2Img)
overlay2View.image = uiOverlay2Image
}
if let overlay3Img = overlay3Image {
let uiOverlay3Image = UIImage(ciImage: overlay3Img)
overlay3View.image = uiOverlay3Image
} else {
overlay3View.image = nil
}
}

override func viewDidLayoutSubviews() {
Expand Down
Loading