SwiftUI: get the colored percentage of a box View

I'm trying to get the percentage of the colored area.

The problem is that when I get the snapshot of the view, the path is not taken into account.

(Screenshot omitted: the drawn path is missing from the snapshot image.)

DrawingView

struct DrawingView: View {

    /// Stroke width passed down to the drawing box.
    @State private var lineWidth: Float = 15

    /// Side length of the square drawing area, in points.
    private let boxSize: CGFloat = 200

    /// Last snapshot taken of the drawing box.
    @State private var imageSnap: UIImage = UIImage()

    /// Textual description of the average color of the last snapshot.
    @State private var percColorText: String = ""

    /// The drawable box with a rounded border overlay.
    var drawingBoxView: some View {
        ZStack {
            DrawingBox(boxSize: boxSize, lineWidth: $lineWidth)
                .frame(width: boxSize, height: boxSize)
                .overlay(
                    RoundedRectangle(cornerRadius: 16)
                        .stroke(.primary, lineWidth: 6)
                )
        }
    }

    var body: some View {
        VStack {
            Text(percColorText)
            Image(uiImage: imageSnap)

            Button(action: {
                // Snapshot the drawing and show its average color.
                // (Previously the individual rgba components were bound to
                // unused locals; only the description string is needed.)
                let snapshot = drawingBoxView.snapshot()
                imageSnap = snapshot
                let averageColor = snapshot.averageColorInImage()
                percColorText = "\(String(describing: averageColor))"
            }, label: {
                Text("get color and image")
            })

            drawingBoxView
        }
    }
}

Snapshot

extension View {
    /// Renders this SwiftUI view offscreen into a `UIImage`.
    ///
    /// The view is wrapped in a `UIHostingController`, laid out at its
    /// fitting size, and drawn with `drawHierarchy(in:afterScreenUpdates:)`.
    /// - Returns: The rendered image, or an empty `UIImage` if the hosting
    ///   view cannot be created or ends up with a zero size.
    func snapshot() -> UIImage {
        let controller = UIHostingController(rootView: self)
        guard let view = controller.view else { return UIImage() }

        // `intrinsicContentSize` can be `.zero` for container views such as
        // a ZStack, which produces a blank snapshot; fall back to the size
        // the hosting controller actually needs.
        var targetSize = view.intrinsicContentSize
        if targetSize.width <= 0 || targetSize.height <= 0 {
            targetSize = controller.sizeThatFits(in: UIScreen.main.bounds.size)
        }
        guard targetSize.width > 0, targetSize.height > 0 else { return UIImage() }

        view.bounds = CGRect(origin: .zero, size: targetSize)
        view.backgroundColor = .clear

        // Force a layout pass so subview content (including drawn paths)
        // is up to date before rendering.
        view.setNeedsLayout()
        view.layoutIfNeeded()

        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        }
    }
}

DrawingBox

struct DrawingBox: View {

    /// Stroke width for the path currently being drawn (shared with the parent).
    @Binding var lineWidth: Float
    /// Side length of the square drawable area, in points.
    let boxSize: CGFloat

    /// All strokes drawn so far; the last element is the in-progress stroke.
    @State private var drawing: [DrawingPath]
    /// Index into `drawing` of the stroke currently being drawn.
    @State private var currentDrawing: Int = 0

    init(boxSize: CGFloat, lineWidth: Binding<Float>) {
        // BUG FIX: `boxSize` used to be a stored constant `= 200` that
        // shadowed this parameter, so every stroke appended after the first
        // gesture ignored the caller-supplied size. Store the parameter.
        self.boxSize = boxSize
        _drawing = State(initialValue: [DrawingPath(id: 0, lineWidth: lineWidth.wrappedValue, drawableArea: boxSize)])
        _lineWidth = lineWidth
    }

    var body: some View {
        ZStack {
            Color(uiColor: UIColor.systemBackground) // drawing background
            ForEach(drawing) { draw in
                DrawShape(drawingPath: draw)
                    .stroke(.primary, style: StrokeStyle(lineWidth: CGFloat(draw.lineWidth), lineCap: .round, lineJoin: .round))
            }
        }
        .gesture(
            DragGesture(minimumDistance: 0.0, coordinateSpace: .local)
                .onChanged { value in
                    drawing[currentDrawing].addPoint(value.location)
                }
                .onEnded { _ in
                    // Finish the current stroke and start an empty one.
                    drawing[currentDrawing].addBreak()
                    currentDrawing += 1
                    drawing.append(DrawingPath(id: currentDrawing, lineWidth: lineWidth, drawableArea: boxSize))
                }
        )
        .onChange(of: lineWidth) { value in
            drawing[currentDrawing].lineWidth = value
        }
    }
}
    

extension Comparable {
    /// Returns `self` limited to the closed range `[f, t]`, inclusive at
    /// both ends: values equal to a bound are returned unchanged.
    /// Example: `x = x.clamped(0.5, 5.0)`.
    func clamped(_ f: Self, _ t: Self) -> Self {
        // Raise to the lower bound first, then cap at the upper bound —
        // equivalent to the explicit two-branch comparison.
        return min(max(self, f), t)
    }
}
/// A single stroke (possibly containing several sub-strokes separated by
/// breaks), with every point clamped to the drawable area.
struct DrawingPath: Identifiable {
    var id: Int = 0
    /// Stroke width in points; used to inset the clamping bounds.
    /// (Previously declared as `Float!` although the init always sets it.)
    var lineWidth: Float
    /// Side length of the square area points are confined to.
    var drawableArea: CGFloat

    /// Recorded points, already clamped inside the drawable area.
    private var points = [CGPoint]()
    /// Indices into `points` where a new sub-stroke begins.
    private var breaks = [Int]()

    init(id: Int, lineWidth: Float, drawableArea: CGFloat) {
        self.id = id
        self.lineWidth = lineWidth
        self.drawableArea = drawableArea
    }

    /// Appends `point`, clamped so the stroke (including half the line
    /// width on each side) stays inside the drawable area.
    mutating func addPoint(_ point: CGPoint) {
        let inset = CGFloat(lineWidth / 2)
        var clamped = point
        clamped.x = clamped.x.clamped(inset, drawableArea - inset)
        clamped.y = clamped.y.clamped(inset, drawableArea - inset)
        points.append(clamped)
    }

    /// Marks the end of the current sub-stroke; the next appended point
    /// starts a new one.
    mutating func addBreak() {
        breaks.append(points.count)
    }
}

extension DrawingPath {
    /// Builds a stroke path from the recorded points, starting a fresh
    /// subpath at every recorded break instead of connecting across it.
    var path: Path {
        var path = Path()
        guard let firstPoint = points.first else { return path }
        path.move(to: firstPoint)
        for i in 0..<points.count {
            if breaks.contains(i) {
                // A break index marks the first point of the NEXT sub-stroke
                // (breaks store `points.count` at break time): jump there
                // without drawing. The previous `closeSubpath()` drew an
                // unwanted line back to the subpath start and then connected
                // it to the following stroke.
                path.move(to: points[i])
            } else {
                path.addLine(to: points[i]) // connect points otherwise
            }
        }
        return path
    }
}

/// Adapts a `DrawingPath` to SwiftUI's `Shape` protocol so it can be stroked.
struct DrawShape: Shape {
    let drawingPath: DrawingPath

    /// `rect` is ignored: the stored path is already expressed in the
    /// drawing box's local coordinate space.
    func path(in rect: CGRect) -> Path {
        return drawingPath.path
    }
}

/// Xcode canvas preview: a 200 pt box with a constant 5 pt line width.
struct DrawingBox_Previews: PreviewProvider {
    static var previews: some View {
        DrawingBox(
            boxSize: 200,
            lineWidth: .constant(5)
        )
    }
}

extension View {
    /// Snapshots the view and returns the snapshot's average color,
    /// or `nil` if the average cannot be computed.
    func getAverageColor() -> UIColor? {
        return self.snapshot().averageColor
    }
}
extension UIImage {
    /// Average color of the image, `nil` if it cannot be computed.
    ///
    /// Runs Core Image's `CIAreaAverage` filter over the whole image extent,
    /// then reads back the single resulting RGBA pixel.
    var averageColor: UIColor? {
        // Convert to a Core Image image.
        guard let inputImage = CIImage(image: self) else { return nil }

        // Extent vector covering the whole input image.
        let extentVector = CIVector(x: inputImage.extent.origin.x,
                                    y: inputImage.extent.origin.y,
                                    z: inputImage.extent.size.width,
                                    w: inputImage.extent.size.height)

        // CIAreaAverage reduces the given extent to a single average pixel.
        guard let filter = CIFilter(name: "CIAreaAverage",
                                    parameters: [kCIInputImageKey: inputImage,
                                                 kCIInputExtentKey: extentVector]) else { return nil }
        guard let outputImage = filter.outputImage else { return nil }

        // Render the 1x1 output into a 4-byte RGBA bitmap.
        // NSNull() disables color management (and avoids the previous
        // force-unwrap of kCFNull).
        var bitmap = [UInt8](repeating: 0, count: 4)
        let context = CIContext(options: [.workingColorSpace: NSNull()])
        context.render(outputImage,
                       toBitmap: &bitmap,
                       rowBytes: 4,
                       bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                       format: .RGBA8,
                       colorSpace: nil)

        // Scale the 0...255 bytes to 0...1 color components.
        return UIColor(red: CGFloat(bitmap[0]) / 255,
                       green: CGFloat(bitmap[1]) / 255,
                       blue: CGFloat(bitmap[2]) / 255,
                       alpha: CGFloat(bitmap[3]) / 255)
    }

    /// Average color of the image; same result as `averageColor`.
    /// Kept for source compatibility with existing callers — previously it
    /// duplicated the `averageColor` implementation line for line.
    func averageColorInImage() -> UIColor? {
        return averageColor
    }
}


Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source