Updated code after some rework.
I am working on a SwiftUI project where I apply a perspective warp to an image using Core Image. I allow users to move the control points (corners of the image) to adjust the transformation.
Issue: When I stretch or move the control points, they can go outside the image boundary, which results in unwanted distortions. I want to restrict the control points so that they always remain on the image's border.
Code:
import SwiftUI
import CoreImage
import CoreImage.CIFilterBuiltins

struct PerspectiveWarpImageView: View {
    // Control points for the four corners of the image's frame.
    @State private var points: [CGPoint] = []
    // Position used to move the warped image around.
    @State var position: CGPoint = CGPoint(x: 200, y: 400)
    @State var position2: CGPoint = CGPoint(x: 200, y: 400)
    @State private var initialPosition: CGPoint = .zero
    // This offset stores the image's top-left origin (computed onAppear).
    @State private var offset: CGPoint = .zero
    @State var positionImage: CGPoint = .zero
    // Change this to your asset's name.
    let imageName = "imgMusic"

    func squareCenter(from points: [CGPoint]) -> CGPoint {
        let sumX = points.reduce(0) { $0 + $1.x }
        let sumY = points.reduce(0) { $0 + $1.y }
        return CGPoint(x: sumX / CGFloat(points.count),
                       y: sumY / CGFloat(points.count))
    }
    var body: some View {
        GeometryReader { geometry in
            ZStack(alignment: .top) {
                // When the control points are set, display the warped image.
                if !points.isEmpty, let transformedImage = transformImage() {
                    Color.gray
                        .edgesIgnoringSafeArea(.all)
                    ZStack {
                        ZStack(alignment: .center) {
                            Image(uiImage: transformedImage)
                                .resizable()
                                .scaledToFit()
                                .background(.red.opacity(0.3))
                                .position(positionImage)
                                .onChange(of: points, perform: { _ in
                                    print("heightOfImage: \(transformedImage.size.height)")
                                    print("position.y: \(position.y)")
                                    print("points[0]: \(points[0])")
                                    print("points[2]: \(points[2])")
                                })
                                .onAppear {
                                    positionImage.x = transformedImage.size.width / 2
                                    positionImage.y = transformedImage.size.height / 2
                                }
                        }
                        .frame(width: transformedImage.size.width, height: transformedImage.size.height)
                        .position(position)

                        PointsView(points: $points)
                            .position(position2)
                            .gesture(
                                DragGesture()
                                    .onChanged { value in
                                        if initialPosition == .zero {
                                            initialPosition = position
                                        }
                                        let newPosition = CGPoint(
                                            x: initialPosition.x + value.translation.width,
                                            y: initialPosition.y + value.translation.height
                                        )
                                        DispatchQueue.main.async {
                                            position = newPosition
                                            position2 = newPosition
                                        }
                                    }
                                    .onEnded { _ in
                                        initialPosition = .zero
                                    }
                            )
                    }
                }
            }
            .onAppear {
                // In case the PointsView onAppear did not run, initialize here.
                if points.isEmpty {
                    let screenWidth = geometry.size.width
                    let screenHeight = geometry.size.height
                    let offsetX = (screenWidth - 400) / 2
                    let offsetY = (screenHeight - 300) / 2
                    offset = CGPoint(x: offsetX, y: offsetY)
                    points = [
                        CGPoint(x: offsetX, y: offsetY),             // Top-left
                        CGPoint(x: offsetX + 400, y: offsetY),       // Top-right
                        CGPoint(x: offsetX + 400, y: offsetY + 300), // Bottom-right
                        CGPoint(x: offsetX, y: offsetY + 300)        // Bottom-left
                    ]
                }
            }
        }
    }
    /// Applies a perspective warp to the image using Core Image.
    func transformImage() -> UIImage? {
        // Load the image from your assets.
        guard let uiImage = UIImage(named: imageName) else { return nil }
        guard let ciImage = CIImage(image: uiImage) else { return nil }

        // Use the image's native size as the display size.
        let displaySize = CGSize(width: uiImage.size.width, height: uiImage.size.height)

        // Set up the perspective transform filter.
        let filter = CIFilter(name: "CIPerspectiveTransform")!
        filter.setValue(ciImage, forKey: kCIInputImageKey)

        // Convert each of the four control points into the image's coordinate system.
        // (Remember: SwiftUI's coordinate space origin is at the top-left, while Core Image's
        // coordinate space origin is at the bottom-left.)
        guard points.count == 4 else { return nil }
        let topLeft = convertToImageCoordinates(points[0], displaySize: displaySize, offset: offset)
        let topRight = convertToImageCoordinates(points[1], displaySize: displaySize, offset: offset)
        let bottomRight = convertToImageCoordinates(points[2], displaySize: displaySize, offset: offset)
        let bottomLeft = convertToImageCoordinates(points[3], displaySize: displaySize, offset: offset)

        filter.setValue(CIVector(cgPoint: topLeft), forKey: "inputTopLeft")
        filter.setValue(CIVector(cgPoint: topRight), forKey: "inputTopRight")
        filter.setValue(CIVector(cgPoint: bottomRight), forKey: "inputBottomRight")
        filter.setValue(CIVector(cgPoint: bottomLeft), forKey: "inputBottomLeft")

        // Render the output image and flip it back into UIKit's top-left orientation.
        guard let outputImage = filter.outputImage,
              let renderedImage = outputImage.toUIImage() else { return nil }
        return flipImageVertically(renderedImage)
    }
    func flipImageVertically(_ image: UIImage) -> UIImage? {
        UIGraphicsImageRenderer(size: image.size).image { context in
            context.cgContext.translateBy(x: 0, y: image.size.height)
            context.cgContext.scaleBy(x: 1, y: -1)
            image.draw(at: .zero)
        }
    }

    func scaleImage(_ image: UIImage, to newSize: CGSize) -> UIImage {
        let renderer = UIGraphicsImageRenderer(size: newSize)
        return renderer.image { _ in
            image.draw(in: CGRect(origin: .zero, size: newSize))
        }
    }

    /// Converts a point from SwiftUI's coordinate space (global) into the image's coordinate space.
    func convertToImageCoordinates(_ point: CGPoint, displaySize: CGSize, offset: CGPoint) -> CGPoint {
        // The image's frame starts at the given offset.
        let relativeX = point.x // - offset.x
        let relativeY = point.y // - offset.y
        // Flip the y coordinate so that the origin is at the bottom.
        // let flippedY = displaySize.height - relativeY
        return CGPoint(x: relativeX, y: relativeY)
    }
}

extension CIImage {
    func toUIImage() -> UIImage? {
        let context = CIContext(options: nil)
        if let cgImage = context.createCGImage(self, from: self.extent) {
            return UIImage(cgImage: cgImage, scale: 1.0, orientation: .up)
        }
        return nil
    }
}

struct PerspectiveWarpImageView_Previews: PreviewProvider {
    static var previews: some View {
        PerspectiveWarpImageView()
    }
}
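As an aside, the y-flip mentioned in the comments of transformImage() (SwiftUI's origin is top-left, Core Image's is bottom-left) is currently commented out in convertToImageCoordinates. A minimal sketch of what that conversion could look like, assuming the point is already relative to the image's top-left corner and that displaySize matches the CIImage extent (both assumptions, not guaranteed by the code above):

// Hypothetical helper (not part of the code above): converts a point from a
// top-left-origin space (SwiftUI) into a bottom-left-origin space (Core Image).
// Assumes `point` is relative to the image's top-left corner and `displaySize`
// matches the CIImage extent.
func convertToCoreImageCoordinates(_ point: CGPoint, displaySize: CGSize) -> CGPoint {
    CGPoint(x: point.x, y: displaySize.height - point.y)
}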
PointsView:
struct PointsView: View {
    @Binding var points: [CGPoint]

    var body: some View {
        ZStack(alignment: .center) {
            Path { path in
                path.move(to: points[0])
                path.addLine(to: points[1])
                path.addLine(to: points[2])
                path.addLine(to: points[3])
                path.closeSubpath()
            }
            .fill(Color.white.opacity(0.1))
            .stroke(Color.white.opacity(0.5), lineWidth: 2)

            ForEach(0..<points.count, id: \.self) { index in
                Circle()
                    .fill(Color.white)
                    .frame(width: 14, height: 14)
                    .position(points[index])
                    .gesture(
                        DragGesture()
                            .onChanged { value in
                                points[index] = value.location
                            }
                    )
            }
        }
    }
}
Expected Behavior:
- The control points should never leave the image bounds.
- Users should be able to drag them only along the image edges (see the clamping sketch below).
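For reference, a minimal sketch of the kind of clamping described above. It assumes the image's frame, expressed in the same coordinate space as the control points, is available as a CGRect (here called imageRect, a hypothetical value that is not computed in the code above):

// Hypothetical helper: keeps a dragged control point inside (or on the border of)
// the image rectangle. `imageRect` is assumed to describe the image's frame in the
// same coordinate space as `points`.
func clamp(_ point: CGPoint, to imageRect: CGRect) -> CGPoint {
    CGPoint(
        x: min(max(point.x, imageRect.minX), imageRect.maxX),
        y: min(max(point.y, imageRect.minY), imageRect.maxY)
    )
}

// Possible usage inside PointsView's drag gesture:
// .onChanged { value in
//     points[index] = clamp(value.location, to: imageRect)
// }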