Sunday, August 13, 2023
Home › iOS Development › swift – Video gets oriented upside down while writing…

swift – Video gets oriented upside down while writing to a file after removing the mirror effect in iOS


I am trying to show the camera with a mirrored effect during preview mode, but while capturing the video for writing to a document-directory file, I remove that mirrored effect just before writing — and that ends up making my video oriented upside down. I want to fix this in portrait mode while writing to the file. Please help me in this regard. Here is my code snippet –

import AVFoundation
import Accelerate.vImage
import CoreImage
import UIKit

enum CameraState {
    case idle, begin, capturing, finish
}

closing class VideoDataManager: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    /// Delegate to obtain the frames captured by the machine's digital camera.
    var delegate: VideoServiceProtocol?
    var cameraState = CameraState.idle
    
    personal var _videoOutput: AVCaptureVideoDataOutput?
    personal var assetWriter: AVAssetWriter?
    personal var assetWriterInput: AVAssetWriterInput?
    personal var adpater: AVAssetWriterInputPixelBufferAdaptor?
    personal var time: Double = 0
    
    override init() {
        tremendous.init()
        configureSession("FRONT")
    }
    
    /// Begin capturing frames from the digital camera.
    func startRunning() {
        DispatchQueue.international(qos: .background).async {
            self.captureSession.startRunning()
        }
    }
    
    /// Cease capturing frames from the digital camera.
    func stopRunning() {
        captureSession.stopRunning()
    }
    
    var captureSession = AVCaptureSession()
    
    func switchCamera(_ cameraType:String?) {
        
        let session: AVCaptureSession = captureSession
        session.beginConfiguration()
        
        var place = AVCaptureDevice.Place.again
        if cameraType == "FRONT" {
            place = AVCaptureDevice.Place.entrance
        }
        
        guard let digital camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, place: place) else { return }
        
        for enter in session.inputs {
            session.removeInput(enter)
        }
        
        do {
            let enter = strive AVCaptureDeviceInput(machine: digital camera)
            session.addInput(enter)
        } catch {
            return
        }
        
        if let videoOutput =  session.outputs.first {
            // Deal with mirroring primarily based on cameraType for preview
            if let connection = videoOutput.connection(with: .video), connection.isVideoMirroringSupported {
                connection.isVideoMirrored = place == .entrance
            }
            videoOutput.connection(with: .video)?.videoOrientation = .portrait
        }
        
        session.commitConfiguration()
        captureSession = session
        
    }
    
    /// Initialize the seize session.
    ///
    func configureSession(_ cameraType:String?) {
        
        if captureSession.isRunning {
            stopRunning()
            for enter in captureSession.inputs {
                captureSession.removeInput(enter)
            }
        }
        
        captureSession.sessionPreset = AVCaptureSession.Preset.picture
        var place = AVCaptureDevice.Place.again
        
        if cameraType == "FRONT" {
            place = AVCaptureDevice.Place.entrance
        }
        
        guard let digital camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, place: place) else { return }
        
        do {
            let enter = strive AVCaptureDeviceInput(machine: digital camera)
            captureSession.addInput(enter)
        } catch {
            return
        }
        
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.videoSettings = [
            (kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA)
        ]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        
        let dataOutputQueue = DispatchQueue(label: "video information queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
        
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
            if let connection = videoOutput.connection(with: .video), connection.isVideoMirroringSupported {
                connection.isVideoMirrored = place == .entrance
                connection.videoOrientation = .portrait
            }
        }
        
        videoOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
        _videoOutput = videoOutput
        
    }
    
    // MARK: Strategies of the AVCaptureVideoDataOutputSampleBufferDelegate
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly)
        delegate?.feedVideoDataIntoModel(didOutput: pixelBuffer)
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly)
        
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
        
        change cameraState {
        case .begin:
            
            // Arrange recorder
            guard let fileUrl = Utility.getExcerciseRecordingFilePath() else { return }
            
            strive? FileManager.default.removeItem(at: fileUrl)
            let author = strive! AVAssetWriter(outputURL: fileUrl, fileType: .mp4)
            let settings = _videoOutput?.recommendedVideoSettingsForAssetWriter(writingTo: .mp4)
            let enter = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
            enter.mediaTimeScale = CMTimeScale(bitPattern: 600)
            enter.expectsMediaDataInRealTime = true
            let adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: enter, sourcePixelBufferAttributes: nil)
            if author.canAdd(enter) {
                author.add(enter)
            }
            enter.rework = getVideoTransform(for: connection.videoOrientation)
            author.startWriting()
            author.startSession(atSourceTime: .zero)
            assetWriter = author
            assetWriterInput = enter
            adpater = adapter
            cameraState = .capturing
            time = timestamp
            MMHLogger.smartLog("Digital camera State.... begin...")
            
        case .capturing:
            MMHLogger.smartLog("Video is capturing.... in capturing state")
            MMHLogger.smartLog("Video isReady in capturing state Mode:>>>>>>(String(describing: assetWriterInput?.isReadyForMoreMediaData))")
            
            if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                if assetWriterInput?.isReadyForMoreMediaData == true {
                    let transformedPixelBuffer = applyMirrorEffectRemovalTransformation(pixelBuffer, originalOrientation: connection.videoOrientation)
                    let time = CMTime(seconds: timestamp - time, preferredTimescale: CMTimeScale(600))
                    adpater?.append(transformedPixelBuffer, withPresentationTime: time)
                    
                }
            }
            break
            
        case .finish:
            MMHLogger.smartLog("Video is led to finish state....")
            MMHLogger.smartLog("Video isReady in finish state Mode:>>>>>>(String(describing: assetWriterInput?.isReadyForMoreMediaData))")
            guard assetWriterInput?.isReadyForMoreMediaData == true, assetWriter!.standing != .failed else { break }
            
            assetWriterInput?.markAsFinished()
            assetWriter?.finishWriting { [weak self] in
                self?.assetWriter = nil
                self?.assetWriterInput = nil
                MMHLogger.smartLog("Video is recorded")
                NotificationCenter.default.publish(title: Notification.Identify("STOPRECORDING"), object: nil)
            }
            
        default:
            break
        }
    }
    
    func applyMirrorEffectRemovalTransformation(_ pixelBuffer: CVPixelBuffer, originalOrientation: AVCaptureVideoOrientation) -> CVPixelBuffer {
        let mirroredPixelBuffer = createMirroredPixelBuffer(pixelBuffer)
        return mirroredPixelBuffer
    }
    
    // Create a mirrored pixel buffer
    func createMirroredPixelBuffer(_ pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
        var mirroredPixelBuffer: CVPixelBuffer?
        
        CVPixelBufferCreate(nil, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer), CVPixelBufferGetPixelFormatType(pixelBuffer), nil, &mirroredPixelBuffer)
        
        guard let mirroredPixelBuffer = mirroredPixelBuffer else {
            return pixelBuffer
        }
        
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        CVPixelBufferLockBaseAddress(mirroredPixelBuffer, [])
        
        let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
        let mirroredBaseAddress = CVPixelBufferGetBaseAddress(mirroredPixelBuffer)
        
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let mirroredBytesPerRow = CVPixelBufferGetBytesPerRow(mirroredPixelBuffer)
        for y in 0..<CVPixelBufferGetHeight(pixelBuffer) {
            memcpy(mirroredBaseAddress! + y * mirroredBytesPerRow, baseAddress! + (CVPixelBufferGetHeight(pixelBuffer) - y - 1) * bytesPerRow, bytesPerRow)
        }
        
        CVPixelBufferUnlockBaseAddress(mirroredPixelBuffer, [])
        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        
        return mirroredPixelBuffer
    }
    
    // Replace the getVideoTransform technique to simply accept an orientation parameter
    personal func getVideoTransform(for orientation: AVCaptureVideoOrientation) -> CGAffineTransform {
        change orientation {
        case .portraitUpsideDown:
            return CGAffineTransform(rotationAngle: .pi)
        case .landscapeLeft:
            return CGAffineTransform(rotationAngle: .pi / 2)
        case .landscapeRight:
            return CGAffineTransform(rotationAngle: -.pi / 2)
        default:
            return CGAffineTransform.id
        }
    }
    
    func seize(state: CameraState) {
        change state {
        case .idle:
            cameraState = .idle
        case .begin:
            cameraState = .begin
        case .capturing:
            cameraState = .capturing
        case .finish:
            cameraState = .finish
        }
    }
}



Source link

RELATED ARTICLES

LEAVE A REPLY

Please enter your comment!
Please enter your name here

- Advertisment -
Google search engine

Most Popular

Recent Comments