I have a video recorder built with AVFoundation in Swift. The recorder can capture a number of clips and join them into one video. For instance, you can start recording then stop, then start again and stop; the final video will be both clips combined. If I record the first clip using the front camera, stop it, toggle to the back camera, and record another clip, the final video is produced successfully. However, if I toggle between the front and back cameras within a single clip, the video fails and the clip doesn't process. Why does switching camera positions during a recording cause this problem?
import SwiftUI
import AVKit
import AVFoundation
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate {
    @Published var session = AVCaptureSession()
    @Published var alert = false
    @Published var output = AVCaptureMovieFileOutput()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var isRecording: Bool = false
    @Published var recordedURLs: [URL] = []
    @Published var previewURL: URL?
    @Published var showPreview: Bool = false
    @Published var recordedDuration: CGFloat = 0
    @Published var maxDuration: CGFloat = 20
    var currentCameraPosition: AVCaptureDevice.Position = .back
    override init() {
        super.init()
        self.checkPermission()
        self.preview = AVCaptureVideoPreviewLayer(session: session)
        self.preview.videoGravity = .resizeAspectFill
    }
    func flipCamera() {
        // Create a discovery session to find all available video devices
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
        // Get all available video devices
        let videoDevices = discoverySession.devices
        // Check if there is more than one video device
        guard videoDevices.count > 1 else {
            return // If not, return early
        }
        // Get the current input
        guard let currentVideoInput = session.inputs.first as? AVCaptureDeviceInput else {
            return
        }
        // Get the new camera position
        let newCameraPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back
        // Find the new camera device
        if let newCamera = videoDevices.first(where: { $0.position == newCameraPosition }) {
            // Create a new video input
            do {
                let newVideoInput = try AVCaptureDeviceInput(device: newCamera)
                // Remove the current input
                session.removeInput(currentVideoInput)
                // Add the new input
                if session.canAddInput(newVideoInput) {
                    session.addInput(newVideoInput)
                    currentCameraPosition = newCameraPosition
                } else {
                    // Handle the case where adding the new input fails
                    print("Failed to add new camera input")
                }
            } catch {
                // Handle any errors that occur while creating the new input
                print("Error creating new camera input: \(error.localizedDescription)")
            }
        }
    }
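    // A variant I could try instead: batch the input swap in a single
    // configuration transaction so the session applies it atomically.
    // (Sketch; flipCameraAtomically is my name, it isn't wired into the app,
    // and I don't know whether the missing begin/commitConfiguration matters.)
    func flipCameraAtomically() {
        session.beginConfiguration()
        defer { session.commitConfiguration() }
        // Find the current *video* input specifically, not just the first input
        guard let currentVideoInput = session.inputs
            .compactMap({ $0 as? AVCaptureDeviceInput })
            .first(where: { $0.device.hasMediaType(.video) }) else { return }
        let newPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back
        guard let newCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: newPosition),
              let newInput = try? AVCaptureDeviceInput(device: newCamera) else { return }
        session.removeInput(currentVideoInput)
        if session.canAddInput(newInput) {
            session.addInput(newInput)
            currentCameraPosition = newPosition
        }
    }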
    func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            checkAudioPermission()
            return
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { (status) in
                if status {
                    self.checkAudioPermission()
                }
            }
        case .denied:
            self.alert.toggle()
            return
        default:
            return
        }
    }
    func checkAudioPermission() {
        switch AVCaptureDevice.authorizationStatus(for: .audio) {
        case .authorized:
            setUp()
            return
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .audio) { (audioStatus) in
                if audioStatus {
                    self.setUp()
                }
            }
        case .denied:
            self.alert.toggle()
            return
        default:
            return
        }
    }
    func setUp() {
        do {
            self.session.beginConfiguration()
            let cameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
            let videoInput = try AVCaptureDeviceInput(device: cameraDevice!)
            let audioDevice = AVCaptureDevice.default(for: .audio)
            let audioInput = try AVCaptureDeviceInput(device: audioDevice!)
            if self.session.canAddInput(videoInput) && self.session.canAddInput(audioInput) {
                self.session.addInput(videoInput)
                self.session.addInput(audioInput)
            }
            if self.session.canAddOutput(self.output) {
                self.session.addOutput(self.output)
            }
            self.session.commitConfiguration()
        }
        catch {
            print(error.localizedDescription)
        }
    }
    func startRecording() {
        // MARK: Temporary URL for recording Video
        let tempURL = NSTemporaryDirectory() + "\(Date()).mov"
        output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
        isRecording = true
    }
    func stopRecording() {
        output.stopRecording()
        isRecording = false
    }
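    // Hypothetical guard to test whether switching mid-clip is the trigger:
    // only allow the flip when the movie file output is idle.
    // (Sketch; flipCameraIfIdle is my name, not something in the app today.)
    func flipCameraIfIdle() {
        guard !output.isRecording else {
            print("Ignoring camera flip while recording")
            return
        }
        flipCamera()
    }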
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print(error.localizedDescription)
            return
        }
        // CREATED SUCCESSFULLY
        print(outputFileURL)
        self.recordedURLs.append(outputFileURL)
        if self.recordedURLs.count == 1 {
            self.previewURL = outputFileURL
            return
        }
        // CONVERTING URLs TO ASSETS
        let assets = recordedURLs.compactMap { url -> AVURLAsset in
            return AVURLAsset(url: url)
        }
        self.previewURL = nil
        // MERGING VIDEOS
        Task {
            await mergeVideos(assets: assets) { exporter in
                exporter.exportAsynchronously {
                    if exporter.status == .failed {
                        // HANDLE ERROR
                        print(exporter.error!)
                    }
                    else {
                        if let finalURL = exporter.outputURL {
                            print(finalURL)
                            DispatchQueue.main.async {
                                self.previewURL = finalURL
                            }
                        }
                    }
                }
            }
        }
    }
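    // Helper I might call from fileOutput(_:didFinishRecordingTo:from:error:)
    // instead of bailing on any error: AVFoundation can report an error even
    // when a usable file was written, and AVErrorRecordingSuccessfullyFinishedKey
    // in userInfo says whether it finished OK. (Sketch; I haven't confirmed
    // this is what happens on a camera flip.)
    func recordingActuallySucceeded(_ error: Error?) -> Bool {
        guard let error = error as NSError? else { return true }
        return (error.userInfo[AVErrorRecordingSuccessfullyFinishedKey] as? Bool) ?? false
    }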
    func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) async {
        let composition = AVMutableComposition()
        var lastTime: CMTime = .zero
        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        for asset in assets {
            // Linking Audio and Video
            do {
                try await videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .video)[0], at: lastTime)
                // Safe check whether the video has audio
                let audioTracks = try await asset.loadTracks(withMediaType: .audio)
                if !audioTracks.isEmpty {
                    try await audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: audioTracks[0], at: lastTime)
                }
            }
            catch {
                print(error.localizedDescription)
            }
            // Updating Last Time
            do {
                lastTime = try await CMTimeAdd(lastTime, asset.load(.duration))
            } catch {
                print(error.localizedDescription)
            }
        }
        // MARK: Temp Output URL
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory() + "Reel-\(Date()).mp4")
        // VIDEO IS ROTATED
        // BRINGING BACK TO ORIGINAL TRANSFORM
        let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        // MARK: Transform
        var transform = CGAffineTransform.identity
        transform = transform.rotated(by: 90 * (.pi / 180))
        transform = transform.translatedBy(x: 0, y: -videoTrack.naturalSize.height)
        layerInstructions.setTransform(transform, at: .zero)
        let instructions = AVMutableVideoCompositionInstruction()
        instructions.timeRange = CMTimeRange(start: .zero, duration: lastTime)
        instructions.layerInstructions = [layerInstructions]
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = CGSize(width: videoTrack.naturalSize.height, height: videoTrack.naturalSize.width)
        videoComposition.instructions = [instructions]
        videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputFileType = .mp4
        exporter.outputURL = tempURL
        exporter.videoComposition = videoComposition
        completion(exporter)
    }
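    // Debug helper I can run on recordedURLs before merging, to check whether
    // the clip recorded across the camera flip even has a video track.
    // (Sketch; logAssetInfo is my name and isn't wired into the app.)
    func logAssetInfo(_ assets: [AVURLAsset]) async {
        for asset in assets {
            do {
                let duration = try await asset.load(.duration)
                let videoCount = try await asset.loadTracks(withMediaType: .video).count
                let audioCount = try await asset.loadTracks(withMediaType: .audio).count
                print(asset.url.lastPathComponent, CMTimeGetSeconds(duration), "video tracks:", videoCount, "audio tracks:", audioCount)
            } catch {
                print("Failed to load \(asset.url.lastPathComponent): \(error)")
            }
        }
    }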
}
// IGNORE: the views below are not important to the question
struct HomeStory: View {
    @StateObject var cameraModel = CameraViewModel()
    var body: some View {
        ZStack(alignment: .bottom) {
            CameraStoryView()
                .environmentObject(cameraModel)
                .clipShape(RoundedRectangle(cornerRadius: 30, style: .continuous))
                .padding(.top, 10)
                .padding(.bottom, 30)
            ZStack {
                Button {
                    if cameraModel.isRecording {
                        cameraModel.stopRecording()
                    } else {
                        cameraModel.startRecording()
                    }
                } label: {
                    if cameraModel.isRecording {
                        Circle().frame(width: 95, height: 95).foregroundStyle(.red).opacity(0.7)
                    } else {
                        ZStack {
                            Color.gray.opacity(0.001)
                            Circle().stroke(.white, lineWidth: 7).frame(width: 80, height: 80)
                        }.frame(width: 95, height: 95)
                    }
                }
                Button {
                    cameraModel.flipCamera()
                } label: {
                    Image(systemName: "arrow.triangle.2.circlepath.camera")
                        .font(.title)
                        .foregroundColor(.white)
                        .padding()
                        .background(Circle().fill(Color.black.opacity(0.7)))
                }.offset(x: -100)
                Button {
                    if let _ = cameraModel.previewURL {
                        cameraModel.showPreview.toggle()
                    }
                } label: {
                    if cameraModel.previewURL == nil && !cameraModel.recordedURLs.isEmpty {
                        ProgressView().tint(.black)
                    } else {
                        HStack {
                            Text("Preview")
                            Image(systemName: "chevron.right")
                        }
                        .padding()
                        .foregroundColor(.black).font(.body)
                        .background {
                            Capsule().foregroundStyle(.ultraThinMaterial)
                        }
                    }
                }
                .padding(.horizontal, 20)
                .padding(.vertical, 8)
                .frame(maxWidth: .infinity, alignment: .trailing)
                .padding(.trailing)
                .opacity((cameraModel.previewURL == nil && cameraModel.recordedURLs.isEmpty) || cameraModel.isRecording ? 0 : 1)
            }
            .frame(maxHeight: .infinity, alignment: .bottom)
            .padding(.bottom, 10)
            .padding(.bottom, 30)
            Button {
                cameraModel.recordedDuration = 0
                cameraModel.previewURL = nil
                cameraModel.recordedURLs.removeAll()
            } label: {
                Image(systemName: "xmark")
                    .font(.title)
                    .foregroundColor(.white)
            }
            .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
            .padding()
            .padding(.top)
            .opacity(!cameraModel.recordedURLs.isEmpty && cameraModel.previewURL != nil && !cameraModel.isRecording ? 1 : 0)
        }
        .overlay(content: {
            if let url = cameraModel.previewURL, cameraModel.showPreview {
                FinalPreview(url: url, showPreview: $cameraModel.showPreview)
                    .transition(.move(edge: .trailing))
            }
        })
        .animation(.easeInOut, value: cameraModel.showPreview)
        .preferredColorScheme(.dark)
    }
}
struct FinalPreview: View {
    var url: URL
    @Binding var showPreview: Bool
    var body: some View {
        GeometryReader { proxy in
            let size = proxy.size
            VideoPlayer(player: AVPlayer(url: url))
                .aspectRatio(contentMode: .fill)
                .frame(width: size.width, height: size.height)
                .clipShape(RoundedRectangle(cornerRadius: 30, style: .continuous))
                .overlay(alignment: .topLeading) {
                    Button {
                        showPreview.toggle()
                    } label: {
                        Label {
                            Text("Back")
                        } icon: {
                            Image(systemName: "chevron.left")
                        }
                        .foregroundColor(.white)
                    }
                    .padding(.leading)
                    .padding(.top, 22)
                }
        }
    }
}
struct CameraStoryView: View {
    @EnvironmentObject var cameraModel: CameraViewModel
    var body: some View {
        GeometryReader { proxy in
            let size = proxy.size
            CameraPreview(size: size).environmentObject(cameraModel)
        }
        .onReceive(Timer.publish(every: 0.01, on: .main, in: .common).autoconnect()) { _ in
            if cameraModel.recordedDuration <= cameraModel.maxDuration && cameraModel.isRecording {
                cameraModel.recordedDuration += 0.01
            }
            if cameraModel.recordedDuration >= cameraModel.maxDuration && cameraModel.isRecording {
                cameraModel.stopRecording()
                cameraModel.isRecording = false
            }
        }
    }
}
struct CameraPreview: UIViewRepresentable {
    @EnvironmentObject var cameraModel: CameraViewModel
    var size: CGSize
    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: CGRect(origin: .zero, size: size))
        guard let preview = cameraModel.preview else { return view }
        preview.frame = view.bounds
        preview.videoGravity = .resizeAspectFill
        view.layer.addSublayer(preview)
        DispatchQueue.global(qos: .userInitiated).async {
            if !self.cameraModel.session.isRunning {
                self.cameraModel.session.startRunning()
            }
        }
        return view
    }
    func updateUIView(_ uiView: UIView, context: Context) { }
}