本地初始化git

This commit is contained in:
bluesea 2024-03-05 11:44:34 +08:00
commit 6c63b0eea0
52 changed files with 7108 additions and 0 deletions

View File

@ -0,0 +1,592 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 56;
objects = {
/* Begin PBXBuildFile section */
007EFE022B60DA3600EFD078 /* IMG_0071.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 007EFE012B60DA3600EFD078 /* IMG_0071.MOV */; };
007EFE042B60E00000EFD078 /* VideoConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 007EFE032B60E00000EFD078 /* VideoConvertor.swift */; };
007EFE082B60EB5900EFD078 /* VideoConvertor2.swift in Sources */ = {isa = PBXBuildFile; fileRef = 007EFE072B60EB5900EFD078 /* VideoConvertor2.swift */; };
009882722B6269B30076385E /* PlayController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 009882712B6269B30076385E /* PlayController.swift */; };
009B7E9B2B5BA788003BE217 /* VideoWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 009B7E9A2B5BA788003BE217 /* VideoWriter.swift */; };
009B7E9D2B5BB392003BE217 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 009B7E9C2B5BB392003BE217 /* CoreVideo.framework */; };
009B7E9F2B5BB39A003BE217 /* CoreImage.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 009B7E9E2B5BB39A003BE217 /* CoreImage.framework */; };
00EED0532B5A4A2400637604 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0522B5A4A2400637604 /* AppDelegate.swift */; };
00EED0552B5A4A2400637604 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0542B5A4A2400637604 /* SceneDelegate.swift */; };
00EED0572B5A4A2400637604 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0562B5A4A2400637604 /* ViewController.swift */; };
00EED05A2B5A4A2400637604 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 00EED0582B5A4A2400637604 /* Main.storyboard */; };
00EED05C2B5A4A2500637604 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 00EED05B2B5A4A2500637604 /* Assets.xcassets */; };
00EED05F2B5A4A2500637604 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 00EED05D2B5A4A2500637604 /* LaunchScreen.storyboard */; };
00EED0682B5A4B1400637604 /* ImageProcessingShaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0662B5A4B1400637604 /* ImageProcessingShaders.metal */; };
00EED0692B5A4B1400637604 /* VideoFile.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0672B5A4B1400637604 /* VideoFile.swift */; };
00EED06B2B5A4B1B00637604 /* VideoPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED06A2B5A4B1B00637604 /* VideoPreview.swift */; };
00EED06D2B5A4B6400637604 /* MetalPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED06C2B5A4B6400637604 /* MetalPlayer.swift */; };
00EED0722B5A4BD600637604 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 00EED0712B5A4BD600637604 /* AVFoundation.framework */; };
00EED0762B5A4CEB00637604 /* FrameProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0752B5A4CEB00637604 /* FrameProcessor.swift */; };
00EED0792B5A686500637604 /* SpatialVideoConverter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00EED0782B5A686500637604 /* SpatialVideoConverter.swift */; };
043A6AD52B81AF04003776D2 /* PlayContoller8.swift in Sources */ = {isa = PBXBuildFile; fileRef = 043A6AD42B81AF04003776D2 /* PlayContoller8.swift */; };
043A6AD72B81B301003776D2 /* VideoConvertor3.swift in Sources */ = {isa = PBXBuildFile; fileRef = 043A6AD62B81B301003776D2 /* VideoConvertor3.swift */; };
043C63122B6B90E80095F268 /* PlayContoller4.swift in Sources */ = {isa = PBXBuildFile; fileRef = 043C63112B6B90E80095F268 /* PlayContoller4.swift */; };
043C63142B6B9CC90095F268 /* ExternalAccessory.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 043C63132B6B9CC90095F268 /* ExternalAccessory.framework */; };
043C63152B6BA6650095F268 /* ExternalAccessory.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 043C63132B6B9CC90095F268 /* ExternalAccessory.framework */; };
044447792B779B4200C7452B /* PlayContoller7.swift in Sources */ = {isa = PBXBuildFile; fileRef = 044447782B779B4200C7452B /* PlayContoller7.swift */; };
0444477C2B779C1B00C7452B /* b.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 0444477A2B779C1B00C7452B /* b.HEIC */; };
0444477D2B779C1B00C7452B /* a.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 0444477B2B779C1B00C7452B /* a.HEIC */; };
04880AF22B6F702A00FF9E59 /* ViewController2.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04880AF12B6F702A00FF9E59 /* ViewController2.swift */; };
04880AF42B6F7A3B00FF9E59 /* PlayControllerVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04880AF32B6F7A3B00FF9E59 /* PlayControllerVideo.swift */; };
049ABF9F2B861C450049A94B /* PlayContoller9.swift in Sources */ = {isa = PBXBuildFile; fileRef = 049ABF9E2B861C450049A94B /* PlayContoller9.swift */; };
049EBD832B6C91C4005421C7 /* PlayContoller5.swift in Sources */ = {isa = PBXBuildFile; fileRef = 049EBD822B6C91C4005421C7 /* PlayContoller5.swift */; };
049EBD852B6CD4EA005421C7 /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 049EBD842B6CD4EA005421C7 /* ImageIO.framework */; };
04A042542B6F9F4A00EA3EF9 /* PlayContoller6.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A042532B6F9F4A00EA3EF9 /* PlayContoller6.swift */; };
04A281A82B6A24840067FB28 /* img1.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 04A281A72B6A24840067FB28 /* img1.HEIC */; };
04A281AA2B6A2E730067FB28 /* img2.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 04A281A92B6A2E730067FB28 /* img2.HEIC */; };
04A281AC2B6A38DA0067FB28 /* img3.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 04A281AB2B6A38DA0067FB28 /* img3.HEIC */; };
04A4E0622B86FED3001894D2 /* PlayContoller10.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A4E0612B86FED3001894D2 /* PlayContoller10.swift */; };
04A4E0642B870160001894D2 /* VideoConvertor4.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A4E0632B870160001894D2 /* VideoConvertor4.swift */; };
04A4E06B2B87465B001894D2 /* bb.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 04A4E0692B87465A001894D2 /* bb.MOV */; };
04A4E06C2B87465B001894D2 /* aa.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 04A4E06A2B87465B001894D2 /* aa.MOV */; };
04A4E06E2B87483C001894D2 /* SpatialVideoWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A4E06D2B87483C001894D2 /* SpatialVideoWriter.swift */; };
04A4E0702B87697E001894D2 /* PlayContoller11.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A4E06F2B87697E001894D2 /* PlayContoller11.swift */; };
04A4E0722B87789C001894D2 /* CreateVideoByBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A4E0712B87789C001894D2 /* CreateVideoByBuffer.swift */; };
04A5AE312B6B3234000D26EA /* img4.HEIC in Resources */ = {isa = PBXBuildFile; fileRef = 04A5AE302B6B3234000D26EA /* img4.HEIC */; };
04A5AE332B6B45E8000D26EA /* Photos.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 04A5AE322B6B45E8000D26EA /* Photos.framework */; };
04A5AE352B6B479E000D26EA /* CCAddImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A5AE342B6B479E000D26EA /* CCAddImage.swift */; };
04A5AE372B6B47AB000D26EA /* PlayControllerImg.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A5AE362B6B47AB000D26EA /* PlayControllerImg.swift */; };
04A5AE392B6B480C000D26EA /* MobileCoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 04A5AE382B6B480C000D26EA /* MobileCoreServices.framework */; };
04A5AE3B2B6B6AD6000D26EA /* PhotosUI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 04A5AE3A2B6B6AD6000D26EA /* PhotosUI.framework */; };
04A5AE3D2B6B757F000D26EA /* PlayController3.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04A5AE3C2B6B757F000D26EA /* PlayController3.swift */; };
04BD5B3E2B81E21100DBBE08 /* VideoPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04BD5B3D2B81E21100DBBE08 /* VideoPlayer.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
007EFE012B60DA3600EFD078 /* IMG_0071.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = IMG_0071.MOV; sourceTree = "<group>"; };
007EFE032B60E00000EFD078 /* VideoConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoConvertor.swift; sourceTree = "<group>"; };
007EFE072B60EB5900EFD078 /* VideoConvertor2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoConvertor2.swift; sourceTree = "<group>"; };
009882712B6269B30076385E /* PlayController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayController.swift; sourceTree = "<group>"; };
009B7E9A2B5BA788003BE217 /* VideoWriter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoWriter.swift; sourceTree = "<group>"; };
009B7E9C2B5BB392003BE217 /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
009B7E9E2B5BB39A003BE217 /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
00EED04F2B5A4A2400637604 /* tdvideo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tdvideo.app; sourceTree = BUILT_PRODUCTS_DIR; };
00EED0522B5A4A2400637604 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
00EED0542B5A4A2400637604 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; };
00EED0562B5A4A2400637604 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
00EED0592B5A4A2400637604 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
00EED05B2B5A4A2500637604 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
00EED05E2B5A4A2500637604 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
00EED0602B5A4A2500637604 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
00EED0662B5A4B1400637604 /* ImageProcessingShaders.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = ImageProcessingShaders.metal; sourceTree = "<group>"; };
00EED0672B5A4B1400637604 /* VideoFile.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoFile.swift; sourceTree = "<group>"; };
00EED06A2B5A4B1B00637604 /* VideoPreview.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoPreview.swift; sourceTree = "<group>"; };
00EED06C2B5A4B6400637604 /* MetalPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalPlayer.swift; sourceTree = "<group>"; };
00EED0712B5A4BD600637604 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
00EED0752B5A4CEB00637604 /* FrameProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FrameProcessor.swift; sourceTree = "<group>"; };
00EED0772B5A683200637604 /* tdvideo.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = tdvideo.entitlements; sourceTree = "<group>"; };
00EED0782B5A686500637604 /* SpatialVideoConverter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpatialVideoConverter.swift; sourceTree = "<group>"; };
043A6AD42B81AF04003776D2 /* PlayContoller8.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller8.swift; sourceTree = "<group>"; };
043A6AD62B81B301003776D2 /* VideoConvertor3.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoConvertor3.swift; sourceTree = "<group>"; };
043C63112B6B90E80095F268 /* PlayContoller4.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller4.swift; sourceTree = "<group>"; };
043C63132B6B9CC90095F268 /* ExternalAccessory.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ExternalAccessory.framework; path = System/Library/Frameworks/ExternalAccessory.framework; sourceTree = SDKROOT; };
044447782B779B4200C7452B /* PlayContoller7.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller7.swift; sourceTree = "<group>"; };
0444477A2B779C1B00C7452B /* b.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = b.HEIC; sourceTree = "<group>"; };
0444477B2B779C1B00C7452B /* a.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = a.HEIC; sourceTree = "<group>"; };
04880AF12B6F702A00FF9E59 /* ViewController2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController2.swift; sourceTree = "<group>"; };
04880AF32B6F7A3B00FF9E59 /* PlayControllerVideo.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayControllerVideo.swift; sourceTree = "<group>"; };
049ABF9E2B861C450049A94B /* PlayContoller9.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller9.swift; sourceTree = "<group>"; };
049EBD822B6C91C4005421C7 /* PlayContoller5.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller5.swift; sourceTree = "<group>"; };
049EBD842B6CD4EA005421C7 /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = System/Library/Frameworks/ImageIO.framework; sourceTree = SDKROOT; };
04A042532B6F9F4A00EA3EF9 /* PlayContoller6.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller6.swift; sourceTree = "<group>"; };
04A281A72B6A24840067FB28 /* img1.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = img1.HEIC; sourceTree = "<group>"; };
04A281A92B6A2E730067FB28 /* img2.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = img2.HEIC; sourceTree = "<group>"; };
04A281AB2B6A38DA0067FB28 /* img3.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = img3.HEIC; sourceTree = "<group>"; };
04A4E0612B86FED3001894D2 /* PlayContoller10.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller10.swift; sourceTree = "<group>"; };
04A4E0632B870160001894D2 /* VideoConvertor4.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoConvertor4.swift; sourceTree = "<group>"; };
04A4E0692B87465A001894D2 /* bb.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = bb.MOV; sourceTree = "<group>"; };
04A4E06A2B87465B001894D2 /* aa.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = aa.MOV; sourceTree = "<group>"; };
04A4E06D2B87483C001894D2 /* SpatialVideoWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialVideoWriter.swift; sourceTree = "<group>"; };
04A4E06F2B87697E001894D2 /* PlayContoller11.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayContoller11.swift; sourceTree = "<group>"; };
04A4E0712B87789C001894D2 /* CreateVideoByBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreateVideoByBuffer.swift; sourceTree = "<group>"; };
04A5AE302B6B3234000D26EA /* img4.HEIC */ = {isa = PBXFileReference; lastKnownFileType = file; path = img4.HEIC; sourceTree = "<group>"; };
04A5AE322B6B45E8000D26EA /* Photos.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Photos.framework; path = System/Library/Frameworks/Photos.framework; sourceTree = SDKROOT; };
04A5AE342B6B479E000D26EA /* CCAddImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CCAddImage.swift; sourceTree = "<group>"; };
04A5AE362B6B47AB000D26EA /* PlayControllerImg.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayControllerImg.swift; sourceTree = "<group>"; };
04A5AE382B6B480C000D26EA /* MobileCoreServices.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MobileCoreServices.framework; path = System/Library/Frameworks/MobileCoreServices.framework; sourceTree = SDKROOT; };
04A5AE3A2B6B6AD6000D26EA /* PhotosUI.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = PhotosUI.framework; path = System/Library/Frameworks/PhotosUI.framework; sourceTree = SDKROOT; };
04A5AE3C2B6B757F000D26EA /* PlayController3.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayController3.swift; sourceTree = "<group>"; };
04BD5B3D2B81E21100DBBE08 /* VideoPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoPlayer.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
00EED04C2B5A4A2400637604 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
00EED0722B5A4BD600637604 /* AVFoundation.framework in Frameworks */,
04A5AE332B6B45E8000D26EA /* Photos.framework in Frameworks */,
049EBD852B6CD4EA005421C7 /* ImageIO.framework in Frameworks */,
009B7E9D2B5BB392003BE217 /* CoreVideo.framework in Frameworks */,
04A5AE392B6B480C000D26EA /* MobileCoreServices.framework in Frameworks */,
043C63142B6B9CC90095F268 /* ExternalAccessory.framework in Frameworks */,
009B7E9F2B5BB39A003BE217 /* CoreImage.framework in Frameworks */,
04A5AE3B2B6B6AD6000D26EA /* PhotosUI.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
00EED0462B5A4A2400637604 = {
isa = PBXGroup;
children = (
00EED0512B5A4A2400637604 /* tdvideo */,
00EED0502B5A4A2400637604 /* Products */,
00EED0702B5A4BD600637604 /* Frameworks */,
);
sourceTree = "<group>";
};
00EED0502B5A4A2400637604 /* Products */ = {
isa = PBXGroup;
children = (
00EED04F2B5A4A2400637604 /* tdvideo.app */,
);
name = Products;
sourceTree = "<group>";
};
00EED0512B5A4A2400637604 /* tdvideo */ = {
isa = PBXGroup;
children = (
04A4E06A2B87465B001894D2 /* aa.MOV */,
04A4E0692B87465A001894D2 /* bb.MOV */,
0444477B2B779C1B00C7452B /* a.HEIC */,
0444477A2B779C1B00C7452B /* b.HEIC */,
04A5AE302B6B3234000D26EA /* img4.HEIC */,
04A281AB2B6A38DA0067FB28 /* img3.HEIC */,
04A281A92B6A2E730067FB28 /* img2.HEIC */,
04A281A72B6A24840067FB28 /* img1.HEIC */,
007EFE012B60DA3600EFD078 /* IMG_0071.MOV */,
009B7E9A2B5BA788003BE217 /* VideoWriter.swift */,
00EED0782B5A686500637604 /* SpatialVideoConverter.swift */,
04A4E06D2B87483C001894D2 /* SpatialVideoWriter.swift */,
04A4E0712B87789C001894D2 /* CreateVideoByBuffer.swift */,
00EED0772B5A683200637604 /* tdvideo.entitlements */,
00EED0752B5A4CEB00637604 /* FrameProcessor.swift */,
00EED06A2B5A4B1B00637604 /* VideoPreview.swift */,
00EED0662B5A4B1400637604 /* ImageProcessingShaders.metal */,
00EED0672B5A4B1400637604 /* VideoFile.swift */,
00EED0522B5A4A2400637604 /* AppDelegate.swift */,
00EED0542B5A4A2400637604 /* SceneDelegate.swift */,
00EED0562B5A4A2400637604 /* ViewController.swift */,
1EFAF0BE2B8B72D4002A1773 /* 转码 */,
00EED06C2B5A4B6400637604 /* MetalPlayer.swift */,
007EFE032B60E00000EFD078 /* VideoConvertor.swift */,
007EFE072B60EB5900EFD078 /* VideoConvertor2.swift */,
043A6AD62B81B301003776D2 /* VideoConvertor3.swift */,
04A4E0632B870160001894D2 /* VideoConvertor4.swift */,
04880AF32B6F7A3B00FF9E59 /* PlayControllerVideo.swift */,
04A5AE3C2B6B757F000D26EA /* PlayController3.swift */,
043C63112B6B90E80095F268 /* PlayContoller4.swift */,
049EBD822B6C91C4005421C7 /* PlayContoller5.swift */,
04A042532B6F9F4A00EA3EF9 /* PlayContoller6.swift */,
044447782B779B4200C7452B /* PlayContoller7.swift */,
043A6AD42B81AF04003776D2 /* PlayContoller8.swift */,
049ABF9E2B861C450049A94B /* PlayContoller9.swift */,
04A4E0612B86FED3001894D2 /* PlayContoller10.swift */,
04A4E06F2B87697E001894D2 /* PlayContoller11.swift */,
04BD5B3D2B81E21100DBBE08 /* VideoPlayer.swift */,
04A5AE342B6B479E000D26EA /* CCAddImage.swift */,
00EED0582B5A4A2400637604 /* Main.storyboard */,
00EED05B2B5A4A2500637604 /* Assets.xcassets */,
00EED05D2B5A4A2500637604 /* LaunchScreen.storyboard */,
00EED0602B5A4A2500637604 /* Info.plist */,
);
path = tdvideo;
sourceTree = "<group>";
};
00EED0702B5A4BD600637604 /* Frameworks */ = {
isa = PBXGroup;
children = (
049EBD842B6CD4EA005421C7 /* ImageIO.framework */,
043C63132B6B9CC90095F268 /* ExternalAccessory.framework */,
04A5AE3A2B6B6AD6000D26EA /* PhotosUI.framework */,
04A5AE382B6B480C000D26EA /* MobileCoreServices.framework */,
04A5AE322B6B45E8000D26EA /* Photos.framework */,
009B7E9E2B5BB39A003BE217 /* CoreImage.framework */,
009B7E9C2B5BB392003BE217 /* CoreVideo.framework */,
00EED0712B5A4BD600637604 /* AVFoundation.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
1EFAF0BE2B8B72D4002A1773 /* 转码 */ = {
isa = PBXGroup;
children = (
04880AF12B6F702A00FF9E59 /* ViewController2.swift */,
009882712B6269B30076385E /* PlayController.swift */,
04A5AE362B6B47AB000D26EA /* PlayControllerImg.swift */,
);
path = "转码";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
00EED04E2B5A4A2400637604 /* tdvideo */ = {
isa = PBXNativeTarget;
buildConfigurationList = 00EED0632B5A4A2500637604 /* Build configuration list for PBXNativeTarget "tdvideo" */;
buildPhases = (
00EED04B2B5A4A2400637604 /* Sources */,
00EED04C2B5A4A2400637604 /* Frameworks */,
00EED04D2B5A4A2400637604 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = tdvideo;
productName = tdvideo;
productReference = 00EED04F2B5A4A2400637604 /* tdvideo.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
00EED0472B5A4A2400637604 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1520;
LastUpgradeCheck = 1520;
TargetAttributes = {
00EED04E2B5A4A2400637604 = {
CreatedOnToolsVersion = 15.2;
};
};
};
buildConfigurationList = 00EED04A2B5A4A2400637604 /* Build configuration list for PBXProject "tdvideo" */;
compatibilityVersion = "Xcode 14.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 00EED0462B5A4A2400637604;
productRefGroup = 00EED0502B5A4A2400637604 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
00EED04E2B5A4A2400637604 /* tdvideo */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
00EED04D2B5A4A2400637604 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
0444477D2B779C1B00C7452B /* a.HEIC in Resources */,
04A281AC2B6A38DA0067FB28 /* img3.HEIC in Resources */,
007EFE022B60DA3600EFD078 /* IMG_0071.MOV in Resources */,
04A281AA2B6A2E730067FB28 /* img2.HEIC in Resources */,
04A4E06C2B87465B001894D2 /* aa.MOV in Resources */,
04A281A82B6A24840067FB28 /* img1.HEIC in Resources */,
00EED05F2B5A4A2500637604 /* LaunchScreen.storyboard in Resources */,
00EED05C2B5A4A2500637604 /* Assets.xcassets in Resources */,
00EED05A2B5A4A2400637604 /* Main.storyboard in Resources */,
0444477C2B779C1B00C7452B /* b.HEIC in Resources */,
04A5AE312B6B3234000D26EA /* img4.HEIC in Resources */,
04A4E06B2B87465B001894D2 /* bb.MOV in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
00EED04B2B5A4A2400637604 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
04A042542B6F9F4A00EA3EF9 /* PlayContoller6.swift in Sources */,
009B7E9B2B5BA788003BE217 /* VideoWriter.swift in Sources */,
044447792B779B4200C7452B /* PlayContoller7.swift in Sources */,
00EED06D2B5A4B6400637604 /* MetalPlayer.swift in Sources */,
04A4E0722B87789C001894D2 /* CreateVideoByBuffer.swift in Sources */,
00EED06B2B5A4B1B00637604 /* VideoPreview.swift in Sources */,
043A6AD52B81AF04003776D2 /* PlayContoller8.swift in Sources */,
04A5AE372B6B47AB000D26EA /* PlayControllerImg.swift in Sources */,
049EBD832B6C91C4005421C7 /* PlayContoller5.swift in Sources */,
04880AF42B6F7A3B00FF9E59 /* PlayControllerVideo.swift in Sources */,
00EED0692B5A4B1400637604 /* VideoFile.swift in Sources */,
00EED0572B5A4A2400637604 /* ViewController.swift in Sources */,
04A5AE352B6B479E000D26EA /* CCAddImage.swift in Sources */,
00EED0762B5A4CEB00637604 /* FrameProcessor.swift in Sources */,
04A4E06E2B87483C001894D2 /* SpatialVideoWriter.swift in Sources */,
00EED0532B5A4A2400637604 /* AppDelegate.swift in Sources */,
049ABF9F2B861C450049A94B /* PlayContoller9.swift in Sources */,
04880AF22B6F702A00FF9E59 /* ViewController2.swift in Sources */,
00EED0792B5A686500637604 /* SpatialVideoConverter.swift in Sources */,
04BD5B3E2B81E21100DBBE08 /* VideoPlayer.swift in Sources */,
00EED0552B5A4A2400637604 /* SceneDelegate.swift in Sources */,
009882722B6269B30076385E /* PlayController.swift in Sources */,
007EFE042B60E00000EFD078 /* VideoConvertor.swift in Sources */,
04A4E0642B870160001894D2 /* VideoConvertor4.swift in Sources */,
04A5AE3D2B6B757F000D26EA /* PlayController3.swift in Sources */,
043A6AD72B81B301003776D2 /* VideoConvertor3.swift in Sources */,
00EED0682B5A4B1400637604 /* ImageProcessingShaders.metal in Sources */,
04A4E0702B87697E001894D2 /* PlayContoller11.swift in Sources */,
043C63122B6B90E80095F268 /* PlayContoller4.swift in Sources */,
007EFE082B60EB5900EFD078 /* VideoConvertor2.swift in Sources */,
04A4E0622B86FED3001894D2 /* PlayContoller10.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
00EED0582B5A4A2400637604 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
00EED0592B5A4A2400637604 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
00EED05D2B5A4A2500637604 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
00EED05E2B5A4A2500637604 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
00EED0612B5A4A2500637604 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 17.2;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
00EED0622B5A4A2500637604 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 17.2;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
00EED0642B5A4A2500637604 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = tdvideo/tdvideo.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 2RAN5PZH5L;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = tdvideo/Info.plist;
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "我们需要访问您的麦克风以录制视频的音频";
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "我们需要保存视频到您的相册以便您后续查看";
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "我们需要访问您的照片库以保存照片和视频";
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
INFOPLIST_KEY_UIMainStoryboardFile = Main;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.nks.vptesst.sdk;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 1;
};
name = Debug;
};
00EED0652B5A4A2500637604 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = tdvideo/tdvideo.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 2RAN5PZH5L;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = tdvideo/Info.plist;
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "我们需要访问您的麦克风以录制视频的音频";
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "我们需要保存视频到您的相册以便您后续查看";
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "我们需要访问您的照片库以保存照片和视频";
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
INFOPLIST_KEY_UIMainStoryboardFile = Main;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.nks.vptesst.sdk;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 1;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
00EED04A2B5A4A2400637604 /* Build configuration list for PBXProject "tdvideo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
00EED0612B5A4A2500637604 /* Debug */,
00EED0622B5A4A2500637604 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
00EED0632B5A4A2500637604 /* Build configuration list for PBXNativeTarget "tdvideo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
00EED0642B5A4A2500637604 /* Debug */,
00EED0652B5A4A2500637604 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 00EED0472B5A4A2400637604 /* Project object */;
}

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "17709289-9BA9-4062-BBD8-F03238F8D2B8"
type = "1"
version = "2.0">
</Bucket>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>tdvideo.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@ -0,0 +1,227 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "95D5A999-D1A8-46CB-B6A5-EF090B523F87"
type = "1"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "tdvideo/&#x8f6c;&#x7801;/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "258"
endingLineNumber = "258"
landmarkName = "imagePickerController(_:didFinishPickingMediaWithInfo:)"
landmarkType = "7">
<Locations>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb40a"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "254"
endingLineNumber = "254"
offsetFromSymbolStart = "364">
</Location>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb40a"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "254"
endingLineNumber = "254"
offsetFromSymbolStart = "748">
</Location>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb40a"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "254"
endingLineNumber = "254"
offsetFromSymbolStart = "189">
</Location>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb40a"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "254"
endingLineNumber = "254"
offsetFromSymbolStart = "568">
</Location>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb48e"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "258"
endingLineNumber = "258"
offsetFromSymbolStart = "189">
</Location>
<Location
uuid = "23D5B38D-0F04-46A0-9201-80308BCBB496 - 6a9a0728fabb48e"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.imagePickerController(_: __C.UIImagePickerController, didFinishPickingMediaWithInfo: Swift.Dictionary&lt;__C.UIImagePickerControllerInfoKey, Any&gt;) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "258"
endingLineNumber = "258"
offsetFromSymbolStart = "568">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "FDC050D9-64C1-4077-8420-007FABF23815"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "tdvideo/PlayContoller9.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "164"
endingLineNumber = "164"
landmarkName = "toggleRecording(_:)"
landmarkType = "7">
<Locations>
<Location
uuid = "FDC050D9-64C1-4077-8420-007FABF23815 - 37d512f3c396be21"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.PlayContoller9.toggleRecording(__C.UIButton) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/PlayContoller9.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "164"
endingLineNumber = "164"
offsetFromSymbolStart = "40">
</Location>
<Location
uuid = "FDC050D9-64C1-4077-8420-007FABF23815 - 37d512f3c396be21"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.PlayContoller9.toggleRecording(__C.UIButton) -&gt; ()"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/PlayContoller9.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "164"
endingLineNumber = "164"
offsetFromSymbolStart = "62">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "63C812B1-741C-4CBF-A151-3D25760654E9"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "tdvideo/&#x8f6c;&#x7801;/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "22"
endingLineNumber = "22"
landmarkName = "ViewController2"
landmarkType = "3">
<Locations>
<Location
uuid = "63C812B1-741C-4CBF-A151-3D25760654E9 - 4f49b5d7a28b2ba4"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.convertor2.getter : tdvideo.VideoConvertor2"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "0"
endingLineNumber = "0"
offsetFromSymbolStart = "16">
</Location>
<Location
uuid = "63C812B1-741C-4CBF-A151-3D25760654E9 - 3568a32d7e7a84cb"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.init(nibName: Swift.Optional&lt;Swift.String&gt;, bundle: Swift.Optional&lt;__C.NSBundle&gt;) -&gt; tdvideo.ViewController2"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "23"
endingLineNumber = "23"
offsetFromSymbolStart = "207">
</Location>
<Location
uuid = "63C812B1-741C-4CBF-A151-3D25760654E9 - e8b187231d72cae1"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "tdvideo.ViewController2.init(coder: __C.NSCoder) -&gt; Swift.Optional&lt;tdvideo.ViewController2&gt;"
moduleName = "tdvideo"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/i308051/Desktop/Demo/VPro/tdvideo/tdvideo/%E8%BD%AC%E7%A0%81/ViewController2.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "23"
endingLineNumber = "23"
offsetFromSymbolStart = "152">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>tdvideo.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "2FA07A39-337B-40A0-8987-DB8E06B6150A"
type = "1"
version = "2.0">
</Bucket>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>tdvideo.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>tdvideo.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@ -0,0 +1,53 @@
//
// AppDelegate.swift
// tdvideo
//
// Created by aaa on 2024/1/19.
//
/*
https://www.figma.com/file/HtnafU9zeXR8qeW9CAIoIO/VR?type=design&node-id=379%3A963&mode=design&t=1Uxec3RbjuOvNuUK-1
com.peuid.snsdev
com.cjsztea.wanchuan
com.nks.vptest
*/
import UIKit
@main
class AppDelegate: UIResponder, UIApplicationDelegate {

    // Retained here so the window is not deallocated the moment
    // didFinishLaunching returns.
    var window: UIWindow?

    /// Builds the root UI: a tab bar controller whose first tab is the main
    /// `ViewController` wrapped in a navigation controller.
    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // BUGFIX: the window was a local variable and was released immediately,
        // so makeKeyAndVisible() had no lasting effect; it is now stored on self.
        let window = UIWindow(frame: UIScreen.main.bounds)
        window.backgroundColor = UIColor.white
        let vc = ViewController()
        // BUGFIX: the navigation controller was created but never installed;
        // it is now the tab's root so `vc` can push detail screens.
        let navigationController = UINavigationController(rootViewController: vc)
        let tab = UITabBarController()
        tab.viewControllers = [navigationController]
        window.rootViewController = tab
        window.makeKeyAndVisible()
        self.window = window
        return true
    }

    // MARK: - UISceneSession Lifecycle

    /// Selects the configuration used to create a new scene.
    func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {
        return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)
    }

    /// Called when the user discards scene sessions; release any resources
    /// specific to the discarded scenes here (nothing to release currently).
    func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {
    }
}

View File

@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,13 @@
{
"images" : [
{
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,263 @@
//
// CCAddImage.swift
// SwiftProject
//
// Created by soldoros on 2024/1/10.
//
import UIKit
import Photos
import MobileCoreServices
/// Source the picker pulls media from.
///
/// - formIpc: browse the full photo library (name kept for API compatibility)
/// - gallery: browse the saved-photos album
/// - camer: capture with the camera (sic — "camera"; kept for API compatibility)
enum ImagePickerWayStyle {
case formIpc
case gallery
case camer
}
/// Kind of media the caller wants to pick.
///
/// - image: still photos only
/// - video: movies only
/// - all: both photos and movies
enum ImagePickerModelType {
case image
case video
case all
}
/// Callback delivering the pick result: the source used, the media kind, and
/// the payload (`UIImage` for photos, `NSString` file path for saved videos).
typealias AddImagePicekerBlock = (_ style:ImagePickerWayStyle,_ type:ImagePickerModelType,_ datas:Any) -> ()
/// Presents a `UIImagePickerController` (camera or photo library) and hands the
/// picked image / saved-video path back to the caller through a block.
class CCAddImage: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    // Where the media is being picked from (camera / gallery / photo library).
    var _wayStyle: ImagePickerWayStyle?
    // What kind of media the caller asked for.
    var _modelType: ImagePickerModelType?
    // The view controller that presents the picker.
    var _controller: UIViewController?
    // The currently presented picker, if any.
    var _imagePickerController: UIImagePickerController?
    // Callback invoked with the picked image or the saved-video path.
    var _pickerBlock: AddImagePicekerBlock?

    /// Entry point: presents the camera picker for the requested media type.
    ///
    /// - Parameters:
    ///   - controller: the view controller used to present the picker.
    ///   - modelType: image, video, or both.
    ///   - pickerBlock: called with (way, type, data); `data` is a `UIImage`
    ///     for photos or an `NSString` file path for saved videos.
    public func getImagePicker(controller: UIViewController, modelType: ImagePickerModelType, pickerBlock: @escaping AddImagePicekerBlock) {
        _controller = controller
        _modelType = modelType
        _pickerBlock = pickerBlock
        _wayStyle = ImagePickerWayStyle.camer
        addImagePickerFromCamer(modelType: modelType)
    }

    /// Configures and presents a camera-sourced picker.
    func addImagePickerFromCamer(modelType: ImagePickerModelType) {
        guard isCameraAvailable() else {
            print("没有摄像头")
            return
        }
        let picker = UIImagePickerController()
        picker.delegate = self
        picker.modalTransitionStyle = .flipHorizontal
        picker.sourceType = .camera
        picker.videoQuality = .typeHigh
        picker.allowsEditing = true
        picker.mediaTypes = mediaTypes(for: modelType)
        picker.modalPresentationStyle = .overFullScreen
        _imagePickerController = picker
        _controller?.present(picker, animated: true, completion: nil)
    }

    /// Configures and presents a library/album-sourced picker.
    func addImagePickerFromIpc(modelType: ImagePickerModelType) {
        guard isPhotoLibraryAvailable() else {
            print("相册不可用")
            return
        }
        let picker = UIImagePickerController()
        picker.delegate = self
        picker.modalTransitionStyle = .flipHorizontal
        picker.allowsEditing = true
        // `.formIpc` browses the full photo library; any other way style uses
        // the "Saved Photos" album.
        picker.sourceType = (_wayStyle == .formIpc) ? .photoLibrary : .savedPhotosAlbum
        picker.mediaTypes = mediaTypes(for: modelType)
        picker.modalPresentationStyle = .overFullScreen
        _imagePickerController = picker
        _controller?.present(picker, animated: true, completion: nil)
    }

    /// Maps the requested model type onto UTI media-type strings (previously
    /// duplicated in both presenter methods).
    private func mediaTypes(for modelType: ImagePickerModelType) -> [String] {
        switch modelType {
        case .image:
            return [kUTTypeImage as String]
        case .video:
            return [kUTTypeMovie as String]
        case .all:
            return [kUTTypeImage as String, kUTTypeMovie as String]
        }
    }

    // MARK: - UIImagePickerControllerDelegate

    /// Routes the picked media: images go straight to the callback; videos are
    /// first copied into the saved-photos album.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
        let mediaType: NSString = info[UIImagePickerController.InfoKey.mediaType] as! NSString
        if mediaType as String == kUTTypeImage as String {
            _modelType = ImagePickerModelType.image
            saveImageAndUpdataHeader(image: info[UIImagePickerController.InfoKey.editedImage] as! UIImage)
        } else if mediaType as String == kUTTypeMovie as String {
            _modelType = ImagePickerModelType.video
            let url: NSURL = info[UIImagePickerController.InfoKey.mediaURL] as! NSURL
            let urlStr: NSString = url.path! as NSString
            if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(urlStr as String) {
                // BUGFIX: UISaveVideoAtPathToSavedPhotosAlbum requires a completion
                // selector of the exact form video(_:didFinishSavingWithError:contextInfo:);
                // the previous one-argument selector raised "unrecognized selector"
                // at runtime when the save completed.
                UISaveVideoAtPathToSavedPhotosAlbum(urlStr as String, self, #selector(video(_:didFinishSavingWithError:contextInfo:)), nil)
            }
        }
        picker.dismiss(animated: false, completion: nil)
    }

    /// Forwards a picked image to the caller's block.
    func saveImageAndUpdataHeader(image: UIImage) {
        _pickerBlock?(_wayStyle!, _modelType!, image)
    }

    /// Completion callback with the signature mandated by
    /// `UISaveVideoAtPathToSavedPhotosAlbum`; forwards to the legacy hook.
    @objc func video(_ videoPath: String, didFinishSavingWithError error: Error?, contextInfo: UnsafeMutableRawPointer?) {
        if let error = error {
            print("视频保存失败: \(error.localizedDescription)")
            return
        }
        didFinishSaveVideo(videoPath: videoPath as NSString)
    }

    /// Delivers the saved-video path to the caller's block (kept with its
    /// original name for backward compatibility).
    @objc func didFinishSaveVideo(videoPath: NSString) {
        print("视频保存成功")
        _pickerBlock?(_wayStyle!, _modelType!, videoPath)
    }

    /// Whether this device has a usable camera.
    func isCameraAvailable() -> Bool {
        return UIImagePickerController.isSourceTypeAvailable(.camera)
    }

    /// Whether the photo library is accessible.
    func isPhotoLibraryAvailable() -> Bool {
        return UIImagePickerController.isSourceTypeAvailable(.photoLibrary)
    }
}

View File

@ -0,0 +1,140 @@
//
// CreateVideoByBuffer.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import AVFoundation
import Photos
/// Incrementally writes `CVImageBuffer` frames into a QuickTime movie and,
/// once finished, saves the result to the user's photo album.
class CreateVideoByBuffer: NSObject {
    private var videoWriter: AVAssetWriter?
    private var videoWriterInput: AVAssetWriterInput?
    private var adaptor: AVAssetWriterInputPixelBufferAdaptor?
    private var outputURL: URL?
    // True once the writer session has started and frames may be appended.
    private var isWriting: Bool = false

    /// - Parameter outputURL: destination of the .mov file.
    init(outputURL: URL) {
        super.init()
        self.outputURL = outputURL
        setupWriter()
    }

    /// Creates the asset writer, input, and pixel-buffer adaptor, then starts
    /// the writing session at time zero.
    private func setupWriter() {
        guard let outputURL = outputURL else {
            print("Output URL is nil")
            return
        }
        // NOTE(review): output dimensions are hard-coded to 1080p; frames of a
        // different size will be scaled by the encoder — confirm against the
        // actual frame source.
        let width = 1920
        let height = 1080
        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ]
        do {
            videoWriter = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
        } catch {
            print("Error creating AVAssetWriter: \(error)")
            return
        }
        videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
        guard let videoWriterInput = videoWriterInput else {
            print("Failed to create video writer input")
            return
        }
        guard let videoWriter = videoWriter, videoWriter.canAdd(videoWriterInput) else {
            print("Cannot add video writer input to video writer")
            return
        }
        videoWriter.add(videoWriterInput)
        adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: nil)
        guard videoWriter.startWriting() else {
            print("Failed to start writing")
            return
        }
        videoWriter.startSession(atSourceTime: .zero)
        // BUGFIX: this flag was never set to true, so
        // createVideo(from:presentationTime:) rejected every frame and no
        // video was ever produced.
        isWriting = true
    }

    /// Appends one frame at the given presentation time.
    ///
    /// - Parameters:
    ///   - imageBuffer: the pixel data for this frame.
    ///   - presentationTime: timestamp of the frame in the output timeline.
    func createVideo(from imageBuffer: CVImageBuffer, presentationTime: CMTime) {
        guard isWriting else {
            print("Video writer is not ready for frames")
            return
        }
        guard let adaptor = adaptor else {
            print("Failed to get pixel buffer adaptor")
            return
        }
        guard adaptor.append(imageBuffer, withPresentationTime: presentationTime) else {
            print("Failed to append pixel buffer")
            return
        }
    }

    /// Finalizes the movie file and, on success, saves it to the photo album.
    func videoCreationFinished(error: Error?) {
        // Stop accepting frames before finishing the writer.
        isWriting = false
        guard let videoWriter = videoWriter, let videoWriterInput = videoWriterInput else {
            print("Video writer or writer input is nil")
            return
        }
        videoWriterInput.markAsFinished()
        videoWriter.finishWriting { [weak self] in
            guard let self = self else { return }
            if let error = videoWriter.error {
                print("Error finishing AVAssetWriter: \(error)")
            } else {
                print("Video creation finished at \(self.outputURL!)")
                self.saveVideoToAlbum(url: self.outputURL!)
            }
        }
    }

    /// Copies the finished movie into the user's photo library.
    func saveVideoToAlbum(url: URL) {
        PHPhotoLibrary.shared().performChanges {
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
        } completionHandler: { success, error in
            if success {
                print("Video saved to album")
            } else {
                print("Failed to save video to album: \(error?.localizedDescription ?? "Unknown error")")
            }
        }
    }
}

View File

@ -0,0 +1,233 @@
//
// FrameProcessor.swift
// SpatialVideoGist
//
//
// Created by Bryan on 12/15/23.
//
import AVFoundation
import CoreImage
/// Crops `CVPixelBuffer` frames via Core Image, recycling output buffers from
/// a preallocated `CVPixelBufferPool`.
final class FrameProcessor {
// MARK: - Properties
// MARK: Public
/// True once `prepare(with:outputRetainedBufferCountHint:)` has succeeded.
var isPrepared = false
// MARK: Private
/// Core Image context used to render the cropped image into a pixel buffer.
private var ciContext: CIContext?
/// Color space the output is rendered in (derived from the input format).
private var outputColorSpace: CGColorSpace?
/// Pool the output `CVPixelBuffer`s are drawn from.
private var outputPixelBufferPool: CVPixelBufferPool?
/// Format description of the incoming frames, captured in `prepare`.
private(set) var inputFormatDescription: CMFormatDescription?
/// Format description of the buffers produced by `cropPixelBuffer`.
private(set) var outputFormatDescription: CMFormatDescription?
/// Default Metal device; force-unwrap crashes at load on devices without Metal.
private let metalDevice = MTLCreateSystemDefaultDevice()!
/// Cache for `CVMetalTexture` objects created from pixel buffers.
private var textureCache: CVMetalTextureCache!
// MARK: - Methods
/// Allocates the output buffer pool / color space / format description for
/// the given input format and marks the processor ready.
///
/// - Parameters:
///   - formatDescription: format of the frames that will be cropped.
///   - outputRetainedBufferCountHint: how many output buffers the client may
///     hold at once (sizes the pool).
func prepare(
with formatDescription: CMFormatDescription,
outputRetainedBufferCountHint: Int
) {
reset()
(outputPixelBufferPool,
outputColorSpace,
outputFormatDescription) = allocateOutputBufferPool(
with: formatDescription,
outputRetainedBufferCountHint: outputRetainedBufferCountHint
)
// Pool allocation failed; isPrepared stays false.
if outputPixelBufferPool == nil {
return
}
inputFormatDescription = formatDescription
ciContext = CIContext()
var metalTextureCache: CVMetalTextureCache?
if CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &metalTextureCache) != kCVReturnSuccess {
assertionFailure("Unable to allocate texture cache")
} else {
textureCache = metalTextureCache
}
isPrepared = true
}
/// Crops `pixelBufferImage` to `targetRect` and renders the result into a
/// pooled `CVPixelBuffer`.
///
/// - Parameters:
///   - pixelBufferImage: source `CIImage` (typically wrapping a camera frame).
///   - targetRect: crop rectangle in the image's coordinate space.
/// - Returns: a pixel buffer with the cropped contents, or nil if the
///   processor is unprepared or the pool is exhausted.
func cropPixelBuffer(
pixelBufferImage: CIImage,
targetRect: CGRect
) -> CVPixelBuffer? {
guard let ciContext = ciContext,
isPrepared
else {
isPrepared = false
return nil
}
var croppedImage = pixelBufferImage.cropped(to: targetRect)
// Translate the cropped extent back to the origin so it renders at (0, 0).
let originTransform = CGAffineTransform(
translationX: -croppedImage.extent.origin.x,
y: -croppedImage.extent.origin.y
)
croppedImage = croppedImage.transformed(by: originTransform)
var pbuf: CVPixelBuffer?
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
guard let outputPixelBuffer = pbuf else {
print("Allocation failure")
return nil
}
// Render the cropped image into the pooled buffer via Core Image.
ciContext.render(
croppedImage,
to: outputPixelBuffer,
bounds: croppedImage.extent,
colorSpace: outputColorSpace
)
return outputPixelBuffer
}
// MARK: - Private
/// Drops all allocated state and marks the processor unprepared.
private func reset() {
ciContext = nil
outputColorSpace = nil
outputPixelBufferPool = nil
outputFormatDescription = nil
inputFormatDescription = nil
textureCache = nil
isPrepared = false
}
}
/// Buffer-pool allocation helpers.
private extension FrameProcessor {
/// Builds a `CVPixelBufferPool` (plus matching color space and format
/// description) whose buffers are HALF the input width at full height —
/// presumably one eye of a side-by-side frame; confirm with callers.
///
/// - Parameters:
///   - inputFormatDescription: format of the incoming frames.
///   - outputRetainedBufferCountHint: minimum number of buffers the pool
///     keeps available.
/// - Returns: (pool, colorSpace, formatDescription); all nil on failure.
private func allocateOutputBufferPool(
with inputFormatDescription: CMFormatDescription,
outputRetainedBufferCountHint: Int
) ->(
outputBufferPool: CVPixelBufferPool?,
outputColorSpace: CGColorSpace?,
outputFormatDescription: CMFormatDescription?) {
let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription)
// BGRA buffers at half the input width, backed by IOSurface so they can
// be shared with Metal/display.
var pixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: UInt(kCVPixelFormatType_32BGRA),
kCVPixelBufferWidthKey as String: Int(inputDimensions.width / 2),
kCVPixelBufferHeightKey as String: Int(inputDimensions.height),
kCVPixelBufferIOSurfacePropertiesKey as String: [:]
]
// Propagate the input's color metadata (primaries / YCbCr matrix /
// transfer function) onto the output buffers; default to device RGB.
var cgColorSpace = CGColorSpaceCreateDeviceRGB()
if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? {
let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey]
if let colorPrimaries = colorPrimaries {
var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries]
if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] {
colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix
}
if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] {
colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction
}
pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties
}
if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey] {
cgColorSpace = cvColorspace as! CGColorSpace
} else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) {
cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)!
}
}
// Create the pool and pre-warm it so the first frames don't pay the
// allocation cost.
let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint]
var cvPixelBufferPool: CVPixelBufferPool?
CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool)
guard let pixelBufferPool = cvPixelBufferPool else {
assertionFailure("Allocation failure: Could not allocate pixel buffer pool.")
return (nil, nil, nil)
}
preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint)
// Derive the output format description from one sample buffer drawn from
// the pool, then release the sample.
var pixelBuffer: CVPixelBuffer?
var outputFormatDescription: CMFormatDescription?
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary
CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: pixelBuffer,
formatDescriptionOut: &outputFormatDescription)
}
pixelBuffer = nil
return (pixelBufferPool, cgColorSpace, outputFormatDescription)
}
/// Allocates buffers from the pool up to `allocationThreshold`, then releases
/// them, so later allocations are served from already-warm memory.
///
/// - Parameters:
///   - pool: the pool to pre-warm.
///   - allocationThreshold: maximum number of buffers to pre-allocate.
private func preallocateBuffers(
pool: CVPixelBufferPool,
allocationThreshold: Int
) {
var pixelBuffers = [CVPixelBuffer]()
var error: CVReturn = kCVReturnSuccess
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary
var pixelBuffer: CVPixelBuffer?
// Keep allocating until the threshold aux attribute makes the pool
// refuse further buffers.
while error == kCVReturnSuccess {
error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
pixelBuffers.append(pixelBuffer)
}
pixelBuffer = nil
}
pixelBuffers.removeAll()
}
}

View File

@ -0,0 +1,26 @@
//
// VideoHelpers.metal
// SpatialVideoGist
//
// Created by Bryan on 12/18/23.
//
#include <metal_stdlib>
using namespace metal;
/// Composites two stereo inputs into one side-by-side frame:
/// texture A fills the left half of the output, texture B the right half.
/// Assumes outputTexture is at least 2x the width of inputTextureA — TODO
/// confirm at the call site (the output pool is created elsewhere).
kernel void sideBySideEffect(
    texture2d<float, access::read> inputTextureA [[texture(0)]],
    texture2d<float, access::read> inputTextureB [[texture(1)]],
    texture2d<float, access::write> outputTexture [[texture(2)]],
    uint2 gid [[thread_position_in_grid]]
) {
    uint halfWidth = inputTextureA.get_width();
    // Fixed: the host dispatches rounded-up threadgroups, so edge threads
    // past the input extent would otherwise read/write out of bounds
    // (undefined behavior in Metal). Bail out early for those threads.
    if (gid.x >= halfWidth || gid.y >= inputTextureA.get_height()) {
        return;
    }
    float4 inputColorLeft = inputTextureA.read(gid);
    float4 inputColorRight = inputTextureB.read(gid);
    // Left eye at the original coordinate, right eye shifted by one
    // input-width into the right half.
    outputTexture.write(inputColorLeft, gid);
    outputTexture.write(inputColorRight, uint2(gid.x + halfWidth, gid.y));
}

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSCameraUseContinuityCameraDeviceType</key>
<true/>
<key>UIApplicationSceneManifest</key>
<dict>
<key>UIApplicationSupportsMultipleScenes</key>
<false/>
<key>UISceneConfigurations</key>
<dict>
<key>UIWindowSceneSessionRoleApplication</key>
<array>
<dict>
<key>UISceneConfigurationName</key>
<string>Default Configuration</string>
<key>UISceneDelegateClassName</key>
<string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
<key>UISceneStoryboardFile</key>
<string>Main</string>
</dict>
</array>
</dict>
</dict>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
</dict>
</plist>

View File

@ -0,0 +1,345 @@
//
// MetalPlayer.swift
// tdvideo
//
// Created by aaa on 2024/1/19.
//
import AVFoundation
import MetalKit
import SwiftUI
/// Metal
/// Metal-backed view that composites stereo `CVPixelBuffer`s into a single
/// side-by-side frame on the GPU (via the `sideBySideEffect` kernel) and
/// displays the result with Core Image.
class MetalPlayer: MTKView {
    // MARK: - Properties
    // MARK: Private
    /// Color space used when Core Image renders into the drawable.
    private let colorSpace = CGColorSpaceCreateDeviceRGB()
    /// Command queue shared by the compute pass and the Core Image render.
    private lazy var commandQueue: MTLCommandQueue? = {
        return self.device!.makeCommandQueue()
    }()
    /// Core Image context bound to the Metal device. The working color
    /// space is disabled so pixel data passes through unmanaged.
    private lazy var context: CIContext = {
        return CIContext(
            mtlDevice: self.device!,
            options: [CIContextOption.workingColorSpace : NSNull()]
        )
    }()
    /// System default Metal device used for all GPU work.
    private let metalDevice = MTLCreateSystemDefaultDevice()!
    /// Cache mapping `CVPixelBuffer`s to Metal textures.
    private var textureCache: CVMetalTextureCache?
    /// Pool that supplies output `CVPixelBuffer`s for the compute pass.
    private var outputPixelBufferPool: CVPixelBufferPool?
    /// Compute pipeline built from the `sideBySideEffect` kernel.
    private var computePipelineState: MTLComputePipelineState?
    /// Most recently composited frame; setting it triggers a redraw.
    private var image: CIImage? {
        didSet {
            draw()
        }
    }
    // MARK: - Methods
    // MARK: Public
    /// Creates the player with the system default Metal device.
    /// - Parameter frameRect: Initial frame of the view; also sizes the
    ///   output pixel-buffer pool.
    init(frame frameRect: CGRect) {
        super.init(
            frame: frameRect,
            device: metalDevice
        )
        setup(frameSize: frameRect.size)
    }
    /// Creates the player from an archive (storyboard / XIB).
    /// - Parameter aDecoder: Coder supplying the archived view.
    required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        device = MTLCreateSystemDefaultDevice()
        // NOTE(review): the frame is unknown at decode time, so the pool is
        // sized to .zero here — confirm setupCache is re-run with the real
        // size before render() is called on a decoded instance.
        setup(frameSize: .zero)
    }
    // MARK: Private
    /// Configures the view, builds the compute pipeline, and creates the
    /// pixel-buffer pool and texture cache.
    /// - Parameter frameSize: Size used for the output buffer pool.
    private func setup(frameSize: CGSize) {
        // Core Image writes straight into the drawable texture, so the
        // framebuffer must be writable; drawing is driven manually.
        framebufferOnly = false
        enableSetNeedsDisplay = false
        guard let defaultLibrary = metalDevice.makeDefaultLibrary() else {
            assertionFailure("Could not create default Metal device.")
            return
        }
        // Fixed: guard instead of force-unwrapping the kernel lookup, so a
        // missing/renamed kernel fails loudly in debug rather than crashing.
        guard let kernelFunction = defaultLibrary.makeFunction(name: "sideBySideEffect") else {
            assertionFailure("Could not load kernel function sideBySideEffect.")
            return
        }
        do {
            computePipelineState = try metalDevice.makeComputePipelineState(function: kernelFunction)
        } catch {
            print("Could not create pipeline state: \(error)")
        }
        setupCache(
            outputRetainedBufferCountHint: 5,
            frameSize: frameSize
        )
    }
    /// (Re)creates the output pixel-buffer pool and the Metal texture cache.
    /// - Parameters:
    ///   - outputRetainedBufferCountHint: Hint for buffers to retain.
    ///     NOTE(review): createBufferPool currently ignores this and uses
    ///     its own constant of 5 — confirm whether they should be unified.
    ///   - frameSize: Pixel size of the buffers the pool vends.
    private func setupCache(
        outputRetainedBufferCountHint: Int,
        frameSize: CGSize
    ) {
        reset()
        let outputSize = CGSize(
            width: frameSize.width,
            height: frameSize.height
        )
        guard let newPool = createBufferPool(size: outputSize) else { return }
        self.outputPixelBufferPool = newPool
        var metalTextureCache: CVMetalTextureCache?
        if CVMetalTextureCacheCreate(
            kCFAllocatorDefault,
            nil,
            metalDevice,
            nil,
            &metalTextureCache
        ) != kCVReturnSuccess {
            assertionFailure("Unable to allocate texture cache")
        } else {
            textureCache = metalTextureCache
        }
    }
    /// Drops the buffer pool and texture cache so they can be rebuilt.
    func reset() {
        outputPixelBufferPool = nil
        textureCache = nil
    }
    /// Composites a left/right pair of `CVPixelBuffer`s side by side on the
    /// GPU and displays the result.
    /// - Parameters:
    ///   - leftPixelBuffer: Buffer rendered into the left half.
    ///   - rightPixelBuffer: Buffer rendered into the right half.
    func render(
        leftPixelBuffer: CVPixelBuffer,
        rightPixelBuffer: CVPixelBuffer
    ) {
        // Fixed: guard instead of force-unwrapping state that is nil when
        // setup failed (original crashed on outputPixelBufferPool! etc.).
        guard let pool = outputPixelBufferPool,
              let pipelineState = computePipelineState,
              let textureCache = textureCache else {
            print("MetalPlayer is not configured for rendering.")
            return
        }
        var outputPixelBuffer: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            pool,
            &outputPixelBuffer
        )
        guard let outputBuffer = outputPixelBuffer else {
            print("Allocation failure: Could not get pixel buffer from pool. (\(self.description))")
            return
        }
        guard
            let leftInputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: leftPixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let rightInputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: rightPixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let outputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: outputBuffer,
                textureFormat: .bgra8Unorm
            )
        else { return }
        // Encode the side-by-side compute pass.
        guard let commandQueue = commandQueue,
              let commandBuffer = commandQueue.makeCommandBuffer(),
              let commandEncoder = commandBuffer.makeComputeCommandEncoder() else {
            print("Failed to create a Metal command queue.")
            CVMetalTextureCacheFlush(textureCache, 0)
            return
        }
        commandEncoder.label = "BlendGPU"
        commandEncoder.setComputePipelineState(pipelineState)
        commandEncoder.setTexture(leftInputTexture, index: 0)
        commandEncoder.setTexture(rightInputTexture, index: 1)
        commandEncoder.setTexture(outputTexture, index: 2)
        // Threadgroup geometry: round up so every pixel is covered.
        let width = pipelineState.threadExecutionWidth
        let height = pipelineState.maxTotalThreadsPerThreadgroup / width
        let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
        let threadgroupsPerGrid = MTLSize(width: (leftInputTexture.width + width - 1) / width,
                                          height: (leftInputTexture.height + height - 1) / height,
                                          depth: 1)
        commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
        commandEncoder.endEncoding()
        commandBuffer.commit()
        // Blocking wait keeps the output buffer valid until the GPU is done.
        commandBuffer.waitUntilCompleted()
        self.image = CIImage(cvPixelBuffer: outputBuffer)
    }
    /// Runs the compute pass on a single input `CVPixelBuffer` and displays
    /// the result.
    /// NOTE(review): the kernel also reads texture index 1, which this path
    /// never binds — confirm the shader tolerates a missing right input.
    /// - Parameter pixelBuffer: The input buffer.
    func render1(
        pixelBuffer: CVPixelBuffer
    ) {
        guard let pool = outputPixelBufferPool,
              let pipelineState = computePipelineState,
              let textureCache = textureCache else {
            print("MetalPlayer is not configured for rendering.")
            return
        }
        var outputPixelBuffer: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            pool,
            &outputPixelBuffer
        )
        guard let outputBuffer = outputPixelBuffer else {
            print("Allocation failure: Could not get pixel buffer from pool. (\(self.description))")
            return
        }
        guard
            let inputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: pixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let outputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: outputBuffer,
                textureFormat: .bgra8Unorm
            )
        else { return }
        guard let commandQueue = commandQueue,
              let commandBuffer = commandQueue.makeCommandBuffer(),
              let commandEncoder = commandBuffer.makeComputeCommandEncoder() else {
            print("Failed to create a Metal command queue.")
            CVMetalTextureCacheFlush(textureCache, 0)
            return
        }
        commandEncoder.label = "BlendGPU"
        commandEncoder.setComputePipelineState(pipelineState)
        commandEncoder.setTexture(inputTexture, index: 0)
        commandEncoder.setTexture(outputTexture, index: 2)
        let width = pipelineState.threadExecutionWidth
        let height = pipelineState.maxTotalThreadsPerThreadgroup / width
        let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
        // Fixed: the original dispatched one threadgroup per PIXEL
        // (width x height threadgroups of width*height threads each),
        // massively over-dispatching. Divide by the threadgroup size and
        // round up, matching render(leftPixelBuffer:rightPixelBuffer:).
        let threadgroupsPerGrid = MTLSize(width: (inputTexture.width + width - 1) / width,
                                          height: (inputTexture.height + height - 1) / height,
                                          depth: 1)
        commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
        commandEncoder.endEncoding()
        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()
        self.image = CIImage(cvPixelBuffer: outputBuffer)
    }
    /// Scales the current `image` to the drawable and renders it.
    /// - Parameter rect: Ignored; the full drawable is redrawn.
    override func draw(_ rect: CGRect) {
        guard let image = image,
              let currentDrawable = currentDrawable,
              let commandBuffer = commandQueue?.makeCommandBuffer()
        else {
            return
        }
        let currentTexture = currentDrawable.texture
        let drawingBounds = CGRect(origin: .zero, size: drawableSize)
        // Stretch-to-fill: aspect ratio is not preserved.
        let scaleX = drawableSize.width / image.extent.width
        let scaleY = drawableSize.height / image.extent.height
        let scaledImage = image.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
        context.render(scaledImage, to: currentTexture, commandBuffer: commandBuffer, bounds: drawingBounds, colorSpace: colorSpace)
        commandBuffer.present(currentDrawable)
        commandBuffer.commit()
    }
    /// Wraps a `CVPixelBuffer` in an `MTLTexture` via the texture cache.
    /// - Parameters:
    ///   - pixelBuffer: The buffer to wrap.
    ///   - textureFormat: Pixel format of the resulting texture.
    /// - Returns: The texture, or `nil` on failure.
    private func makeTextureFromCVPixelBuffer(
        pixelBuffer: CVPixelBuffer,
        textureFormat: MTLPixelFormat
    ) -> MTLTexture? {
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        guard let textureCache else { return nil }
        var cvTextureOut: CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            textureCache,
            pixelBuffer,
            nil,
            textureFormat,
            width,
            height,
            0,
            &cvTextureOut
        )
        guard let cvTextureOut,
              let texture = CVMetalTextureGetTexture(cvTextureOut)
        else {
            // Flush stale entries so a transient failure does not pin memory.
            CVMetalTextureCacheFlush(textureCache, 0)
            return nil
        }
        return texture
    }
    /// Builds a Metal- and IOSurface-compatible BGRA `CVPixelBufferPool`.
    /// - Parameter size: Pixel size of the buffers the pool vends.
    /// - Returns: The pool, or `nil` if creation failed.
    private func createBufferPool(size: CGSize) -> CVPixelBufferPool? {
        let allocationThreshold = 5
        let sourcePixelBufferAttributesDictionary = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
            kCVPixelBufferWidthKey as String: NSNumber(value: Float(size.width)),
            kCVPixelBufferHeightKey as String: NSNumber(value: Float(size.height)),
            kCVPixelBufferMetalCompatibilityKey as String: kCFBooleanTrue!,
            kCVPixelBufferIOSurfacePropertiesKey as String: [
                kCVPixelBufferIOSurfaceCoreAnimationCompatibilityKey:kCFBooleanTrue,
            ]
        ] as [String : Any]
        let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: allocationThreshold]
        var cvPixelBufferPool: CVPixelBufferPool?
        CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, sourcePixelBufferAttributesDictionary as NSDictionary?, &cvPixelBufferPool)
        return cvPixelBufferPool
    }
}

View File

@ -0,0 +1,92 @@
//
// PlayContoller10.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import AVFoundation
import AVKit
//
// Demo controller that merges a left-eye and a right-eye video into a
// spatial video, then plays the generated file inline.
class PlayContoller10: UIViewController {
    // Source clips and the destination of the generated spatial video.
    var leftEyeVideoURL: URL?
    var rightEyeVideoURL: URL?
    var outputVideoURL: URL?
    // Playback objects for the generated file.
    var player: AVPlayer?
    var playerLayer: AVPlayerLayer?
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .brown
        // Fixed: resolve bundled resources without force-unwrapping so a
        // missing file logs instead of crashing at launch.
        guard let path1 = Bundle.main.path(forResource: "aa", ofType: "MOV"),
              let path2 = Bundle.main.path(forResource: "bb", ofType: "MOV") else {
            print("Missing bundled sample videos aa.MOV / bb.MOV")
            return
        }
        leftEyeVideoURL = URL(fileURLWithPath: path1)
        rightEyeVideoURL = URL(fileURLWithPath: path2)
        outputVideoURL = URL.documentsDirectory.appending(path: "output1111.MOV")
        setupUI()
    }
    /// Lays out the generate button and the inline player view.
    private func setupUI() {
        let generateButton = UIButton(type: .system)
        generateButton.setTitleColor(UIColor.white, for: .normal)
        generateButton.setTitle("生成空间视频并保存到相册", for: .normal)
        generateButton.addTarget(self, action: #selector(generateSpatialVideo), for: .touchUpInside)
        generateButton.frame = CGRect(x: 50, y: 200, width: view.frame.width - 100, height: 50)
        view.addSubview(generateButton)
        // Host view for the AVPlayerLayer.
        let playerView = UIView(frame: CGRect(x: 0, y: 300, width: view.frame.width, height: 300))
        view.addSubview(playerView)
        playerView.backgroundColor = UIColor.black
        player = AVPlayer()
        // Build the layer locally to avoid the original force-unwrap.
        let layer = AVPlayerLayer(player: player)
        layer.frame = playerView.bounds
        layer.backgroundColor = UIColor.black.cgColor
        playerView.layer.addSublayer(layer)
        playerLayer = layer
    }
    /// Kicks off spatial-video generation and plays the result on success.
    @objc private func generateSpatialVideo() {
        guard let leftEyeVideoURL = leftEyeVideoURL,
              let rightEyeVideoURL = rightEyeVideoURL,
              let outputVideoURL = outputVideoURL else {
            print("Invalid video URLs")
            return
        }
        let spatialVideoWriter = SpatialVideoWriter()
        // Fixed: the writer's completion may arrive off the main queue —
        // hop to main before touching the player. The original `Task {}`
        // wrapper around this completion-handler call added nothing and
        // was removed; `self` is captured weakly to avoid a retain cycle.
        spatialVideoWriter.writeSpatialVideo(leftEyeVideoURL: leftEyeVideoURL, rightEyeVideoURL: rightEyeVideoURL, outputVideoURL: outputVideoURL) { [weak self] success, error in
            DispatchQueue.main.async {
                if success {
                    print("空间视频生成成功")
                    self?.playSpatialVideo()
                } else if let error = error {
                    print("生成空间视频失败:\(error.localizedDescription)")
                }
            }
        }
    }
    /// Loads the generated file into the player and starts playback.
    private func playSpatialVideo() {
        guard let outputVideoURL = outputVideoURL else {
            print("Output video URL is nil")
            return
        }
        let playerItem = AVPlayerItem(url: outputVideoURL)
        player?.replaceCurrentItem(with: playerItem)
        player?.play()
    }
}

View File

@ -0,0 +1,185 @@
//
// PlayContoller11.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import AVFoundation
/// Demo of simultaneous front/back camera capture with
/// `AVCaptureMultiCamSession`: two live preview layers, per-frame
/// sample-buffer callbacks, and a movie-file output toggled by a button.
/// NOTE(review): `AVCaptureMultiCamSession.isMultiCamSupported` is never
/// checked and camera/microphone permission is never requested — confirm
/// before relying on this on-device.
class PlayContoller11: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
// Multi-camera capture session shared by all inputs and outputs below.
var session = AVCaptureMultiCamSession()
var backCameraDeviceInput: AVCaptureDeviceInput?
var frontCameraDeviceInput: AVCaptureDeviceInput?
var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var backCameraVideoDataOutput: AVCaptureVideoDataOutput?
var frontCameraVideoDataOutput: AVCaptureVideoDataOutput?
// Single movie-file output; recording state tracked by `isRecording`.
var movieOutput: AVCaptureMovieFileOutput?
var isRecording = false
var recordButton:UIButton?
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
self.configureSession()
self.addRecordButton()
}
/// Builds the capture graph: back + front wide-angle inputs, an audio-free
/// pair of video-data outputs, one movie output, and two preview layers.
/// All mutations are bracketed by begin/commitConfiguration (via defer).
private func configureSession() {
session.beginConfiguration()
defer {
session.commitConfiguration()
}
// Back camera input.
guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
print("Could not find the back camera")
return
}
do {
backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
guard let backCameraDeviceInput = backCameraDeviceInput,
session.canAddInput(backCameraDeviceInput) else {
print("Could not add back camera input")
return
}
session.addInput(backCameraDeviceInput)
} catch {
print("Could not create back camera device input: \(error)")
return
}
// Front camera input.
guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
print("Could not find the front camera")
return
}
do {
frontCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
guard let frontCameraDeviceInput = frontCameraDeviceInput,
session.canAddInput(frontCameraDeviceInput) else {
print("Could not add front camera input")
return
}
session.addInput(frontCameraDeviceInput)
} catch {
print("Could not create front camera device input: \(error)")
return
}
// Preview layers: back camera on the left half, front on the right.
// NOTE(review): both layers are attached to the same session without
// explicit per-camera connections — confirm each shows the intended feed.
backCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
backCameraVideoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer {
view.layer.addSublayer(backCameraVideoPreviewLayer)
}
frontCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
frontCameraVideoPreviewLayer?.frame = CGRect(x: view.frame.size.width / 2, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let frontCameraVideoPreviewLayer = frontCameraVideoPreviewLayer {
view.layer.addSublayer(frontCameraVideoPreviewLayer)
}
// Per-frame video-data outputs, each with its own delegate queue.
backCameraVideoDataOutput = AVCaptureVideoDataOutput()
backCameraVideoDataOutput?.setSampleBufferDelegate(self, queue: DispatchQueue(label: "backCameraVideoDataOutputQueue"))
if let backCameraVideoDataOutput = backCameraVideoDataOutput,
session.canAddOutput(backCameraVideoDataOutput) {
session.addOutput(backCameraVideoDataOutput)
}
frontCameraVideoDataOutput = AVCaptureVideoDataOutput()
frontCameraVideoDataOutput?.setSampleBufferDelegate(self, queue: DispatchQueue(label: "frontCameraVideoDataOutputQueue"))
if let frontCameraVideoDataOutput = frontCameraVideoDataOutput,
session.canAddOutput(frontCameraVideoDataOutput) {
session.addOutput(frontCameraVideoDataOutput)
}
// Movie-file output driven by start/stopRecording.
movieOutput = AVCaptureMovieFileOutput()
if let movieOutput = movieOutput, session.canAddOutput(movieOutput) {
session.addOutput(movieOutput)
}
// startRunning blocks, so start the session off the main thread.
DispatchQueue.global().async {
self.session.startRunning()
}
}
/// Adds the record toggle button near the bottom of the view.
private func addRecordButton() {
recordButton = UIButton(type: .system)
recordButton!.setTitle("开始录制", for: .normal)
recordButton!.addTarget(self, action: #selector(toggleRecording), for: .touchUpInside)
recordButton!.frame = CGRect(x: 50, y: view.frame.height - 200, width: 200, height: 50)
view.addSubview(recordButton!)
}
/// Toggles between recording and idle when the button is tapped.
@objc private func toggleRecording() {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
/// Starts recording to a temp file and updates the button title.
private func startRecording() {
guard let movieOutput = movieOutput else { return }
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("output.mov")
movieOutput.startRecording(to: outputURL, recordingDelegate: self)
isRecording = true
recordButton!.setTitle("停止录制", for: .normal)
}
/// Stops the in-flight recording and updates the button title.
private func stopRecording() {
guard let movieOutput = movieOutput else { return }
movieOutput.stopRecording()
isRecording = false
recordButton!.setTitle("开始录制", for: .normal)
}
/// Sample-buffer callback for both video-data outputs. Runs on the
/// per-output delegate queues, not the main thread.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Ignore buffers that carry no image data.
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
// While recording, identify which camera produced the frame from the
// connection's source device type and position.
if isRecording {
if connection.inputPorts.contains(where: { $0.mediaType == .video && $0.sourceDeviceType == .builtInWideAngleCamera && $0.sourceDevicePosition == .back }) {
print("后置摄像头视频数据")
} else if connection.inputPorts.contains(where: { $0.mediaType == .video && $0.sourceDeviceType == .builtInWideAngleCamera && $0.sourceDevicePosition == .front }) {
print("前置摄像头视频数据")
}
}
// `imageBuffer` is currently unused beyond the guard; per-frame
// processing would go here.
}
}
// MARK: - AVCaptureFileOutputRecordingDelegate
extension PlayContoller11: AVCaptureFileOutputRecordingDelegate {
    /// Invoked when the movie output begins writing to disk.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("开始录制")
    }
    /// Invoked when the movie output finishes, with an optional error.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        guard let error else {
            print("录制完成:\(outputFileURL)")
            return
        }
        print("录制出错:\(error.localizedDescription)")
    }
}

View File

@ -0,0 +1,285 @@
//
// PlayContoller4.swift
// tdvideo
//
// Created by mac on 2024/2/1.
//
import AVKit
import UIKit
//class PlayContoller4: UIViewController {
//
// private var playerViewController: AVPlayerViewController?
// private var player: AVPlayer?
//
// var link = false
// var play = false
// var btn:UIButton?
//
// override func viewDidLoad() {
// super.viewDidLoad()
// view.backgroundColor = UIColor.brown
//
//
//
// btn = UIButton(type: UIButton.ButtonType.custom)
// self.view.addSubview(btn!)
// btn!.frame = CGRect(x: 100, y: 100, width: 200, height: 50)
// btn!.setTitle("", for: UIControl.State.normal)
// btn!.addTarget(self, action: #selector(buttonPressed(sender:)), for: UIControl.Event.touchUpInside)
//
// // AVPlayerViewController
// playerViewController = AVPlayerViewController()
// playerViewController?.view.frame = CGRect(x: 0, y: 200, width: self.view.frame.size.width, height: 350)
// addChild(playerViewController!)
// view.addSubview(playerViewController!.view)
//
// // AVPlayer
// guard let videoURL = Bundle.main.url(forResource: "IMG_0071", withExtension: "MOV") else {
// print("")
// return
// }
// player = AVPlayer(url: videoURL)
// playerViewController?.player = player
// player?.play()
//
// // UIScreenMirroringDidChange
// NotificationCenter.default.addObserver(self, selector: #selector(screenMirroringChanged(_:)), name: UIScreen.didConnectNotification, object: nil)
// NotificationCenter.default.addObserver(self, selector: #selector(screenMirroringChanged(_:)), name: UIScreen.didDisconnectNotification, object: nil)
//
//
// // AirPlay
//// NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
//
// //
// let isScreenMirroring = UIScreen.screens.count > 1
// print("Screen Mirroring: \(isScreenMirroring)")
// setttinisScreenMirroring(isScreenMirroring: isScreenMirroring)
// }
//
// @objc private func screenMirroringChanged(_ notification: NSNotification) {
// //
// let isScreenMirroring = UIScreen.screens.count > 1
// print("Screen Mirroring: \(isScreenMirroring)")
// setttinisScreenMirroring(isScreenMirroring: isScreenMirroring)
// }
//
// func setttinisScreenMirroring(isScreenMirroring:Bool){
//
// //
// if(isScreenMirroring){
// link = true
//// let currentRoute = AVAudioSession.sharedInstance().currentRoute
//// let isAirPlayActive = currentRoute.outputs.contains { output in
//// return output.portType == AVAudioSession.Port.airPlay
//// }
// if(play == true){
// player!.usesExternalPlaybackWhileExternalScreenIsActive = true
// player!.allowsExternalPlayback = true
// btn!.setTitle("", for: UIControl.State.normal)
// }else{
// player!.usesExternalPlaybackWhileExternalScreenIsActive = false
// player!.allowsExternalPlayback = false
// btn!.setTitle("", for: UIControl.State.normal)
// }
// }
//
// //
// else{
// link = false
// // AirPlay
// player!.usesExternalPlaybackWhileExternalScreenIsActive = false
// player!.allowsExternalPlayback = false
// btn!.setTitle("", for: UIControl.State.normal)
// }
// }
//
// @objc private func airPlayStatusDidChange(_ notification: Notification) {
// print("")
// }
//
// deinit {
// NotificationCenter.default.removeObserver(self)
// }
//
// @objc func buttonPressed(sender:UIButton){
//
// if(link == true){
// play = !play
// if(play == true){
// // AirPlay
// player!.usesExternalPlaybackWhileExternalScreenIsActive = true
// player!.allowsExternalPlayback = true
// btn!.setTitle("", for: UIControl.State.normal)
// }else{
// player!.usesExternalPlaybackWhileExternalScreenIsActive = false
// player!.allowsExternalPlayback = false
// btn!.setTitle("", for: UIControl.State.normal)
// }
// }
// }
//}
// Plays a bundled video and streams it to an external display over
// AirPlay/HDMI, toggling external playback as audio routes change.
class PlayContoller4: UIViewController {
    private var playerViewController: AVPlayerViewController?
    private var player: AVPlayer?
    // True while an external (HDMI / AirPlay) route is connected.
    var link = false
    // True while external playback is currently enabled.
    var play = false
    var btn:UIButton?
    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = UIColor.brown
        // Build the toggle button locally to avoid repeated force-unwraps.
        let button = UIButton(type: UIButton.ButtonType.custom)
        self.view.addSubview(button)
        button.frame = CGRect(x: 100, y: 100, width: 200, height: 50)
        button.setTitle("未连接设备", for: UIControl.State.normal)
        button.addTarget(self, action: #selector(buttonPressed(sender:)), for: UIControl.Event.touchUpInside)
        button.layer.borderWidth = 1
        button.layer.borderColor = UIColor.white.cgColor
        btn = button
        // Embed an AVPlayerViewController for playback UI.
        let playerVC = AVPlayerViewController()
        playerVC.view.frame = CGRect(x: 0, y: 200, width: self.view.frame.size.width, height: 350)
        addChild(playerVC)
        view.addSubview(playerVC.view)
        playerViewController = playerVC
        // Fixed: guard the bundled resource instead of crashing via `!`.
        guard let videoURL = Bundle.main.url(forResource: "IMG_0071", withExtension: "MOV") else {
            print("无法加载视频文件")
            return
        }
        let avPlayer = AVPlayer(url: videoURL)
        player = avPlayer
        playerVC.player = avPlayer
        // Route changes signal AirPlay/HDMI connect and disconnect.
        NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Re-evaluate the current route whenever the view appears.
        checkAirPlayStatus()
    }
    @objc private func airPlayStatusDidChange(_ notification: Notification) {
        checkAirPlayStatus()
    }
    /// Inspects the current audio route and updates external-playback state.
    private func checkAirPlayStatus() {
        print("设备连接变化")
        let currentRoute = AVAudioSession.sharedInstance().currentRoute
        let isAirPlayActive = currentRoute.outputs.contains { output in
            return output.portType == AVAudioSession.Port.HDMI ||
                output.portType == AVAudioSession.Port.airPlay
        }
        setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
    }
    /// Enables or disables external playback based on the mirroring state.
    /// - Parameter isScreenMirroring: Whether an external route is active.
    func setttinisScreenMirroring(isScreenMirroring:Bool){
        // Fixed: bail out instead of force-unwrapping player/btn, which
        // crashed when the video failed to load in viewDidLoad.
        guard let player = player, let btn = btn else { return }
        if(isScreenMirroring){
            print("已连接")
            link = true
            play = true
            player.usesExternalPlaybackWhileExternalScreenIsActive = true
            player.allowsExternalPlayback = true
            btn.setTitle("串流播放中", for: UIControl.State.normal)
            player.play()
        }
        else{
            print("未连接")
            link = false
            play = false
            // Disable AirPlay external playback when disconnected.
            player.usesExternalPlaybackWhileExternalScreenIsActive = false
            player.allowsExternalPlayback = false
            btn.setTitle("未连接设备", for: UIControl.State.normal)
        }
    }
    deinit {
        NotificationCenter.default.removeObserver(self)
    }
    /// Manually toggles external playback while a device is connected.
    @objc func buttonPressed(sender:UIButton){
        guard link, let player = player, let btn = btn else { return }
        play = !play
        if(play == true){
            player.usesExternalPlaybackWhileExternalScreenIsActive = true
            player.allowsExternalPlayback = true
            btn.setTitle("串流播放中", for: UIControl.State.normal)
        }else{
            player.usesExternalPlaybackWhileExternalScreenIsActive = false
            player.allowsExternalPlayback = false
            btn.setTitle("已连接设备", for: UIControl.State.normal)
        }
    }
}
//import UIKit
//import ExternalAccessory
//
//class PlayContoller4: UIViewController {
//
//
// override func viewDidLoad() {
// super.viewDidLoad()
// self.view.backgroundColor = UIColor.brown
//
//
// NotificationCenter.default.addObserver(self, selector: #selector(didConnectAccessory(_:)), name: Notification.Name.EAAccessoryDidConnect, object: nil)
// NotificationCenter.default.addObserver(self, selector: #selector(didDisconnectAccessory(_:)), name: Notification.Name.EAAccessoryDidDisconnect, object: nil)
// EAAccessoryManager.shared().registerForLocalNotifications()
// print(EAAccessoryManager.shared().connectedAccessories)
// }
//
//
// @objc
// private func didConnectAccessory(_ notification: NSNotification) {
// let accessoryManager = EAAccessoryManager.shared()
// for accessory in accessoryManager.connectedAccessories {
// print(accessory.protocolStrings)
// if accessory.protocolStrings.contains("") {
// //We have found the accessory corresponding to our gadget
// let description = """
// Accessory name: \(accessory.name)
// Manufacturer: \(accessory.manufacturer)
// Model number: \(accessory.modelNumber)
// Serial number: \(accessory.serialNumber)
// HW Revision: \(accessory.hardwareRevision)
// FW Revision: \(accessory.firmwareRevision)
// Connected: \(accessory.isConnected)
// Connection ID: \(accessory.connectionID)
// Protocol strings: \(accessory.protocolStrings.joined(separator: "; "))
// """
// print(description)
//
// }
//
//// self.accessory = accessory
//
// }
// }
//
// @objc
// private func didDisconnectAccessory(_ notification: NSNotification) {
// print("disconnect")
// }
//}

View File

@ -0,0 +1,401 @@
//
// PlayContoller5.swift
// tdvideo
//
// Created by mac on 2024/2/2.
//
import UIKit
import AVFoundation
import Photos
class PlayContoller5: UIViewController, AVCaptureFileOutputRecordingDelegate {
var session = AVCaptureMultiCamSession()
var backCameraDeviceInput: AVCaptureDeviceInput?
var frontCameraDeviceInput: AVCaptureDeviceInput?
var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var startRecordingButton: UIButton?
var backCameraMovieOutput: AVCaptureMovieFileOutput?
var frontCameraMovieOutput: AVCaptureMovieFileOutput?
var isRecording = false
var imgs:NSMutableArray = NSMutableArray()
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
imgs.removeAllObjects()
configureSession()
setupUI()
}
private func configureSession() {
session.beginConfiguration()
defer {
session.commitConfiguration()
}
//builtInWideAngleCamera
//builtInWideAngleCamera
//builtInTrueDepthCamera
//
guard let backCamera = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) else {
print("Could not find the back camera")
return
}
do {
backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
guard let backCameraDeviceInput = backCameraDeviceInput,
session.canAddInput(backCameraDeviceInput) else {
print("Could not add back camera input")
return
}
session.addInput(backCameraDeviceInput)
} catch {
print("Could not create back camera device input: \(error)")
return
}
//
guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
print("Could not find the main camera")
return
}
do {
frontCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
guard let frontCameraDeviceInput = frontCameraDeviceInput,
session.canAddInput(frontCameraDeviceInput) else {
print("Could not add front camera input")
return
}
session.addInput(frontCameraDeviceInput)
} catch {
print("Could not create front camera device input: \(error)")
return
}
//
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
print("Could not find audio device")
return
}
do {
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard session.canAddInput(audioDeviceInput) else {
print("Could not add audio input")
return
}
session.addInput(audioDeviceInput)
} catch {
print("Could not create audio device input: \(error)")
return
}
//
backCameraMovieOutput = AVCaptureMovieFileOutput()
guard let backCameraMovieOutput = backCameraMovieOutput,
session.canAddOutput(backCameraMovieOutput) else {
print("Could not add the back camera movie output")
return
}
session.addOutput(backCameraMovieOutput)
//
frontCameraMovieOutput = AVCaptureMovieFileOutput()
guard let frontCameraMovieOutput = frontCameraMovieOutput,
session.canAddOutput(frontCameraMovieOutput) else {
print("Could not add the front camera movie output")
return
}
session.addOutput(frontCameraMovieOutput)
//
backCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
backCameraVideoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer {
view.layer.addSublayer(backCameraVideoPreviewLayer)
}
frontCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
frontCameraVideoPreviewLayer?.frame = CGRect(x: view.frame.size.width / 2, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let frontCameraVideoPreviewLayer = frontCameraVideoPreviewLayer {
view.layer.addSublayer(frontCameraVideoPreviewLayer)
}
DispatchQueue.global().async {
self.session.startRunning()
}
}
private func setupUI() {
startRecordingButton = UIButton(type: .system)
startRecordingButton?.setTitle("Start Recording", for: .normal)
startRecordingButton?.setTitleColor(UIColor.brown, for: UIControl.State.normal)
startRecordingButton?.addTarget(self, action: #selector(toggleRecording(_:)), for: .touchUpInside)
startRecordingButton?.frame = CGRect(x: 0, y: view.frame.size.height - 250, width: view.frame.size.width, height: 50)
view.addSubview(startRecordingButton!)
}
@objc private func toggleRecording(_ sender: UIButton) {
startRecording()
}
private func startRecording() {
imgs.removeAllObjects()
guard let backCameraMovieOutput = backCameraMovieOutput,
let frontCameraMovieOutput = frontCameraMovieOutput else {
print("Movie output not configured")
return
}
//
let soundID: SystemSoundID = 1108
AudioServicesPlaySystemSound(soundID)
let time = Date().timeIntervalSince1970
let name1 = "back" + String(time) + ".mov"
let name2 = "front" + String(time) + ".mov"
let backCameraOutputURL = URL.documentsDirectory.appending(path:name1)
let frontCameraOutputURL = URL.documentsDirectory.appending(path:name2)
backCameraMovieOutput.startRecording(to: backCameraOutputURL, recordingDelegate: self)
frontCameraMovieOutput.startRecording(to: frontCameraOutputURL, recordingDelegate: self)
//0.1
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
self.stopRecording()
}
}
private func stopRecording() {
guard let backCameraMovieOutput = backCameraMovieOutput,
let frontCameraMovieOutput = frontCameraMovieOutput else {
print("Movie output not configured")
return
}
backCameraMovieOutput.stopRecording()
frontCameraMovieOutput.stopRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if let error = error {
print("Video recording finished with error: \(error.localizedDescription)")
} else {
if output == backCameraMovieOutput {
print("Back camera video recorded: \(outputFileURL)")
if let firstFrame = self.firstFrame(from: outputFileURL) {
// self.saveImageToLibrary(image: firstFrame)
imgs.add(firstFrame)
}
} else if output == frontCameraMovieOutput {
print("Front camera video recorded: \(outputFileURL)")
if let firstFrame = self.firstFrame(from: outputFileURL) {
// self.saveImageToLibrary(image: firstFrame)
imgs.add(firstFrame)
}
}
if(imgs.count == 2){
bbb()
}
}
}
/// Extracts the frame at t = 0 from the video at `videoURL`.
/// Returns nil (after logging the failure) when frame generation fails.
private func firstFrame(from videoURL: URL) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: AVURLAsset(url: videoURL))
    // Honor the track's transform so the frame comes out upright.
    generator.appliesPreferredTrackTransform = true
    let start = CMTime(seconds: 0.0, preferredTimescale: 1)
    do {
        return UIImage(cgImage: try generator.copyCGImage(at: start, actualTime: nil))
    } catch {
        print("Error generating first frame: \(error.localizedDescription)")
        return nil
    }
}
/// Writes `image` into the user's photo library and logs the outcome.
private func saveImageToLibrary(image: UIImage) {
    PHPhotoLibrary.shared().performChanges {
        PHAssetChangeRequest.creationRequestForAsset(from: image)
    } completionHandler: { didSave, saveError in
        if didSave {
            print("Image saved to library")
        } else if let saveError = saveError {
            print("Error saving image to library: \(saveError.localizedDescription)")
        }
    }
}
/// Composes the two captured frames (imgs[0] = left/back, imgs[1] = right/front)
/// into one two-image HEIC tagged as a stereo pair, then saves it to the
/// photo library. The right eye carries a camera-extrinsics position of
/// x = -0.019238 m (~19 mm stereo baseline); both eyes share an identity rotation.
func bbb(){
    // Defensive casts instead of `as!` — a short/odd imgs state logs and bails.
    guard imgs.count >= 2,
          let img1 = imgs[0] as? UIImage,
          let img2 = imgs[1] as? UIImage else {
        print("Need two captured frames before composing a stereo image")
        return
    }
    let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
    // 2 = number of images the destination will hold (left + right eye).
    guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil) else {
        print("Failed to create HEIC destination at \(url)")
        return
    }
    // Left eye: stereo-pair group membership + zero extrinsics (reference eye).
    let properties1 = [
        kCGImagePropertyGroups: [
            kCGImagePropertyGroupIndex: 0,
            kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
            kCGImagePropertyGroupImageIndexLeft: 0,
            kCGImagePropertyGroupImageIndexRight: 1,
        ],
        kCGImagePropertyHEIFDictionary: [
            kIIOMetadata_CameraExtrinsicsKey: [
                kIIOCameraExtrinsics_CoordinateSystemID: 0,
                kIIOCameraExtrinsics_Position: [
                    0,
                    0,
                    0
                ],
                kIIOCameraExtrinsics_Rotation: [
                    1, 0, 0,
                    0, 1, 0,
                    0, 0, 1
                ]
            ]
        ]
    ]
    // Right eye: same group, offset ~ -19.2 mm along x.
    let properties2 = [
        kCGImagePropertyGroups: [
            kCGImagePropertyGroupIndex: 0,
            kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
            kCGImagePropertyGroupImageIndexLeft: 0,
            kCGImagePropertyGroupImageIndexRight: 1,
        ],
        kCGImagePropertyHEIFDictionary: [
            kIIOMetadata_CameraExtrinsicsKey: [
                kIIOCameraExtrinsics_CoordinateSystemID: 0,
                kIIOCameraExtrinsics_Position: [
                    -0.019238,
                    0,
                    0
                ],
                kIIOCameraExtrinsics_Rotation: [
                    1, 0, 0,
                    0, 1, 0,
                    0, 0, 1
                ]
            ]
        ]
    ]
    // Normalize orientation so both frames are stored with upright pixels.
    let leftImg = fixOrientation(img1)
    let rightImg = fixOrientation(img2)
    guard let leftCG = leftImg.cgImage, let rightCG = rightImg.cgImage else {
        print("Stereo source frames have no CGImage backing")
        return
    }
    CGImageDestinationAddImage(destination, leftCG, properties1 as CFDictionary)
    CGImageDestinationAddImage(destination, rightCG, properties2 as CFDictionary)
    // Finalize can fail (e.g. disk full); the original ignored its result.
    guard CGImageDestinationFinalize(destination) else {
        print("Failed to finalize stereo HEIC at \(url)")
        return
    }
    // Sanity check: read the second image's metadata back and dump it.
    guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
          let properties22 = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] else {
        return
    }
    print(properties22)
    savePhoto(url)
}
/// Saves the file at `fileURL` into the photo library as a photo-proxy
/// resource, logging success or failure.
func savePhoto(_ fileURL: URL) {
    PHPhotoLibrary.shared().performChanges {
        PHAssetCreationRequest.forAsset()
            .addResource(with: .photoProxy, fileURL: fileURL, options: nil)
    } completionHandler: { _, saveError in
        if let saveError = saveError {
            print("Error saving photo to library: \(saveError.localizedDescription)")
        } else {
            print("Photo saved to library successfully.")
        }
    }
}
//
/// Returns a copy of `image` redrawn so its pixel data is upright
/// (`imageOrientation == .up`), applying the rotation/mirror implied by the
/// original orientation. Returns the input unchanged when it is already
/// upright or when a drawing context cannot be created.
func fixOrientation(_ image: UIImage) -> UIImage {
    // No-op if the orientation is already correct
    guard image.imageOrientation != .up else { return image }
    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    var transform = CGAffineTransform.identity
    switch image.imageOrientation {
    case .down, .downMirrored:
        transform = transform.translatedBy(x: image.size.width, y: image.size.height)
        transform = transform.rotated(by: .pi)
    case .left, .leftMirrored:
        transform = transform.translatedBy(x: image.size.width, y: 0)
        transform = transform.rotated(by: .pi / 2)
    case .right, .rightMirrored:
        transform = transform.translatedBy(x: 0, y: image.size.height)
        transform = transform.rotated(by: -.pi / 2)
    default:
        break
    }
    // Second pass: undo mirroring with a horizontal flip.
    switch image.imageOrientation {
    case .upMirrored, .downMirrored:
        transform = transform.translatedBy(x: image.size.width, y: 0)
        transform = transform.scaledBy(x: -1, y: 1)
    case .leftMirrored, .rightMirrored:
        transform = transform.translatedBy(x: image.size.height, y: 0)
        transform = transform.scaledBy(x: -1, y: 1)
    default:
        break
    }
    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    // NOTE(review): the context is sized in points (image.size), not pixels —
    // for images with scale > 1 this shrinks the pixel dimensions; confirm
    // that is acceptable for the HEIC output.
    guard let cgImage = image.cgImage,
          let colorSpace = cgImage.colorSpace,
          let context = CGContext(data: nil,
                                  width: Int(image.size.width),
                                  height: Int(image.size.height),
                                  bitsPerComponent: cgImage.bitsPerComponent,
                                  bytesPerRow: 0, // 0 lets CoreGraphics choose the stride
                                  space: colorSpace,
                                  bitmapInfo: cgImage.bitmapInfo.rawValue)
    else {
        return image
    }
    context.concatenate(transform)
    switch image.imageOrientation {
    case .left, .leftMirrored, .right, .rightMirrored:
        // 90°-rotated sources: draw into the transposed (height x width) rect.
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
    default:
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
    }
    // And now we just create a new UIImage from the drawing context
    guard let cgImageFixed = context.makeImage() else { return image }
    let fixedImage = UIImage(cgImage: cgImageFixed)
    return fixedImage
}
}

View File

@ -0,0 +1,160 @@
//
// PlayContoller6.swift
// tdvideo
//
// Created by mac on 2024/2/4.
//
import UIKit
import AVFoundation
import CoreImage
import Foundation
import Observation
import VideoToolbox
/// Plays the bundled IMG_0071.MOV and offers an "导出" (export) button that
/// re-exports the clip to Documents/output_path.mp4 and plays the result.
class PlayContoller6: UIViewController {
    private var player: AVPlayer!
    private var playerLayer: AVPlayerLayer!
    private var exportButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor.brown
        setupPlayer()
        setupExportButton()
    }

    /// Builds the player layer over most of the view and starts playback.
    private func setupPlayer() {
        // Force-unwrap is deliberate: the resource ships in the bundle and a
        // missing file is a packaging bug that should fail loudly.
        let path = Bundle.main.path(forResource: "IMG_0071", ofType: "MOV")
        let videoURL = URL.init(filePath: path!)
        let playerItem = AVPlayerItem(url: videoURL)
        player = AVPlayer(playerItem: playerItem)
        playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = CGRect(x: 0, y: 0, width: view.bounds.width, height: view.bounds.height - 100)
        view.layer.addSublayer(playerLayer)
        player.play()
    }

    /// Adds the export button below the player.
    private func setupExportButton() {
        exportButton = UIButton(type: .system)
        exportButton.setTitleColor(UIColor.white, for: UIControl.State.normal)
        exportButton.setTitle("导出", for: .normal)
        exportButton.frame = CGRect(x: 0, y: view.bounds.height - 260, width: view.bounds.width, height: 100)
        exportButton.addTarget(self, action: #selector(exportButtonTapped), for: .touchUpInside)
        view.addSubview(exportButton)
    }

    /// Exports the bundled clip and, on success, swaps the player over to the
    /// exported copy.
    @objc private func exportButtonTapped() {
        let path = Bundle.main.path(forResource: "IMG_0071", ofType: "MOV")
        let videoURL = URL.init(filePath: path!)
        let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        let outputURL = documentsDirectory.appendingPathComponent("output_path.mp4")
        // The export session fails if the destination already exists, so remove
        // any previous export. Only attempt it when the file is present, so the
        // first run does not log a spurious error.
        if FileManager.default.fileExists(atPath: outputURL.path) {
            do {
                try FileManager.default.removeItem(atPath: outputURL.path)
                print("视频文件删除成功")
            } catch {
                print("删除视频文件出错:\(error)")
            }
        }
        let width = 1280
        let height = 720
        let dataRate = 5000000
        let horizontalDisparity: Float = 0.0
        let horizontalFieldOfView: Float = 90.0
        exportVideo(url: videoURL, outputURL: outputURL, width: width, height: height, dataRate: dataRate, horizontalDisparity: horizontalDisparity, horizontalFieldOfView: horizontalFieldOfView) { exportedAsset in
            // Guard instead of force-unwrapping: the completion is called with
            // nil on every failure path, and `exportedAsset!` crashed there.
            guard let exportedAsset = exportedAsset else { return }
            DispatchQueue.main.async {
                let exportedPlayerItem = AVPlayerItem(asset: exportedAsset)
                self.player.replaceCurrentItem(with: exportedPlayerItem)
                self.player.play()
            }
        }
    }

    /// Exports `url` to `outputURL` as an MP4 using the passthrough preset.
    ///
    /// - Parameters:
    ///   - width, height, dataRate, horizontalDisparity, horizontalFieldOfView:
    ///     currently unused. In the original code they only configured an
    ///     AVAssetWriter that was created but never started (dead code, now
    ///     removed); the parameters are kept for call-site compatibility.
    ///   - completion: invoked exactly once with the exported asset, or nil on
    ///     any failure.
    private func exportVideo(url: URL, outputURL: URL, width: Int, height: Int, dataRate: Int, horizontalDisparity: Float, horizontalFieldOfView: Float, completion: @escaping (AVAsset?) -> Void) {
        let asset = AVAsset(url: url)
        guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) else {
            print("Failed to create export session")
            completion(nil)
            return
        }
        // Mirror the source's video track into a composition; it only feeds
        // the video composition assigned below.
        let composition = AVMutableComposition()
        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
            print("Failed to insert video track into composition")
            completion(nil)
            return
        }
        guard let assetTrack = asset.tracks(withMediaType: .video).first else {
            print("Failed to get video track from asset")
            completion(nil)
            return
        }
        do {
            try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: assetTrack, at: .zero)
        } catch {
            print("Failed to insert video track into composition")
            completion(nil)
            return
        }
        // NOTE(review): the passthrough preset does not re-encode, so this
        // video composition may be ignored or rejected by the session —
        // verify on device.
        exportSession.videoComposition = AVMutableVideoComposition(propertiesOf: composition)
        exportSession.outputFileType = AVFileType.mp4
        exportSession.outputURL = outputURL
        exportSession.exportAsynchronously {
            switch exportSession.status {
            case .completed:
                completion(AVAsset(url: outputURL))
            case .failed:
                if let error = exportSession.error {
                    print("Export failed with error: \(error.localizedDescription)")
                } else {
                    print("Export failed")
                }
                completion(nil)
            case .cancelled:
                print("Export cancelled")
                completion(nil)
            default:
                break
            }
        }
    }
}

View File

@ -0,0 +1,313 @@
//
// PlayContoller7.swift
// tdvideo
//
// Created by mac on 2024/2/10.
//
import UIKit
import AVFoundation
import MobileCoreServices
import CoreImage
import ImageIO
import Photos
/// Demo controller for composing spatial (stereo-pair) HEIC photos:
/// - bbb(): combines bundled a.HEIC / b.HEIC into one stereo HEIC.
/// - aaa(): diagnostic variant using solid red/blue frames.
class PlayContoller7: UIViewController {
    var startRecordingButton:UIButton?

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor.brown
        setupUI()
        // bbb()
        // aaa()
    }

    /// Adds the single "compose spatial photo" button near the bottom.
    private func setupUI() {
        startRecordingButton = UIButton(type: .system)
        startRecordingButton?.setTitle("开始合成空间图片", for: .normal)
        startRecordingButton?.setTitleColor(UIColor.white, for: UIControl.State.normal)
        startRecordingButton?.addTarget(self, action: #selector(toggleRecording(_:)), for: .touchUpInside)
        startRecordingButton?.frame = CGRect(x: 0, y: view.frame.size.height - 250, width: view.frame.size.width, height: 50)
        view.addSubview(startRecordingButton!)
    }

    @objc private func toggleRecording(_ sender: UIButton) {
        bbb()
    }

    /// Combines bundled a.HEIC (left eye) and b.HEIC (right eye) into one
    /// two-image HEIC tagged as a stereo pair and saves it to the photo
    /// library. The right eye carries an extrinsics offset of x = -0.019238 m.
    func bbb(){
        // Guard instead of force-unwrapping: a missing bundle asset should
        // log and bail, not crash.
        guard let img1 = UIImage(named: "a.HEIC"),
              let img2 = UIImage(named: "b.HEIC") else {
            print("Missing bundled stereo source images a.HEIC / b.HEIC")
            return
        }
        let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
        // 2 = image count (left + right eye).
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil) else {
            print("Failed to create HEIC destination at \(url)")
            return
        }
        // --- Reference: properties dumped from a real spatial photo (iOS 16) ---
        /*
        [ColorModel: RGB, ProfileName: sRGB IEC61966-2.1, Depth: 8, {TIFF}: {
            Orientation = 1;
            TileLength = 512;
            TileWidth = 512;
        }, {HEIF}: {
            CameraExtrinsics = {
                CoordinateSystemID = 0;
                Position = (
                    "-0.019238",
                    0,
                    0
                );
                Rotation = (
                    1, 0, 0,
                    0, 1, 0,
                    0, 0, 1
                );
            };
        }, PixelWidth: 4032, PixelHeight: 3024, Orientation: 1]
        */
        // Left eye: stereo-pair group membership + zero extrinsics.
        let properties1 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1,
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        0,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ]
        // Right eye: same group, offset ~ -19.2 mm along x.
        let properties2 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1,
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        -0.019238,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ]
        // Normalize orientation so both frames are stored upright.
        let leftImg = fixOrientation(img1)
        let rightImg = fixOrientation(img2)
        guard let leftCG = leftImg.cgImage, let rightCG = rightImg.cgImage else {
            print("Stereo source images have no CGImage backing")
            return
        }
        CGImageDestinationAddImage(destination, leftCG, properties1 as CFDictionary)
        CGImageDestinationAddImage(destination, rightCG, properties2 as CFDictionary)
        // Finalize can fail (e.g. disk full); the original ignored its result.
        guard CGImageDestinationFinalize(destination) else {
            print("Failed to finalize stereo HEIC at \(url)")
            return
        }
        // Sanity check: read the second image's metadata back and dump it.
        guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
              let properties22 = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] else {
            return
        }
        print(properties22)
        savePhoto(url)
    }

    /// Diagnostic helper: writes a stereo HEIC built from a solid red (left)
    /// and solid blue (right) 3072x3072 frame, dumps its metadata, and saves it.
    func aaa() {
        let imageSize = CGRect(x: 0, y: 0, width: 3072, height: 3072)
        // CIImage.red/.blue are infinite-extent generators; the `from:` rect
        // crops them to the target size.
        guard let leftImage = CIContext().createCGImage(.red, from: imageSize),
              let rightImage = CIContext().createCGImage(.blue, from: imageSize) else {
            print("Failed to render solid-color test frames")
            return
        }
        let url = URL.documentsDirectory.appendingPathComponent("33122.HEIC")
        print("URL: \(url)")
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil) else {
            print("Failed to create HEIC destination at \(url)")
            return
        }
        let properties1 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        0,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ]
        let properties2 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        -0.019238,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ]
        CGImageDestinationAddImage(destination, leftImage, properties1 as CFDictionary)
        CGImageDestinationAddImage(destination, rightImage, properties2 as CFDictionary)
        guard CGImageDestinationFinalize(destination) else {
            print("Failed to finalize stereo HEIC at \(url)")
            return
        }
        guard let imageSource = CGImageSourceCreateWithURL(url as CFURL, nil) else {
            print("Failed to create CGImageSource")
            return
        }
        if let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [CFString: Any] {
            print(properties)
        } else {
            print("Failed to retrieve properties")
        }
        savePhoto(url)
    }

    /// Saves the file at `fileURL` into the photo library as a photo-proxy
    /// resource, logging the outcome.
    func savePhoto(_ fileURL: URL) {
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            creationRequest.addResource(with: .photoProxy, fileURL: fileURL, options: nil)
        }) { success, error in
            if let error = error {
                print("Error saving photo to library: \(error.localizedDescription)")
            } else {
                print("Photo saved to library successfully.")
            }
        }
    }

    /// Returns a copy of `image` redrawn so its pixels are upright
    /// (orientation == .up); returns the input unchanged when already upright
    /// or when no drawing context can be created.
    func fixOrientation(_ image: UIImage) -> UIImage {
        // No-op if the orientation is already correct
        guard image.imageOrientation != .up else { return image }
        // Step 1: rotate for Left/Right/Down orientations.
        var transform = CGAffineTransform.identity
        switch image.imageOrientation {
        case .down, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: image.size.height)
            transform = transform.rotated(by: .pi)
        case .left, .leftMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.rotated(by: .pi / 2)
        case .right, .rightMirrored:
            transform = transform.translatedBy(x: 0, y: image.size.height)
            transform = transform.rotated(by: -.pi / 2)
        default:
            break
        }
        // Step 2: undo mirroring with a horizontal flip.
        switch image.imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: image.size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        default:
            break
        }
        // Draw the CGImage into a fresh context with the transform applied.
        guard let cgImage = image.cgImage,
              let colorSpace = cgImage.colorSpace,
              let context = CGContext(data: nil,
                                      width: Int(image.size.width),
                                      height: Int(image.size.height),
                                      bitsPerComponent: cgImage.bitsPerComponent,
                                      bytesPerRow: 0,
                                      space: colorSpace,
                                      bitmapInfo: cgImage.bitmapInfo.rawValue)
        else {
            return image
        }
        context.concatenate(transform)
        switch image.imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            // 90°-rotated sources: draw into the transposed rect.
            context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
        default:
            context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
        }
        guard let cgImageFixed = context.makeImage() else { return image }
        let fixedImage = UIImage(cgImage: cgImageFixed)
        return fixedImage
    }
}

View File

@ -0,0 +1,292 @@
//
// PlayContoller8.swift
// tdvideo
//
// Created by mac on 2024/2/18.
//
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
import UIKit
import AVFoundation
import UIKit
import AVFoundation
import CoreMedia
//
/// Plays the bundled MV-HEVC spatial clip and can render it live as a
/// red/blue anaglyph by decoding both eye layers through an AVAssetReader.
class PlayContoller8: UIViewController {
    var player: AVPlayer?
    var playerLayer: AVPlayerLayer?
    var isRedFilterEnabled = false
    var asset:AVAsset?
    var assetReader:AVAssetReader?
    var output:AVAssetReaderTrackOutput?
    // 0 = passthrough spatial video, 1 = red/blue anaglyph.
    var selectedIndex:NSInteger?
    var videoConver:VideoConvertor3 = VideoConvertor3()

    /// (Re)creates the asset reader that decodes both MV-HEVC eye layers
    /// (layer IDs 0 and 1). Called at start and on every loop restart, since
    /// a reader can only sweep the time range once.
    func loadVideo() async {
        do {
            if(assetReader != nil && assetReader!.status == .reading){
                assetReader?.cancelReading()
            }
            assetReader = try AVAssetReader(asset: asset!)
            output = try await AVAssetReaderTrackOutput(
                track: asset!.loadTracks(withMediaType: .video).first!,
                outputSettings: [
                    AVVideoDecompressionPropertiesKey: [
                        kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                    ],
                ]
            )
            assetReader!.timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
            assetReader!.add(output!)
            assetReader!.startReading()
        } catch {
            print("Error loading video: \(error)")
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        selectedIndex = 0
        let videoURL = Bundle.main.url(forResource: "IMG_0071", withExtension: "MOV")!
        asset = AVAsset(url: videoURL)
        Task {
            await loadVideo()
        }
        let playerItem = AVPlayerItem(asset: asset!)
        // Per-frame compositor: passthrough, or anaglyph built from the two
        // eye buffers fetched via the reader.
        // NOTE(review): [self] captures strongly here and in the observer
        // below — this controller will outlive dismissal; confirm acceptable.
        playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { [self] request in
            let compositionTime = request.compositionTime
            print(compositionTime.value)
            if(selectedIndex == 0){
                request.finish(with: request.sourceImage, context: nil)
            }
            else if(selectedIndex == 1){
                getImage(at: compositionTime) { [self] leftImage, rightImage in
                    if let leftImage = leftImage, let rightImage = rightImage {
                        let lastImg = getHonalanImg(leftImage: leftImage, rightImage: rightImage)
                        request.finish(with: lastImg, context: nil)
                    } else {
                        // Fall back to the source frame so playback never
                        // stalls on a request that was never finished
                        // (the original left the request hanging here).
                        request.finish(with: request.sourceImage, context: nil)
                    }
                }
            }
        }
        player = AVPlayer(playerItem: playerItem)
        playerLayer = AVPlayerLayer(player: player!)
        playerLayer?.frame = view.bounds
        view.layer.addSublayer(playerLayer!)
        player?.play()
        // Loop forever: don't pause at the end, rewind instead.
        player!.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
            // A fresh reader is required for the next pass over the clip.
            Task {
                await loadVideo()
            }
            player?.seek(to: .zero)
            player?.play()
        }
        let segmentedControl = UISegmentedControl(items: ["空间视频", "红蓝立体"])
        segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)
        segmentedControl.selectedSegmentIndex = 0
        self.view.addSubview(segmentedControl)
        segmentedControl.layer.borderWidth = 1.0
        segmentedControl.layer.borderColor = UIColor.blue.cgColor
        segmentedControl.tintColor = UIColor.blue
        let normalTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white]
        let selectedTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.blue]
        segmentedControl.setTitleTextAttributes(normalTextAttributes, for: .normal)
        segmentedControl.setTitleTextAttributes(selectedTextAttributes, for: .selected)
        segmentedControl.addTarget(self, action: #selector(segmentedControlValueChanged(_:)), for: .valueChanged)
    }

    @objc func segmentedControlValueChanged(_ sender: UISegmentedControl) {
        selectedIndex = sender.selectedSegmentIndex
        print("选中了第 \(selectedIndex) 个选项")
    }

    /// Scans forward through the reader output for the sample at `time` and
    /// delivers its left/right eye images.
    ///
    /// Calls `completion` exactly once: with both images when the sample is
    /// found and carries both eye buffers, otherwise with (nil, nil).
    /// (The original fell through after `break` and invoked the callback a
    /// second time with nils for every found frame, and skipped it entirely
    /// on the taggedBuffers guard path.)
    func getImage(at time: CMTime, completion: @escaping ((CIImage?, CIImage?) -> Void)) {
        while let nextSampleBuffer = output!.copyNextSampleBuffer() {
            let presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
            if presentationTime == time {
                guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { break }
                let leftEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
                })?.buffer
                let rightEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
                })?.buffer
                if let leftEyeBuffer,
                   let rightEyeBuffer,
                   case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
                   case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                    completion(CIImage(cvPixelBuffer: leftEyePixelBuffer),
                               CIImage(cvPixelBuffer: rightEyePixelBuffer))
                    return
                }
                break
            }
        }
        completion(nil,nil)
    }

    /// Builds a red/blue anaglyph: the left eye keeps only its blue-channel
    /// contribution routed to red, the right eye keeps red routed to blue
    /// (matrix values preserved from the original), blended with screen mode.
    /// Falls back to the unfiltered left image if any filter fails, instead
    /// of force-unwrapping a nil result as the original did.
    func getHonalanImg(leftImage:CIImage,rightImage:CIImage)->CIImage{
        let redColorMatrix: [CGFloat] = [
            0.0, 0.0, 0.0, 0.0, 0.0, // red row
            0.0, 0.0, 0.0, 0.0, 0.0, // green row
            0.0, 0.0, 1.0, 0.0, 0.0, // blue row
            0.0, 0.0, 0.0, 1.0, 0.0  // alpha row
        ]
        let blueColorMatrix: [CGFloat] = [
            1.0, 0.0, 0.0, 0.0, 0.0, // red row
            0.0, 0.0, 0.0, 0.0, 0.0, // green row
            0.0, 0.0, 0.0, 0.0, 0.0, // blue row
            0.0, 0.0, 0.0, 1.0, 0.0  // alpha row
        ]
        let redFilter = CIFilter(name: "CIColorMatrix")!
        redFilter.setValue(leftImage, forKey: kCIInputImageKey)
        redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
        let blueFilter = CIFilter(name: "CIColorMatrix")!
        blueFilter.setValue(rightImage, forKey: kCIInputImageKey)
        blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
        if let redOutputImage = redFilter.outputImage,
           let blueOutputImage = blueFilter.outputImage {
            let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
            compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
            compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
            if let combined = compositeFilter.outputImage {
                return combined
            }
        }
        return leftImage
    }
}
//import Foundation
//import AVKit
//import VideoToolbox
//import CoreImage
//import ImageIO
//import UIKit
//import AVFoundation
//import UIKit
//import AVFoundation
//import CoreMedia
//
//
//class PlayContoller8: UIViewController {
//
// var player: AVPlayer?
// var playerLayer: AVPlayerLayer?
// var isRedFilterEnabled = false
//
// var videoConver:VideoConvertor3 = VideoConvertor3()
//
// override func viewDidLoad() {
// super.viewDidLoad()
// view.backgroundColor = .black
//
// let videoURL = Bundle.main.url(forResource: "IMG_0071", withExtension: "MOV")!
// let asset = AVAsset(url: videoURL)
// let playerItem = AVPlayerItem(asset: asset)
//
// player = AVPlayer(playerItem: playerItem)
//
// playerLayer = AVPlayerLayer(player: player)
// playerLayer?.frame = view.bounds
// view.layer.addSublayer(playerLayer!)
//
//
//
// Task{
// try await videoConver.convertVideo(asset: asset) { [self] pro in
// player?.play()
// }
// }
//
//
//
//
// let button = UIButton(type: .system)
// button.setTitle("", for: .normal)
// button.addTarget(self, action: #selector(toggleRedFilter), for: .touchUpInside)
// button.translatesAutoresizingMaskIntoConstraints = false
//
// view.addSubview(button)
//
// NSLayoutConstraint.activate([
// button.centerXAnchor.constraint(equalTo: view.centerXAnchor),
// button.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -20)
// ])
// }
//
// @objc func toggleRedFilter() {
// guard let playerItem = player?.currentItem else { return }
//
// if !isRedFilterEnabled {
// applyRedFilter(to: playerItem)
// } else {
// //
// playerItem.videoComposition = nil
// }
//
// isRedFilterEnabled.toggle()
// }
//
// func applyRedFilter(to playerItem: AVPlayerItem) {
//
// playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { [self] request in
// let compositionTime = request.compositionTime
// let arr = videoConver.datas
// for i in 0...arr.count{
// let dic = arr[i] as! NSDictionary
// let time = dic["time"] as! CMTime
// if(compositionTime == time){
// let left = dic["left"] as! CIImage
// let right = dic["right"] as! CIImage
// let image = videoConver.joinImages(leftImage: left, rightImage: right)
// request.finish(with: image, context: nil)
// break
// }
// }
// }
// }
//
//}

View File

@ -0,0 +1,235 @@
//
// PlayContoller9.swift
// tdvideo
//
// Created by mac on 2024/2/21.
//
/*
*/
import UIKit
import AVFoundation
import Photos
import AVKit
import VideoToolbox
/// Records the back wide-angle and back ultra-wide cameras simultaneously
/// through one AVCaptureMultiCamSession, then feeds the two clips to
/// SpatialVideoWriter (wide = left eye, ultra-wide = right eye) to produce
/// Documents/output.MOV.
class PlayContoller9: UIViewController, AVCaptureFileOutputRecordingDelegate {
    // One multi-cam session drives both back cameras.
    var session = AVCaptureMultiCamSession()
    var wideAngleCameraDeviceInput: AVCaptureDeviceInput?   // .builtInWideAngleCamera (1x)
    var ultraWideCameraDeviceInput: AVCaptureDeviceInput?   // .builtInUltraWideCamera (0.5x)
    var wideAngleCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
    var ultraWideCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
    var wideAngleCameraMovieOutput: AVCaptureMovieFileOutput?
    var ultraWideCameraMovieOutput: AVCaptureMovieFileOutput?
    var isRecording = false
    var startRecordingButton: UIButton?
    var leftEyeVideoURL:URL?
    var rightEyeVideoURL:URL?
    var outputVideoURL: URL?

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
        outputVideoURL = URL.documentsDirectory.appendingPathComponent("output.MOV")
        configureSession()
        setupUI()
    }

    /// Configures the session: wide + ultra-wide back-camera inputs, the
    /// microphone, one movie output per camera, and two preview layers, then
    /// starts the session off the main thread.
    private func configureSession() {
        session.beginConfiguration()
        defer {
            session.commitConfiguration()
        }
        // Back wide-angle camera (the standard 1x lens, ~28 mm equivalent).
        guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
            print("Could not find the back camera")
            return
        }
        do {
            wideAngleCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
            guard let wideAngleCameraDeviceInput = wideAngleCameraDeviceInput,
                  session.canAddInput(wideAngleCameraDeviceInput) else {
                print("Could not add back camera input")
                return
            }
            session.addInput(wideAngleCameraDeviceInput)
        } catch {
            print("Could not create back camera device input: \(error)")
            return
        }
        // Back ultra-wide camera (0.5x). The local is named "frontCamera"
        // only for historical reasons — it is a back camera.
        guard let frontCamera = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) else {
            print("Could not find the front camera")
            return
        }
        do {
            ultraWideCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
            guard let ultraWideCameraDeviceInput = ultraWideCameraDeviceInput,
                  session.canAddInput(ultraWideCameraDeviceInput) else {
                print("Could not add front camera input")
                return
            }
            session.addInput(ultraWideCameraDeviceInput)
        } catch {
            print("Could not create front camera device input: \(error)")
            return
        }
        // Microphone input (shared by both movie outputs).
        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
            print("Could not find audio device")
            return
        }
        do {
            let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
            guard session.canAddInput(audioDeviceInput) else {
                print("Could not add audio input")
                return
            }
            session.addInput(audioDeviceInput)
        } catch {
            print("Could not create audio device input: \(error)")
            return
        }
        // One movie output per camera.
        wideAngleCameraMovieOutput = AVCaptureMovieFileOutput()
        guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
              session.canAddOutput(wideAngleCameraMovieOutput) else {
            print("Could not add the back camera movie output")
            return
        }
        session.addOutput(wideAngleCameraMovieOutput)
        ultraWideCameraMovieOutput = AVCaptureMovieFileOutput()
        guard let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput,
              session.canAddOutput(ultraWideCameraMovieOutput) else {
            print("Could not add the front camera movie output")
            return
        }
        session.addOutput(ultraWideCameraMovieOutput)
        // Side-by-side previews in the top half of the screen.
        wideAngleCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
        wideAngleCameraVideoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
        if let wideAngleCameraVideoPreviewLayer = wideAngleCameraVideoPreviewLayer {
            view.layer.addSublayer(wideAngleCameraVideoPreviewLayer)
        }
        ultraWideCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
        ultraWideCameraVideoPreviewLayer?.frame = CGRect(x: view.frame.size.width / 2, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
        if let ultraWideCameraVideoPreviewLayer = ultraWideCameraVideoPreviewLayer {
            view.layer.addSublayer(ultraWideCameraVideoPreviewLayer)
        }
        // startRunning blocks, so keep it off the main thread.
        DispatchQueue.global().async {
            self.session.startRunning()
        }
    }

    /// Adds the record toggle button.
    private func setupUI() {
        startRecordingButton = UIButton(type: .system)
        startRecordingButton?.setTitle("Start Recording", for: .normal)
        startRecordingButton?.setTitleColor(UIColor.brown, for: UIControl.State.normal)
        startRecordingButton?.addTarget(self, action: #selector(toggleRecording(_:)), for: .touchUpInside)
        startRecordingButton?.frame = CGRect(x: 0, y: view.frame.size.height - 250, width: view.frame.size.width, height: 50)
        view.addSubview(startRecordingButton!)
    }

    @objc private func toggleRecording(_ sender: UIButton) {
        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }

    private func startRecording() {
        guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
              let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput else {
            print("Movie output not configured")
            return
        }
        // Clear URLs from any previous take. Without this, the first delegate
        // callback of a new recording pairs with a stale clip and triggers
        // createSpVideo() prematurely.
        leftEyeVideoURL = nil
        rightEyeVideoURL = nil
        // Timestamped names keep successive takes from colliding.
        let time = Date().timeIntervalSince1970
        let name1 = "back" + String(time) + ".mov"
        let name2 = "front" + String(time) + ".mov"
        let backCameraOutputURL = URL.documentsDirectory.appending(path:name1)
        let frontCameraOutputURL = URL.documentsDirectory.appending(path:name2)
        wideAngleCameraMovieOutput.startRecording(to: backCameraOutputURL, recordingDelegate: self)
        ultraWideCameraMovieOutput.startRecording(to: frontCameraOutputURL, recordingDelegate: self)
        isRecording = true
        startRecordingButton?.setTitle("Stop Recording", for: .normal)
    }

    private func stopRecording() {
        guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
              let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput else {
            print("Movie output not configured")
            return
        }
        wideAngleCameraMovieOutput.stopRecording()
        ultraWideCameraMovieOutput.stopRecording()
        isRecording = false
        startRecordingButton?.setTitle("Start Recording", for: .normal)
    }

    /// Called once per movie output; records each finished clip's URL and
    /// kicks off spatial-video composition once both eyes are available.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print("Video recording finished with error: \(error.localizedDescription)")
        } else {
            if output == wideAngleCameraMovieOutput {
                print("Back camera video recorded: \(outputFileURL)")
                leftEyeVideoURL = outputFileURL
            } else if output == ultraWideCameraMovieOutput {
                print("Front camera video recorded: \(outputFileURL)")
                rightEyeVideoURL = outputFileURL
            }
            createSpVideo()
        }
    }

    /// Merges the two recorded clips into one spatial video via
    /// SpatialVideoWriter. No-op until both eye URLs are set.
    func createSpVideo(){
        guard let leftURL = leftEyeVideoURL,
              let rightURL = rightEyeVideoURL,
              let outURL = outputVideoURL else {
            return
        }
        // Remove a previous export first: writing fails when the destination
        // file already exists.
        try? FileManager.default.removeItem(at: outURL)
        let spatialVideoWriter = SpatialVideoWriter()
        Task {
            spatialVideoWriter.writeSpatialVideo(leftEyeVideoURL: leftURL, rightEyeVideoURL: rightURL, outputVideoURL: outURL) { success, error in
                if success {
                    print("空间视频生成成功")
                } else if let error = error {
                    print("生成空间视频失败:\(error.localizedDescription)")
                }
            }
        }
    }
}

View File

@ -0,0 +1,103 @@
//
// PlayController3.swift
// tdvideo
//
// Created by mac on 2024/2/1.
//
import UIKit
import PhotosUI
import AVKit
import MobileCoreServices
/// Demo screen: picks one photo or video from the library via PHPicker and
/// previews it — photos in `imageView`, videos in an AVPlayerLayer hosted by
/// `playerView`.
class PlayController3: UIViewController, PHPickerViewControllerDelegate {
    var imageView: UIImageView!
    var playerView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = UIColor.white
        setupUI()
    }

    /// Lays out the centered photo preview, the video container (stacked on
    /// top of it), and the album button below them.
    private func setupUI() {
        imageView = UIImageView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
        imageView.contentMode = .scaleAspectFit
        imageView.center = view.center
        imageView.backgroundColor = UIColor.brown
        view.addSubview(imageView)
        playerView = UIView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
        playerView.backgroundColor = .black
        playerView.center = view.center
        view.addSubview(playerView)
        let button = UIButton(type: .system)
        button.setTitle("Open Photo Album", for: .normal)
        button.addTarget(self, action: #selector(openPhotoAlbum), for: .touchUpInside)
        button.frame = CGRect(x: 0, y: 0, width: 200, height: 50)
        button.center = CGPoint(x: view.center.x, y: view.center.y + 250)
        view.addSubview(button)
    }

    /// Presents a single-selection picker limited to images and videos.
    @objc private func openPhotoAlbum() {
        var configuration = PHPickerConfiguration()
        configuration.selectionLimit = 1
        configuration.filter = .any(of: [.images, .videos])
        let picker = PHPickerViewController(configuration: configuration)
        picker.delegate = self
        present(picker, animated: true, completion: nil)
    }

    // MARK: - PHPickerViewControllerDelegate

    /// Loads the picked item: a UIImage goes into `imageView`; a movie URL is
    /// wrapped in an AVPlayer and played inside `playerView`.
    func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
        picker.dismiss(animated: true, completion: nil)
        guard let result = results.first else {
            return
        }
        let itemProvider = result.itemProvider
        if itemProvider.canLoadObject(ofClass: UIImage.self) {
            itemProvider.loadObject(ofClass: UIImage.self) { [weak self] (image, error) in
                if let image = image as? UIImage {
                    DispatchQueue.main.async {
                        self?.imageView.image = image
                        self?.playerView.isHidden = true
                    }
                }
            }
        }
        // NOTE: kUTType* constants are deprecated; UTType.image / UTType.movie
        // are the modern replacements.
        let imageTypeIdentifier = kUTTypeImage as String
        let videoTypeIdentifier = kUTTypeMovie as String
        if itemProvider.hasItemConformingToTypeIdentifier(imageTypeIdentifier) {
            print("Item provider contains an image")
        }
        if itemProvider.hasItemConformingToTypeIdentifier(videoTypeIdentifier) {
            // Bug fix: this escaping closure captured `self` strongly; use a
            // weak capture so a dismissed controller is not kept alive.
            itemProvider.loadItem(forTypeIdentifier: videoTypeIdentifier, options: nil) { [weak self] (item, error) in
                guard let self, let url = item as? URL else { return }
                // NOTE(review): the URL from loadItem is a temporary copy and
                // may be cleaned up after this completion returns — confirm
                // playback starts in time, or copy the file first.
                let asset = AVAsset(url: url)
                DispatchQueue.main.async {
                    let playerItem = AVPlayerItem(asset: asset)
                    let player = AVPlayer(playerItem: playerItem)
                    let playerLayer = AVPlayerLayer(player: player)
                    // Bug fix: `bounds` is non-optional, so the old
                    // `?? CGRect.zero` was dead code.
                    playerLayer.frame = self.playerView.bounds
                    // Bug fix: drop any player layer left over from a previous
                    // selection so layers do not pile up on repeated picks.
                    self.playerView.layer.sublayers?
                        .compactMap { $0 as? AVPlayerLayer }
                        .forEach { $0.removeFromSuperlayer() }
                    self.playerView.layer.addSublayer(playerLayer)
                    player.play()
                    self.playerView.isHidden = false
                }
            }
        }
    }
}

View File

@ -0,0 +1,277 @@
//
// PlayControllerVideo.swift
// tdvideo
//
// Created by mac on 2024/2/4.
//
import UIKit
import Photos
import ImageIO
import CoreFoundation
import UIKit
import Photos
import ImageIO
import CoreGraphics
import MobileCoreServices
import AVKit
/// Collection-view cell showing a thumbnail with an optional badge (the
/// "frame count") pinned to the top-left corner.
class PhotoCell2: UICollectionViewCell {
    /// Full-bleed thumbnail image.
    let imageView: UIImageView = {
        let imageView = UIImageView()
        imageView.contentMode = .scaleAspectFill
        imageView.clipsToBounds = true
        return imageView
    }()

    /// Small red badge; hidden by the data source when the count is trivial.
    let frameCountLabel: UILabel = {
        let label = UILabel()
        label.textColor = .white
        label.backgroundColor = .red
        label.textAlignment = .center
        label.font = UIFont.boldSystemFont(ofSize: 12)
        label.layer.cornerRadius = 8
        label.clipsToBounds = true
        return label
    }()

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupViews()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupViews()
    }

    /// Installs the subviews and pins them with Auto Layout.
    private func setupViews() {
        // Bug fix: a UICollectionViewCell's subviews belong in `contentView`,
        // not the cell itself, so selection/highlight layering and sizing
        // behave correctly.
        contentView.addSubview(imageView)
        contentView.addSubview(frameCountLabel)
        imageView.translatesAutoresizingMaskIntoConstraints = false
        frameCountLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            imageView.topAnchor.constraint(equalTo: contentView.topAnchor),
            imageView.leadingAnchor.constraint(equalTo: contentView.leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: contentView.trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
            frameCountLabel.topAnchor.constraint(equalTo: contentView.topAnchor, constant: 8),
            frameCountLabel.leadingAnchor.constraint(equalTo: contentView.leadingAnchor, constant: 8),
            frameCountLabel.widthAnchor.constraint(equalToConstant: 40),
            frameCountLabel.heightAnchor.constraint(equalToConstant: 20)
        ])
    }
}
/// Shows a grid of thumbnails for the spatial videos found in the photo
/// library; tapping a cell hands the selected video's AVAsset to
/// `mediaSelectedHandler` and dismisses the screen.
class PlayControllerVideo: UIViewController, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
    var collectionView: UICollectionView!
    /// All library videos, newest first.
    var fetchResult: PHFetchResult<PHAsset>!
    /// First-frame thumbnails of the spatial videos found so far.
    /// NOTE(review): only spatial videos are appended here, but selection and
    /// the frame-count badge index into `fetchResult` (ALL videos) with the
    /// same cell index — the two can disagree. Confirm and align the sources.
    var photos: [UIImage] = []
    /// Invoked with the AVAsset of the tapped cell's video.
    var mediaSelectedHandler: ((AVAsset) -> Void)?

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCollectionView()
        fetchPhotos()
        collectionView.dataSource = self
        collectionView.delegate = self
    }

    /// Builds a full-screen three-column grid.
    private func setupCollectionView() {
        let layout = UICollectionViewFlowLayout()
        layout.minimumLineSpacing = 10
        layout.minimumInteritemSpacing = 10
        collectionView = UICollectionView(frame: view.bounds, collectionViewLayout: layout)
        collectionView.backgroundColor = .white
        collectionView.register(PhotoCell2.self, forCellWithReuseIdentifier: "PhotoCell2")
        view.addSubview(collectionView)
        collectionView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            collectionView.topAnchor.constraint(equalTo: view.topAnchor),
            collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
    }

    /// Fetches all library videos and, for each one that is a spatial video,
    /// generates a first-frame thumbnail for the grid.
    func fetchPhotos() {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions)
        for index in 0..<fetchResult.count {
            let asset = fetchResult.object(at: index)
            guard asset.mediaType == .video else { continue }
            let requestOptions = PHVideoRequestOptions()
            requestOptions.isNetworkAccessAllowed = true
            PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { [self] (avAsset, _, _) in
                guard let avAsset else { return }
                Task {
                    do {
                        let isSS = isSpatialVideo(asset: avAsset)
                        print(isSS)
                        guard isSS else { return }
                        let generator = AVAssetImageGenerator(asset: avAsset)
                        generator.appliesPreferredTrackTransform = true
                        let time = CMTime(seconds: 0, preferredTimescale: 1)
                        let imageRef = try await generator.copyCGImage(at: time, actualTime: nil)
                        let thumbnail = UIImage(cgImage: imageRef)
                        // Bug fix: mutate the data source and reload on the
                        // main actor — this callback (and the Task it spawns)
                        // may run off the main thread.
                        await MainActor.run {
                            self.photos.append(thumbnail)
                            self.collectionView.reloadData()
                        }
                    } catch {
                        // Thumbnail generation failed for this asset; skip it.
                    }
                }
            }
        }
    }

    /// Returns true when the asset carries the QuickTime spatial-format
    /// metadata marker (synchronous metadata read).
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        let isSpatialVideo = metadata.contains { item in
            if let identifier = item.identifier?.rawValue {
                return identifier == "mdta/com.apple.quicktime.spatial.format-version"
            }
            return false
        }
        return isSpatialVideo
    }

    /// Async variant of `isSpatialVideo(asset:)` using `loadMetadata`.
    func isSSVideo(asset: AVAsset) async throws -> Bool {
        let userDataItems = try await asset.loadMetadata(for: .quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        return !spacialCharacteristics.isEmpty
    }

    // MARK: - UICollectionViewDataSource

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return photos.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell2", for: indexPath) as! PhotoCell2
        cell.imageView.image = photos[indexPath.item]
        let frameCount = getFrameCount(for: indexPath.item)
        cell.frameCountLabel.isHidden = frameCount <= 1
        cell.frameCountLabel.text = "\(frameCount)"
        return cell
    }

    // MARK: - UICollectionViewDelegateFlowLayout

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        let width = collectionView.bounds.width / 3 - 10
        return CGSize(width: width, height: width)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    /// Resolves the tapped item's AVAsset and hands it to `mediaSelectedHandler`.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        let asset = fetchResult.object(at: indexPath.item)
        let requestOptions = PHVideoRequestOptions()
        requestOptions.isNetworkAccessAllowed = true
        PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { [self] (avAsset, _, _) in
            // Bug fix: the handler and the asset were force-unwrapped; either
            // being nil crashed. Deliver only when both are present.
            if let avAsset, let handler = mediaSelectedHandler {
                handler(avAsset)
            }
            DispatchQueue.main.async {
                self.dismiss(animated: true, completion: nil)
            }
        }
    }

    /// Presents a standard AVPlayerViewController for the given asset.
    func playVideo(asset: PHAsset) {
        PHImageManager.default().requestPlayerItem(forVideo: asset, options: nil) { [weak self] (playerItem, _) in
            guard let playerItem = playerItem else { return }
            DispatchQueue.main.async {
                let playerViewController = AVPlayerViewController()
                playerViewController.player = AVPlayer(playerItem: playerItem)
                self?.present(playerViewController, animated: true) {
                    playerViewController.player?.play()
                }
            }
        }
    }

    /// Number of image representations in the asset's data, or 0 when the
    /// data cannot be read (typical for video assets).
    private func getFrameCount(for index: Int) -> Int {
        let asset = fetchResult.object(at: index)
        if let imageData = getImageData(for: asset) {
            if let cgImageSource = CGImageSourceCreateWithData(imageData as CFData, nil) {
                return CGImageSourceGetCount(cgImageSource)
            }
        }
        return 0
    }

    /// Synchronously fetches the asset's raw image data.
    /// NOTE: `requestImageData(for:options:)` is deprecated
    /// (`requestImageDataAndOrientation` is the replacement), and the
    /// synchronous option blocks the calling thread.
    private func getImageData(for asset: PHAsset) -> Data? {
        var imageData: Data?
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (data, _, _, _) in
            imageData = data
        }
        return imageData
    }

    /// Encodes a CGImage as PNG into a CFData buffer.
    /// NOTE: `kUTTypePNG` is deprecated; `UTType.png.identifier as CFString`
    /// is the modern replacement.
    func convertCGImageToCFData(cgImage: CGImage) -> CFData? {
        let data = CFDataCreateMutable(kCFAllocatorDefault, 0)
        if let data = data {
            if let destination = CGImageDestinationCreateWithData(data, kUTTypePNG, 1, nil) {
                CGImageDestinationAddImage(destination, cgImage, nil)
                CGImageDestinationFinalize(destination)
            }
        }
        return data
    }
}

View File

@ -0,0 +1,52 @@
//
// SceneDelegate.swift
// tdvideo
//
// Created by aaa on 2024/1/19.
//
import UIKit
/// Standard UIKit scene lifecycle delegate. All callbacks are template
/// no-ops; the storyboard creates and attaches `window` automatically.
class SceneDelegate: UIResponder, UIWindowSceneDelegate {

    var window: UIWindow?

    /// Scene is being connected to its session; with a storyboard nothing
    /// needs configuring here beyond a type sanity check.
    func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
        guard scene is UIWindowScene else { return }
    }

    /// Scene released by the system — free scene-specific resources here.
    func sceneDidDisconnect(_ scene: UIScene) {
    }

    /// Scene became active — restart tasks paused while inactive.
    func sceneDidBecomeActive(_ scene: UIScene) {
    }

    /// Scene about to become inactive (e.g. an incoming call).
    func sceneWillResignActive(_ scene: UIScene) {
    }

    /// Scene entering the foreground — undo background-entry changes.
    func sceneWillEnterForeground(_ scene: UIScene) {
    }

    /// Scene entering the background — persist state, release shared resources.
    func sceneDidEnterBackground(_ scene: UIScene) {
    }
}

View File

@ -0,0 +1,421 @@
//
// SpatialVideoConverter.swift
// vp
//
// Created by soldoros on 2024/1/17.
//
import AVFoundation
import CoreImage
import Foundation
import Observation
import VideoToolbox
///
/// Converts a side-by-side stereoscopic movie into an MV-HEVC spatial video.
///
/// The source is decoded with `AVAssetReader`, each frame is cropped into
/// left- and right-eye pixel buffers by `FrameProcessor`, and the pair is
/// appended as tagged buffers through
/// `AVAssetWriterInputTaggedPixelBufferGroupAdaptor`.
class SpatialVideoConverter:NSObject {
    // MARK: - Properties
    // MARK: Public
    /// Total frames expected in the source (duration × nominal frame rate).
    var totalFrames: Double = .zero
    /// Frames appended so far.
    var framesProcessed: Double = 0.0
    /// Estimated seconds remaining, recomputed after every frame.
    var timeRemaining: Double = .zero
    /// Wall-clock start of the current conversion.
    var startTime: Date = .now
    /// True while a conversion is in flight.
    var isProcessing = false
    /// Details of the most recently finished output file.
    var lastConvertedFileURL: LastConvertedFile?
    /// Latest left-eye frame, exposed for preview UI.
    var leftEyeImage: CVPixelBuffer?
    /// Latest right-eye frame, exposed for preview UI.
    var rightEyeImage: CVPixelBuffer?
    // MARK: Private
    /// Crops full frames into per-eye pixel buffers.
    private let processor = FrameProcessor()
    /// Writes the MV-HEVC output file.
    private var writer: AVAssetWriter?
    /// Serial queue feeding the writer's video input.
    private let videoInputQueue = DispatchQueue(label: "com.test.spatialWriterVideo")
    /// Serial queue feeding the writer's audio input.
    private let audioInputQueue = DispatchQueue(label: "com.test.spatialWriterAudio")
    /// Writer input for video samples.
    private var writerVideoInput: AVAssetWriterInput?
    /// Writer input for audio samples (nil when the source has no audio).
    private var writerAudioInput: AVAssetWriterInput?
    /// Reader for the source asset.
    private var heroReader: AVAssetReader?
    /// Reader output delivering decoded video frames.
    private var readerVideoOutput: AVAssetReaderTrackOutput?
    /// Reader output delivering audio samples.
    private var readerAudioOutput: AVAssetReaderTrackOutput?
    /// Set once the video stream has been fully written.
    private var videoWritingFinished = false
    /// Set once the audio stream has been fully written.
    private var audioWritingFinished = false
    /// Formatter for elapsed-time / duration strings (abbreviated units).
    private var dateFormatter: DateComponentsFormatter {
        let formatter = DateComponentsFormatter()
        formatter.allowedUnits = [.day, .hour, .minute, .second]
        formatter.unitsStyle = .abbreviated
        return formatter
    }
    /// Formatter for file-size strings (GB/MB/KB).
    private var byteCountFormatter: ByteCountFormatter {
        let formatter = ByteCountFormatter()
        formatter.allowedUnits = [.useGB, .useMB, .useKB]
        formatter.countStyle = .file
        return formatter
    }
    // MARK: - Methods
    // MARK: Public
    /// Converts a side-by-side stereo video into a spatial video.
    /// - Parameters:
    ///   - sourceVideoURL: URL of an AVFoundation-readable source
    ///     (H.264 / H.265 / ProRes).
    ///   - outputVideoURL: Destination URL for the MV-HEVC output.
    ///   - progress: Optional callback receiving the completion fraction.
    func convertStereoscopicVideoToSpatialVideo(
        sourceVideoURL: URL,
        outputVideoURL: URL,
        progress: ((Float)->())? = nil
    ) async throws {
        let heroAsset = AVAsset(url: sourceVideoURL)
        // Delete any stale output so AVAssetWriter can create a fresh file.
        try removeExistingFile(at: outputVideoURL)
        writer = try AVAssetWriter(outputURL: outputVideoURL, fileType: .mov)
        guard let videoTrack = try await heroAsset.loadTracks(withMediaType: .video).first else {
            return
        }
        let audioTrack = try await heroAsset.loadTracks(withMediaType: .audio).first
        guard let videoFormatDescription = try await videoTrack.load(.formatDescriptions).first else {
            return
        }
        if !processor.isPrepared {
            processor.prepare(with: videoFormatDescription, outputRetainedBufferCountHint: 1)
        }
        // Side-by-side layout: left eye is the left half of each frame,
        // right eye the right half.
        let naturalSize = try await videoTrack.load(.naturalSize)
        let leftEyeRegion = CGRect(
            x: 0,
            y: 0,
            width: naturalSize.width / 2,
            height: naturalSize.height
        )
        let rightEyeRegion = CGRect(
            x: naturalSize.width / 2,
            y: 0,
            width: naturalSize.width / 2,
            height: naturalSize.height
        )
        // Source timing/bitrate used to size writer settings and progress.
        let frameRate = try await videoTrack.load(.nominalFrameRate)
        let dataRate = try await videoTrack.load(.estimatedDataRate)
        let duration = try await heroAsset.load(.duration)
        let frames = CMTimeGetSeconds(duration) * Double(frameRate)
        totalFrames = frames
        // TODO: expose disparity adjustment and field of view as parameters.
        // Start from Apple's MV-HEVC preset, then override per-eye dimensions.
        var videoSettings = AVOutputSettingsAssistant(preset: .mvhevc1440x1440)?.videoSettings
        videoSettings?[AVVideoWidthKey] = leftEyeRegion.width
        videoSettings?[AVVideoHeightKey] = leftEyeRegion.height
        // Bug fix: the compression dictionary was force-cast with `as!`,
        // crashing if the preset (or key) was missing; fall back to empty.
        var compressionProperties = (videoSettings?[AVVideoCompressionPropertiesKey] as? [String: Any]) ?? [:]
        compressionProperties[AVVideoAverageBitRateKey] = dataRate
        compressionProperties[kVTCompressionPropertyKey_HorizontalDisparityAdjustment as String] = 0
        // NOTE(review): SpatialVideoWriter uses 90_000 (thousandths of a
        // degree) for this key — confirm 90 here is not three orders of
        // magnitude too small.
        compressionProperties[kCMFormatDescriptionExtension_HorizontalFieldOfView as String] = 90
        videoSettings?[AVVideoCompressionPropertiesKey] = compressionProperties
        // Writer video input using the MV-HEVC settings.
        writerVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        guard let writerVideoInput else { return }
        writerVideoInput.expectsMediaDataInRealTime = false
        // Pass-through audio input (nil settings copy samples unchanged).
        if audioTrack != nil {
            writerAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
        }
        // Adaptor that accepts the per-eye tagged-buffer pairs.
        let pixelBufferAdaptor = AVAssetWriterInputTaggedPixelBufferGroupAdaptor(
            assetWriterInput: writerVideoInput,
            sourcePixelBufferAttributes: .none)
        // Reader output decodes to 32BGRA at the source resolution so the
        // frame processor can crop it.
        let readerOutputSettings: [String:Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
            kCVPixelBufferWidthKey as String: naturalSize.width,
            kCVPixelBufferHeightKey as String: naturalSize.height
        ]
        readerVideoOutput = AVAssetReaderTrackOutput(
            track: videoTrack,
            outputSettings: readerOutputSettings)
        if let audioTrack {
            readerAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
        }
        heroReader = try AVAssetReader(asset: heroAsset)
        // Wire up the reader and begin decoding.
        guard let heroReader,
              let readerVideoOutput
        else { return }
        heroReader.add(readerVideoOutput)
        if let readerAudioOutput {
            heroReader.add(readerAudioOutput)
        }
        heroReader.startReading()
        // Wire up the writer and open the session at time zero.
        guard let writer else { return }
        writer.add(writerVideoInput)
        if let writerAudioInput {
            writer.add(writerAudioInput)
        }
        writer.startWriting()
        writer.startSession(atSourceTime: .zero)
        isProcessing = true
        startTime = Date.now
        // Pull-model video loop: crop each decoded frame into two eyes and
        // append them as a tagged-buffer group.
        writerVideoInput.requestMediaDataWhenReady(on: videoInputQueue) { [weak self] in
            guard let self else { return }
            while writerVideoInput.isReadyForMoreMediaData {
                autoreleasepool {
                    guard self.processor.isPrepared else {
                        print("The processor is not prepared. Cannot write video")
                        return
                    }
                    if let frame = readerVideoOutput.copyNextSampleBuffer(),
                       let frameBuffer = CMSampleBufferGetImageBuffer(frame) {
                        let sourceBuffer = CIImage(cvImageBuffer: frameBuffer)
                        // Set up the left and right eye `CVPixelBuffer` references.
                        guard let leftEye = self.processor.cropPixelBuffer(
                            pixelBufferImage: sourceBuffer,
                            targetRect: leftEyeRegion
                        ),
                        let rightEye = self.processor.cropPixelBuffer(
                            pixelBufferImage: sourceBuffer,
                            targetRect: rightEyeRegion
                        )
                        else { return }
                        // Publish the frames for any preview UI.
                        self.leftEyeImage = leftEye
                        self.rightEyeImage = rightEye
                        // One `CMTaggedBuffer` per eye, tagged with its layer ID.
                        let taggedBuffers: [CMTaggedBuffer] = [
                            .init(tags: [.videoLayerID(0), .stereoView(.leftEye)], pixelBuffer: leftEye),
                            .init(tags: [.videoLayerID(1), .stereoView(.rightEye)], pixelBuffer: rightEye)
                        ]
                        let didAppend = pixelBufferAdaptor.appendTaggedBuffers(
                            taggedBuffers,
                            withPresentationTime: frame.presentationTimeStamp
                        )
                        if !didAppend {
                            print("Failed to append frame.")
                        }
                        // Increment the number of frames processed.
                        if self.framesProcessed < (self.totalFrames - 1) {
                            self.framesProcessed += 1
                        }
                        // Refresh the ETA using this frame's processing time.
                        self.calculateTimeRemaining()
                        progress?( Float(self.framesProcessed)/Float(self.totalFrames))
                    } else {
                        // No more frames: close the video stream once.
                        if !self.videoWritingFinished {
                            sourceVideoURL.stopAccessingSecurityScopedResource()
                            self.videoWritingFinished.toggle()
                            writerVideoInput.markAsFinished()
                            self.stop(with: outputVideoURL)
                        }
                    }
                }
            }
        }
        // Pull-model audio loop: copy samples straight through.
        if let writerAudioInput,
           let readerAudioOutput {
            writerAudioInput.requestMediaDataWhenReady(on: audioInputQueue) { [weak self] in
                guard let self else { return }
                while writerAudioInput.isReadyForMoreMediaData {
                    autoreleasepool {
                        if let sample = readerAudioOutput.copyNextSampleBuffer() {
                            writerAudioInput.append(sample)
                        } else {
                            // No more samples: close the audio stream once.
                            if !self.audioWritingFinished {
                                self.audioWritingFinished.toggle()
                                writerAudioInput.markAsFinished()
                                self.stop(with: outputVideoURL)
                            }
                        }
                    }
                }
            }
        }
    }
    /// Cancels an in-flight conversion and deletes the partial output file.
    /// - Parameter expectedOutputURL: URL the partial output was written to.
    func cancel(expectedOutputURL: URL) {
        writerVideoInput?.markAsFinished()
        writerAudioInput?.markAsFinished()
        writer?.cancelWriting()
        try? removeExistingFile(at: expectedOutputURL)
        resetWriter()
    }
    // MARK: - Private
    /// Deletes the file at `outputVideoURL` if one exists.
    /// Bug fix: `removeItem` throws when nothing exists at the path, which
    /// used to abort every first-ever conversion; now a missing file is a no-op.
    private func removeExistingFile(at outputVideoURL: URL) throws {
        guard FileManager.default.fileExists(atPath: outputVideoURL.path) else { return }
        try FileManager.default.removeItem(atPath: outputVideoURL.path)
    }
    /// Updates `timeRemaining` from the average per-frame processing time.
    private func calculateTimeRemaining() {
        let totalTimeElapsed = Date.now.timeIntervalSince1970 - startTime.timeIntervalSince1970
        let totalFramesCompleted = framesProcessed
        let averageTimeBetweenFrames = totalTimeElapsed / totalFramesCompleted
        let estimatedTimeRemaining = averageTimeBetweenFrames * (totalFrames - totalFramesCompleted)
        guard self.timeRemaining != 0 else {
            self.timeRemaining = estimatedTimeRemaining
            return
        }
        // Dampen upward jumps: only accept estimates that do not exceed the
        // previous one by more than 100 seconds.
        if estimatedTimeRemaining < self.timeRemaining + 100 {
            self.timeRemaining = estimatedTimeRemaining
        }
    }
    /// Finalizes the writer once BOTH streams (video, and audio when present)
    /// have been marked finished, then records the output's details.
    /// - Parameter outputURL: URL of the finished movie.
    private func stop(with outputURL: URL) {
        guard isProcessing,
              writerVideoInput != nil,
              videoWritingFinished
        else { return }
        if writerAudioInput != nil {
            guard audioWritingFinished else { return }
        }
        self.writer?.finishWriting { [weak self] in
            guard let self else {return}
            Task {
                try? await self.saveLastConvertedFile(outputURL: outputURL)
                outputURL.stopAccessingSecurityScopedResource()
                self.resetWriter()
            }
            print("finished writing")
        }
    }
    /// Returns the converter to its idle state so it can be reused.
    private func resetWriter() {
        isProcessing = false
        self.totalFrames = 0
        self.framesProcessed = 0
        self.timeRemaining = 0
        self.startTime = .now
        self.writerVideoInput = nil
        self.writerAudioInput = nil
        self.writer = nil
        self.readerVideoOutput = nil
        self.readerAudioOutput = nil
        self.heroReader = nil
        self.videoWritingFinished = false
        self.audioWritingFinished = false
    }
    /// Records size, duration, and processing time of the finished output in
    /// `lastConvertedFileURL` for display.
    /// - Parameter outputURL: URL of the finished movie.
    private func saveLastConvertedFile(outputURL: URL) async throws {
        do {
            let attr = try FileManager.default.attributesOfItem(atPath: outputURL.path)
            let fileSize = attr[FileAttributeKey.size] as? Int64
            let asset = AVAsset(url: outputURL)
            let duration = try await asset.load(.duration)
            self.lastConvertedFileURL = LastConvertedFile(
                filePath: outputURL,
                timeToProcess: dateFormatter.string(from: startTime, to: Date.now) ?? "Unknown",
                fileSize: byteCountFormatter.string(fromByteCount: fileSize ?? 0),
                duration: dateFormatter.string(from: duration.seconds) ?? "Unknown"
            )
        } catch {
            print("Error: \(error)")
        }
    }
}
/// Summary of the most recently converted movie, for display in the UI.
struct LastConvertedFile: Codable, Equatable {
    /// Location of the converted movie on disk.
    let filePath: URL
    /// Human-readable wall-clock time the conversion took.
    let timeToProcess: String
    /// Human-readable size of the output file.
    let fileSize: String
    /// Human-readable duration of the output movie.
    let duration: String
}

View File

@ -0,0 +1,124 @@
//
// SpatialVideoWriter.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import AVFoundation
import VideoToolbox
import Photos
/// Merges two separate single-eye movies into one MV-HEVC spatial video and
/// saves the result to the photo library.
class SpatialVideoWriter {
    /// Failures surfaced through the completion handler.
    private enum WriterError: Error {
        /// A decoded sample buffer had no backing pixel buffer.
        case missingPixelBuffer
    }

    /// Deletes any file already at `outputVideoURL` so the writer can create
    /// a fresh one. Failures (e.g. no file present) are logged, not rethrown.
    private func removeExistingFile(at outputVideoURL: URL) throws {
        do {
            try FileManager.default.removeItem(atPath: outputVideoURL.path)
            print("视频文件删除成功")
        } catch {
            print("删除视频文件出错:\(error)")
        }
    }

    /// Reads `leftEyeVideoURL` and `rightEyeVideoURL` frame-pair by
    /// frame-pair and writes them as tagged MV-HEVC layers to
    /// `outputVideoURL`, then saves the result to the photo library.
    ///
    /// This runs the whole read/write loop synchronously on the calling
    /// thread; run it off the main thread.
    /// - Parameter completion: Called with `(true, nil)` once writing
    ///   finishes, or `(false, error)` on failure.
    func writeSpatialVideo(leftEyeVideoURL: URL, rightEyeVideoURL: URL, outputVideoURL: URL, completion: @escaping (Bool, Error?) -> Void) {
        do {
            try removeExistingFile(at: outputVideoURL)
            let leftEyeAsset = AVURLAsset(url: leftEyeVideoURL)
            let rightEyeAsset = AVURLAsset(url: rightEyeVideoURL)
            let assetWriter = try AVAssetWriter(outputURL: outputVideoURL, fileType: .mov)
            // NOTE(review): force-unwraps assume both movies contain a video
            // track — confirm, or guard and fail via `completion`.
            let leftVideoTrack = leftEyeAsset.tracks(withMediaType: .video).first!
            let videoSettings: [String: Any] = [
                AVVideoWidthKey: leftVideoTrack.naturalSize.width,
                AVVideoHeightKey: leftVideoTrack.naturalSize.height,
                AVVideoCodecKey: AVVideoCodecType.hevc,
                AVVideoCompressionPropertiesKey: [
                    kVTCompressionPropertyKey_MVHEVCVideoLayerIDs: [0, 1] as CFArray,
                    kCMFormatDescriptionExtension_HorizontalFieldOfView: 90_000, // asset-specific, in thousandths of a degree
                    kVTCompressionPropertyKey_HorizontalDisparityAdjustment: 200, // asset-specific
                ]
            ]
            let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
            assetWriter.add(input)
            let adaptor = AVAssetWriterInputTaggedPixelBufferGroupAdaptor(assetWriterInput: input)
            assetWriter.startWriting()
            assetWriter.startSession(atSourceTime: .zero)
            let leftEyeReader = try AVAssetReader(asset: leftEyeAsset)
            let rightEyeReader = try AVAssetReader(asset: rightEyeAsset)
            // Decode both eyes to BGRA at the left eye's resolution.
            let readerOutputSettings: [String:Any] = [
                kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
                kCVPixelBufferWidthKey as String: leftVideoTrack.naturalSize.width,
                kCVPixelBufferHeightKey as String: leftVideoTrack.naturalSize.height
            ]
            let leftEyeOutput = AVAssetReaderTrackOutput(track: leftVideoTrack, outputSettings: readerOutputSettings)
            let rightEyeOutput = AVAssetReaderTrackOutput(track: rightEyeAsset.tracks(withMediaType: .video).first!, outputSettings: readerOutputSettings)
            leftEyeReader.add(leftEyeOutput)
            rightEyeReader.add(rightEyeOutput)
            leftEyeReader.startReading()
            rightEyeReader.startReading()
            // Consume frame pairs in lockstep; the loop ends when either eye
            // runs out, so trailing frames of a longer clip are dropped.
            while let leftBuffer = leftEyeOutput.copyNextSampleBuffer(),
                  let rightBuffer = rightEyeOutput.copyNextSampleBuffer() {
                let time = Date().timeIntervalSince1970
                print("获取了一帧" + String(time))
                guard let leftFrameBuffer = CMSampleBufferGetImageBuffer(leftBuffer),
                      let rightFrameBuffer = CMSampleBufferGetImageBuffer(rightBuffer) else {
                    print("获取左右眼像素缓冲区失败")
                    // Bug fix: the old code returned without invoking
                    // `completion`, leaving the caller waiting forever and the
                    // writer unfinished.
                    completion(false, WriterError.missingPixelBuffer)
                    return
                }
                // Tag each eye's pixel buffer with its MV-HEVC layer ID.
                let leftCVPixelBuffer = leftFrameBuffer as CVPixelBuffer
                let rightCVPixelBuffer = rightFrameBuffer as CVPixelBuffer
                let left = CMTaggedBuffer(tags: [.stereoView(.leftEye), .videoLayerID(0)], pixelBuffer: leftCVPixelBuffer)
                let right = CMTaggedBuffer(tags: [.stereoView(.rightEye), .videoLayerID(1)], pixelBuffer: rightCVPixelBuffer)
                while !adaptor.assetWriterInput.isReadyForMoreMediaData {
                    // Busy-wait until the writer input can take more data.
                    Thread.sleep(forTimeInterval: 0.1)
                }
                adaptor.appendTaggedBuffers([left, right], withPresentationTime: leftBuffer.presentationTimeStamp)
            }
            print("完成写入")
            input.markAsFinished()
            outputVideoURL.stopAccessingSecurityScopedResource()
            assetWriter.finishWriting { [self] in
                print("可以保存")
                // `completion` reports writing success here; the subsequent
                // library save only logs its own outcome.
                completion(true, nil)
                self.saveVideoToLibrary(videoURL: outputVideoURL, completion: completion)
            }
        } catch {
            print("生成失败")
            completion(false, error)
        }
    }

    /// Saves the finished movie into the user's photo library. The outcome is
    /// logged only; `completion` has already been called by the writer.
    private func saveVideoToLibrary(videoURL: URL, completion: @escaping (Bool, Error?) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
        }) { success, error in
            if success {
                print("保存成功")
            } else if let error = error {
                // Bug fix: the error was captured but never surfaced.
                print("保存失败:\(error)")
            }
        }
    }
}

View File

@ -0,0 +1,133 @@
//
// VideoConvertor.swift
// tdvideo
//
// Created by aaa on 2024/1/24.
//
import Foundation
import AVKit
import VideoToolbox
/// Errors thrown while probing a video asset.
enum VideoReaderError : Error {
    /// The asset has no video track.
    case invalidVideo
    /// The asset lacks the QuickTime spatial-video metadata marker.
    case notSpacialVideo
}
/// Converts an Apple spatial (MV-HEVC) movie into a flat side-by-side movie.
class VideoConvertor {
    /// Reads `inputFile` frame by frame, splits each MV-HEVC sample into its
    /// left/right eye layers, joins the pair side by side, and writes the
    /// result to `outputFile`.
    /// - Parameters:
    ///   - inputFile: URL of the source spatial video.
    ///   - outputFile: Destination URL; any existing file there is deleted first.
    ///   - progress: Optional callback with the completion fraction (0...1).
    func convertVideo( inputFile : URL, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
        do {
            try FileManager.default.removeItem(atPath: outputFile.path)
            print("视频文件删除成功")
        } catch {
            // Best-effort delete: failure (e.g. no existing file) is only logged.
            print("删除视频文件出错:\(error)")
        }
        // Load the AVAsset
        let asset = AVAsset(url: inputFile)
        let assetReader = try AVAssetReader(asset: asset)
        // Check the QuickTime metadata for the spatial-format marker.
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        if spacialCharacteristics.count == 0 {
            // NOTE(review): only logs a warning; conversion continues even for
            // non-spatial input — confirm that is intended.
            print("该视频不是空间视频")
        }
        // Orientation transform and display size of the source video.
        let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
        // Ask the decoder for both MV-HEVC layers (left eye 0, right eye 1).
        let output = try await AVAssetReaderTrackOutput(
            track: asset.loadTracks(withMediaType: .video).first!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader.add(output)
        assetReader.startReading()
        // Output is full width but half height — see joinImages(leftImage:rightImage:).
        let vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
        let duration = try await asset.load(.duration)
        // Based on code from https://www.finnvoorhees.com/words/reading-and-writing-spatial-video-with-avfoundation
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            // NOTE(review): a sample without tagged buffers aborts the whole
            // loop (and the function) silently — confirm that is intended
            // rather than `continue`.
            guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
            let leftEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
            })?.buffer
            let rightEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
            })?.buffer
            if let leftEyeBuffer,
               let rightEyeBuffer,
               case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
               case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                let newpb = joinImages( leftImage: lciImage, rightImage: rciImage )
                let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                _ = vw!.add(image: newpb, presentationTime: time)
                print( "Added frame at \(time)")
                // callback with progress
                progress?( Float(time.value)/Float(duration.value))
                // This sleep is needed to stop memory blooming - keeps around 280Mb rather than spiraling up to 8+Gig!
                try await Task.sleep(nanoseconds: 3_000_000)
            }
        }
        _ = try await vw!.finish()
        print( "status - \(assetReader.status)")
        print( "status - \(assetReader.error?.localizedDescription ?? "None")")
        print( "Finished")
    }

    /// Returns the video track's preferred transform and its display size
    /// (absolute width/height after applying the transform).
    /// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
    func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
        guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
        else{throw VideoReaderError.invalidVideo}
        let naturalSize = try await track.load(.naturalSize)
        let naturalTransform = try await track.load(.preferredTransform)
        let size = naturalSize.applying(naturalTransform)
        return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
    }

    /// Draws the two eye images side by side at half resolution: the output
    /// is one eye's width by half its height, with each eye squeezed into
    /// half the output width.
    // NOTE(review): rendering UIImage(ciImage:) into a UIGraphics context
    // relies on implicit Core Image rendering — verify output on device.
    func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage )
        let right = UIImage(ciImage: rightImage )
        let imageWidth = left.size.width/2 + right.size.width/2
        let imageHeight = left.size.height/2
        let newImageSize = CGSize(width:imageWidth, height: imageHeight);
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
        right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        let ci = CIImage(cgImage: image.cgImage!)
        return ci
    }
}

View File

@ -0,0 +1,302 @@
//
// VideoConvertor2.swift
// tdvideo
//
// Created by aaa on 2024/1/24.
//
//
//com.nsk.tdvideo
/*
let rr:AVAsynchronousCIImageFilteringRequest?
// AVAsynchronousCIImageFilteringRequest
let videoComposition = AVMutableVideoComposition(asset: asset) { request in
//
let time = request.compositionTime
// callback with progress
progress?(Float(time.value) / Float(duration.value))
}
videoComposition.renderSize = CGSize(width: Int(videoSize.width), height: Int(videoSize.height/2))
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
playerItem?.videoComposition = videoComposition
//
let nc:NotificationCenter = NotificationCenter.default
let noti:NSNotification = NSNotification.init(name: NSNotification.Name(rawValue: "upvideo"), object: nil)
nc.post(noti as Notification)
// Based on code from https://www.finnvoorhees.com/words/reading-and-writing-spatial-video-with-avfoundation
// CIScreenBlendMode:
// CIHardLightBlendMode: 使
// CILightenBlendMode:
// CIColorDodgeBlendMode: 使
// CIColorBurnBlendMode: 使
// CIDarkenBlendMode:
// CILinearDodgeBlendMode: 使线
// CIMultiplyBlendMode:
// CISourceOverCompositing:
*/
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
/// Decodes an MV-HEVC spatial video (left/right eye layers) with AVAssetReader and
/// re-encodes it into a flat 2D video through `VideoWriter`, applying one of the
/// presentation modes selected by `type`.
class VideoConvertor2 {
    /// Most recently decoded left-eye frame.
    var leftEyeImage: CVPixelBuffer?
    /// Most recently decoded right-eye frame.
    var rightEyeImage: CVPixelBuffer?
    /// Output mode: 2 = side-by-side join, 3 = red/blue anaglyph, 4 = blurred side-by-side.
    // NOTE(review): other values still create a writer but never feed it frames - confirm intended.
    var type = 0
    /// Reads every sample of `asset`'s first video track (requesting both MV-HEVC
    /// layers), converts each eye pair per `type`, and writes the result to `outputFile`.
    /// `progress` receives time.value/duration.value per frame (roughly 0...1).
    func convertVideo( asset : AVAsset, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
        // Best-effort removal of a stale output file; a missing file is not an error.
        do {
            try FileManager.default.removeItem(atPath: outputFile.path)
            print("视频文件删除成功")
        } catch {
            print("删除视频文件出错:\(error)")
        }
        let assetReader = try AVAssetReader(asset: asset)
        // Spatial videos carry this QuickTime metadata key; warn (but continue) if absent.
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        if spacialCharacteristics.count == 0 {
            print("不是空间视频")
        }
        // Orientation transform and display size of the source track.
        let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
        let vw:VideoWriter?
        if(type == 3){
            // Anaglyph output keeps the full frame size.
            vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
        }
        else{
            // Side-by-side output halves the height (joinImages scales each eye to 50%).
            vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
        }
        // Request both MV-HEVC video layers (0 and 1 - one per eye).
        let output = try await AVAssetReaderTrackOutput(
            track: asset.loadTracks(withMediaType: .video).first!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader.add(output)
        assetReader.startReading()
        let duration = try await asset.load(.duration)
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            // NOTE(review): returning here abandons the writer without calling finish(),
            // leaving the output file unfinalized - confirm `continue`/`break` was not intended.
            guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
            let leftEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
            })?.buffer
            let rightEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
            })?.buffer
            if let leftEyeBuffer,
               let rightEyeBuffer,
               case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
               case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                leftEyeImage = leftEyePixelBuffer
                rightEyeImage = rightEyePixelBuffer
                let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                // Mode 2: plain side-by-side (parallel-eye) frame.
                if(type == 2){
                    let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
                    let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                    _ = vw!.add(image: newpb, presentationTime: time)
                    print( "Added frame at \(time)")
                    // callback with progress
                    progress?( Float(time.value)/Float(duration.value))
                    // This sleep is needed to stop memory blooming - keeps around 280Mb rather than spiraling up to 8+Gig!
                    try await Task.sleep(nanoseconds: 3_000_000)
                }
                // Mode 3: red/blue anaglyph blend of the two eyes.
                if(type == 3){
                    // 20-value color matrix packed row-by-row (R, G, B, A; 5 columns each incl. bias).
                    // NOTE(review): CIColorMatrix's inputRVector expects a 4-component CIVector;
                    // passing all 20 values relies on only the leading components being read -
                    // verify the intended channel mapping.
                    let redColorMatrix: [CGFloat] = [
                        0.0, 0.0, 0.0, 0.0, 0.0, // red row
                        0.0, 0.0, 0.0, 0.0, 0.0, // green row
                        0.0, 0.0, 1.0, 0.0, 0.0, // blue row
                        0.0, 0.0, 0.0, 1.0, 0.0 // alpha row
                    ]
                    let blueColorMatrix: [CGFloat] = [
                        1.0, 0.0, 0.0, 0.0, 0.0, // red row
                        0.0, 0.0, 0.0, 0.0, 0.0, // green row
                        0.0, 0.0, 0.0, 0.0, 0.0, // blue row
                        0.0, 0.0, 0.0, 1.0, 0.0 // alpha row
                    ]
                    let redFilter = CIFilter(name: "CIColorMatrix")!
                    redFilter.setValue(lciImage, forKey: kCIInputImageKey)
                    redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
                    let blueFilter = CIFilter(name: "CIColorMatrix")!
                    blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
                    blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
                    // Blend the two channel-filtered eyes, then sharpen the composite.
                    if let redOutputImage = redFilter.outputImage,
                       let blueOutputImage = blueFilter.outputImage {
                        let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
                        compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
                        compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
                        let sharpenedFilter = CIFilter(name: "CISharpenLuminance")!
                        sharpenedFilter.setValue(compositeFilter.outputImage, forKey: kCIInputImageKey)
                        sharpenedFilter.setValue(2, forKey: kCIInputSharpnessKey)
                        // let colorControlsFilter = CIFilter(name: "CIColorControls")!
                        // colorControlsFilter.setValue(sharpenedFilter.outputImage, forKey: kCIInputImageKey)
                        // colorControlsFilter.setValue(0.7, forKey: kCIInputSaturationKey)
                        let lastImg = sharpenedFilter.outputImage!
                        let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                        _ = vw!.add(image: lastImg, presentationTime: time)
                        print( "Added frame at \(time)")
                        // callback with progress
                        progress?( Float(time.value)/Float(duration.value))
                        // This sleep is needed to stop memory blooming - keeps around 280Mb rather than spiraling up to 8+Gig!
                        try await Task.sleep(nanoseconds: 3_000_000)
                    }
                }
                // Mode 4: gaussian-blurred side-by-side frame.
                if(type == 4){
                    let filter1 = CIFilter(name: "CIGaussianBlur")!
                    filter1.setValue(lciImage, forKey: kCIInputImageKey)
                    let filter2 = CIFilter(name: "CIGaussianBlur")!
                    filter2.setValue(rciImage, forKey: kCIInputImageKey)
                    let newpb = joinImages( leftImage: filter1.outputImage!, rightImage:filter2.outputImage! )
                    let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                    _ = vw!.add(image: newpb, presentationTime: time)
                    print( "Added frame at \(time)")
                    // callback with progress
                    progress?( Float(time.value)/Float(duration.value))
                    // This sleep is needed to stop memory blooming - keeps around 280Mb rather than spiraling up to 8+Gig!
                    try await Task.sleep(nanoseconds: 3_000_000)
                }
            }
        }
        print( "status - \(assetReader.status)")
        print( "status - \(assetReader.error?.localizedDescription ?? "None")")
        print( "Finished")
        _ = try await vw!.finish()
    }
    /// Debug helper: renders `ciImage` to a CGImage and dumps the frame count and
    /// per-frame properties of the resulting data.
    // NOTE(review): the data provider wraps RAW bitmap bytes, not an encoded image
    // container, so CGImageSource may not parse it as intended - verify this helper.
    func isSpatialImage2(from ciImage: CIImage) {
        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
            return
        }
        let dataProvider = CGDataProvider(data: cgImage.dataProvider!.data! as CFData)
        let imageSource = CGImageSourceCreateWithDataProvider(dataProvider!, nil)
        let frameCount = CGImageSourceGetCount(imageSource!)
        print(frameCount)
        for index in 0..<frameCount {
            let properties = CGImageSourceCopyPropertiesAtIndex(imageSource!, index, nil) as? [CFString: Any]
            print(properties as Any)
            guard let frameImage = CGImageSourceCreateImageAtIndex(imageSource!, index, nil) else {
                continue
            }
            print(frameImage)
        }
    }
    /// Returns the first video track's preferred transform and its display size
    /// (natural size with the transform applied, absolute-valued).
    /// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
    func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
        guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
        else{throw VideoReaderError.invalidVideo}
        let naturalSize = try await track.load(.naturalSize)
        let naturalTransform = try await track.load(.preferredTransform)
        let size = naturalSize.applying(naturalTransform)
        return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
    }
    /// Scales each eye to half size and composites them side-by-side into one CIImage.
    func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage )
        let right = UIImage(ciImage: rightImage )
        // Canvas: half of each source width side by side, at half the source height.
        let imageWidth = left.size.width/2 + right.size.width/2
        let imageHeight = left.size.height/2
        let newImageSize = CGSize(width:imageWidth, height: imageHeight);
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
        right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
        // NOTE(review): force-unwraps assume the context/cgImage always exist - confirm.
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        let ci = CIImage(cgImage: image.cgImage!)
        return ci
    }
}

View File

@ -0,0 +1,89 @@
//
// VideoConvertor3.swift
// tdvideo
//
// Created by mac on 2024/2/18.
//
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
/// Decodes every left/right frame pair of an MV-HEVC spatial video into `datas`
/// without re-encoding anything.
class VideoConvertor3 {
    /// Decoded frames; each element is a dictionary with keys "time" (CMTime),
    /// "left" and "right" (CIImage).
    // NOTE(review): every frame's pixel buffer stays alive via its CIImage, so memory
    // grows linearly with video length - confirm acceptable for expected inputs.
    var datas:NSMutableArray = NSMutableArray()
    /// Reads all samples from `asset`'s first video track, requesting both MV-HEVC
    /// layers, and stores each eye pair with its presentation time in `datas`.
    /// Calls `progress` once with 0.5 when decoding finishes.
    func convertVideo( asset : AVAsset, progress: ((Float)->())? = nil ) async throws {
        let assetReader = try AVAssetReader(asset: asset)
        // Request both MV-HEVC video layers (0 and 1 - one per eye).
        let output = try await AVAssetReaderTrackOutput(
            track: asset.loadTracks(withMediaType: .video).first!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader.add(output)
        assetReader.startReading()
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            // NOTE(review): a sample without tagged buffers aborts the whole function
            // and skips the prints / progress callback below - confirm `continue`
            // was not intended here.
            guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
            let leftEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
            })?.buffer
            let rightEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
            })?.buffer
            if let leftEyeBuffer,
               let rightEyeBuffer,
               case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
               case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                let presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
                let dataDic = ["time":presentationTime,"left":lciImage,"right":rciImage] as [String : Any]
                print("解码")
                datas.add(dataDic)
            }
        }
        print( "status - \(assetReader.status)")
        print( "status - \(assetReader.error?.localizedDescription ?? "None")")
        print( "解码完成")
        progress?(0.5)
    }
    /// Scales each eye to half size and composites them side-by-side into one CIImage.
    func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage )
        let right = UIImage(ciImage: rightImage )
        // Canvas: half of each source width side by side, at half the source height.
        let imageWidth = left.size.width/2 + right.size.width/2
        let imageHeight = left.size.height/2
        let newImageSize = CGSize(width:imageWidth, height: imageHeight);
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
        right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
        // NOTE(review): force-unwraps assume the context/cgImage always exist - confirm.
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        return CIImage(cgImage: image.cgImage!)
    }
}

View File

@ -0,0 +1,125 @@
//
// VideoConvertor4.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
/// Decodes an MV-HEVC spatial video and writes a flat 2D "screen blend" composite
/// of the left and right eye frames to `outputFile`.
class VideoConvertor4 {
    /// Most recently decoded left-eye frame.
    var leftEyeImage: CVPixelBuffer?
    /// Most recently decoded right-eye frame.
    var rightEyeImage: CVPixelBuffer?
    /// Re-encodes `asset` into `outputFile`, blending the two eye layers of each
    /// sample with CIScreenBlendMode. `progress` receives time.value/duration.value
    /// per frame (roughly 0...1).
    /// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track
    ///   or the `VideoWriter` cannot be created.
    func convertVideo( asset : AVAsset, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
        // Best-effort removal of a stale output file; a missing file is not an error.
        do {
            try FileManager.default.removeItem(atPath: outputFile.path)
            print("视频文件删除成功")
        } catch {
            print("删除视频文件出错:\(error)")
        }
        let assetReader = try AVAssetReader(asset: asset)
        // Spatial videos carry this QuickTime metadata key; warn (but continue) if absent.
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        if spacialCharacteristics.count == 0 {
            print("不是空间视频")
        }
        // Orientation transform and display size of the source track.
        let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
        // Fail loudly here instead of force-unwrapping `vw!` later if the writer
        // cannot be set up (VideoWriter.init is failable).
        guard let vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main) else {
            throw VideoReaderError.invalidVideo
        }
        // Request both MV-HEVC video layers (0 and 1 - one per eye).
        let output = try await AVAssetReaderTrackOutput(
            track: asset.loadTracks(withMediaType: .video).first!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader.add(output)
        assetReader.startReading()
        let duration = try await asset.load(.duration)
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            // `break` (not `return`) so the writer is still finalized below even when a
            // sample carries no tagged buffers; returning here previously abandoned the
            // writer without calling finish(), leaving a corrupt/unfinished output file.
            guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { break }
            let leftEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
            })?.buffer
            let rightEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
            })?.buffer
            if let leftEyeBuffer,
               let rightEyeBuffer,
               case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
               case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                leftEyeImage = leftEyePixelBuffer
                rightEyeImage = rightEyePixelBuffer
                let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                // Screen-blend the right eye over the left eye into a single frame.
                let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
                compositeFilter.setValue(lciImage, forKey: kCIInputImageKey)
                compositeFilter.setValue(rciImage, forKey: kCIInputBackgroundImageKey)
                let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                _ = vw.add(image: compositeFilter.outputImage!, presentationTime: time)
                print( "Added frame at \(time)")
                progress?( Float(time.value)/Float(duration.value))
                // Brief sleep keeps memory bounded while decoding (see VideoConvertor2).
                try await Task.sleep(nanoseconds: 3_000_000)
            }
        }
        print( "status - \(assetReader.status)")
        print( "status - \(assetReader.error?.localizedDescription ?? "None")")
        print( "Finished")
        _ = try await vw.finish()
    }
    /// Returns the first video track's preferred transform and its display size
    /// (natural size with the transform applied, absolute-valued).
    /// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
    func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
        guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
        else{throw VideoReaderError.invalidVideo}
        let naturalSize = try await track.load(.naturalSize)
        let naturalTransform = try await track.load(.preferredTransform)
        let size = naturalSize.applying(naturalTransform)
        return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
    }
}

View File

@ -0,0 +1,35 @@
//
// VideoFile.swift
// vp
//
// Created by soldoros on 2024/1/17.
//
import SwiftUI
import UniformTypeIdentifiers
/// A `FileDocument` implementation so SwiftUI's `.fileExporter` can write the video bytes to disk.
struct VideoFile: FileDocument {
    /// Content types this document can be read from / written as.
    static var readableContentTypes: [UTType] = [.movie, .quickTimeMovie, .mpeg4Movie]
    /// Raw bytes of the movie file; empty until assigned.
    var data: Data
    /// Creates an empty document.
    init() {
        data = Data()
    }
    /// Loads the document's bytes, falling back to empty data when the file
    /// has no regular contents.
    init(configuration: ReadConfiguration) throws {
        data = configuration.file.regularFileContents ?? Data()
    }
    /// Wraps the bytes for writing.
    func fileWrapper(configuration: WriteConfiguration) throws -> FileWrapper {
        FileWrapper(regularFileWithContents: data)
    }
}

View File

@ -0,0 +1,7 @@
//
// VideoPlayer.swift
// tdvideo
//
// Created by mac on 2024/2/18.
//

View File

@ -0,0 +1,40 @@
//
// VideoPreview.swift
// vp
//
// Created by soldoros on 2024/1/17.
//
import AVFoundation
import MetalKit
import SwiftUI
//
//// Swift
//struct VideoPreviewView: NSViewRepresentable {
// //
// var leftEyePreviewImage: CVPixelBuffer
//
// //
// var rightEyePreviewImage: CVPixelBuffer
//
// // Metal
// func makeNSView(context: Context) -> MetalPlayer {
// let ciImage = CIImage(cvPixelBuffer: leftEyePreviewImage)
// let frame = CGRect(
// x: 0,
// y: 0,
// width: ciImage.extent.width * 2,
// height: ciImage.extent.height
// )
// return MetalPlayer(frame: frame)
// }
//
// // Metal
// func updateNSView(_ nsView: MetalPlayer, context: Context) {
// nsView.render(
// leftPixelBuffer: leftEyePreviewImage,
// rightPixelBuffer: rightEyePreviewImage
// )
// }
//}

View File

@ -0,0 +1,161 @@
//
// VideoWriter.swift
// SpacialVideoConvertor
//
// Created by Andy Qua on 04/01/2024.
//
// Based on code from xaphod/VideoWriter.swift - https://gist.github.com/xaphod/de83379cc982108a5b38115957a247f9
//
import Foundation
import AVFoundation
import CoreImage
/// Frame-by-frame MP4 writer built on AVAssetWriter: callers append CIImages,
/// CVPixelBuffers, or CMSampleBuffers one at a time, then call `finish()`.
/// Based on xaphod/VideoWriter.swift - https://gist.github.com/xaphod/de83379cc982108a5b38115957a247f9
class VideoWriter {
    fileprivate var writer: AVAssetWriter
    fileprivate var writerInput: AVAssetWriterInput
    fileprivate var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
    fileprivate let queue: DispatchQueue
    static var ciContext = CIContext.init() // we reuse a single context for performance reasons
    /// Output frame size in pixels, as configured on the encoder.
    let pixelSize: CGSize
    /// Presentation time of the most recently appended frame, if any.
    var lastPresentationTime: CMTime?
    /// Creates the writer, attaches a video input + pixel-buffer adaptor, and starts
    /// the session immediately. Fails (returns nil) when the AVAssetWriter cannot be
    /// created, the input cannot be added, `sessionStartTime` is invalid, or the
    /// writer reports an error on startup.
    init?(url: URL, width: Int, height: Int, orientation: CGAffineTransform, sessionStartTime: CMTime, isRealTime: Bool, queue: DispatchQueue) {
        print("VideoWriter init: width=\(width) height=\(height), url=\(url)")
        self.queue = queue
        let outputSettings: [String:Any] = [
            AVVideoCodecKey : AVVideoCodecType.h264, // or .hevc if you like
            AVVideoWidthKey : width,
            AVVideoHeightKey: height,
        ]
        self.pixelSize = CGSize.init(width: width, height: height)
        let input = AVAssetWriterInput.init(mediaType: .video, outputSettings: outputSettings)
        input.expectsMediaDataInRealTime = isRealTime
        input.transform = orientation
        guard
            let writer = try? AVAssetWriter.init(url: url, fileType: .mp4),
            writer.canAdd(input),
            sessionStartTime != .invalid
        else {
            return nil
        }
        let sourceBufferAttributes: [String:Any] = [
            String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_32ARGB, // yes, ARGB is right here for images...
            String(kCVPixelBufferWidthKey) : width,
            String(kCVPixelBufferHeightKey) : height,
        ]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor.init(assetWriterInput: input, sourcePixelBufferAttributes: sourceBufferAttributes)
        self.pixelBufferAdaptor = pixelBufferAdaptor
        // Order matters: add the input, start writing, then open the session.
        writer.add(input)
        writer.startWriting()
        writer.startSession(atSourceTime: sessionStartTime)
        if let error = writer.error {
            NSLog("VideoWriter init: ERROR - \(error)")
            return nil
        }
        self.writer = writer
        self.writerInput = input
    }
    /// Renders `image` into a pooled pixel buffer and appends it at `presentationTime`.
    /// Returns false (frame dropped) when the input is not ready or the append fails.
    func add(image: CIImage, presentationTime: CMTime) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            return false
        }
        if self.pixelBufferAdaptor.appendPixelBufferForImage(image, presentationTime: presentationTime) {
            self.lastPresentationTime = presentationTime
            return true
        }
        return false
    }
    /// Appends an already-populated pixel buffer at `presentationTime`.
    /// Returns false when the input is not ready or the append fails.
    func add(buffer: CVPixelBuffer, presentationTime: CMTime) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            return false
        }
        if self.pixelBufferAdaptor.append(buffer, withPresentationTime: presentationTime) {
            self.lastPresentationTime = presentationTime
            return true
        }
        return false
    }
    /// Appends a raw sample buffer (timestamp comes from the buffer itself).
    /// Returns false when the input is not ready or the append fails.
    func add(sampleBuffer: CMSampleBuffer) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            print("VideoWriter: not ready for more data")
            return false
        }
        if self.writerInput.append(sampleBuffer) {
            self.lastPresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            return true
        }
        return false
    }
    /// Finalizes the file. Returns the written asset on success, or nil when the
    /// writer did not complete (the error is logged).
    func finish() async throws -> AVAsset? {
        writerInput.markAsFinished()
        print("VideoWriter: calling writer.finishWriting()")
        await writer.finishWriting()
        if self.writer.status != .completed {
            print("VideoWriter finish: error in finishWriting - \(self.writer.error?.localizedDescription ?? "Unknown")")
            return nil
        }
        let asset = AVURLAsset.init(url: self.writer.outputURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey : true])
        let duration = try await CMTimeGetSeconds( asset.load(.duration) )
        // can check for minimum duration here (ie. consider a failure if too short)
        print("VideoWriter: finishWriting() complete, duration=\(duration)")
        return asset
    }
}
extension AVAssetWriterInputPixelBufferAdaptor {
    /// Renders `image` into a pixel buffer drawn from the adaptor's pool and appends
    /// it at `presentationTime`.
    /// - Returns: true when the frame was appended; false when the pool is missing,
    ///   allocation fails, or the append is rejected.
    func appendPixelBufferForImage(_ image: CIImage, presentationTime: CMTime) -> Bool {
        var appendSucceeded = false
        autoreleasepool {
            // The pool is nil until the writer session has started (or if the writer errored).
            guard let pixelBufferPool = self.pixelBufferPool else {
                print("appendPixelBufferForImage: ERROR - missing pixelBufferPool") // writer can have error: writer.error=\(String(describing: self.writer.error))
                return
            }
            // An inout optional replaces the previous manually managed
            // UnsafeMutablePointer (allocate/deinitialize/deallocate bookkeeping).
            var pixelBuffer: CVPixelBuffer?
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
                kCFAllocatorDefault,
                pixelBufferPool,
                &pixelBuffer
            )
            if status == kCVReturnSuccess, let pixelBuffer {
                pixelBuffer.fillPixelBufferFromImage(image)
                appendSucceeded = self.append(pixelBuffer, withPresentationTime: presentationTime)
                if !appendSucceeded {
                    // If a result of NO is returned, clients can check the value of AVAssetWriter.status to determine whether the writing operation completed, failed, or was cancelled. If the status is AVAssetWriterStatusFailed, AVAsset.error will contain an instance of NSError that describes the failure.
                    print("VideoWriter appendPixelBufferForImage: ERROR appending")
                }
            } else {
                print("VideoWriter appendPixelBufferForImage: ERROR - Failed to allocate pixel buffer from pool, status=\(status)") // -6680 = kCVReturnInvalidPixelFormat
            }
        }
        return appendSucceeded
    }
}
extension CVPixelBuffer {
    /// Renders `image` into this buffer's backing memory via the shared CIContext,
    /// locking the base address for the duration of the render.
    func fillPixelBufferFromImage(_ image: CIImage) {
        CVPixelBufferLockBaseAddress(self, [])
        defer { CVPixelBufferUnlockBaseAddress(self, []) }
        VideoWriter.ciContext.render(image, to: self)
    }
}

View File

@ -0,0 +1,112 @@
//
// ViewController.swift
// tdvideo
//
// Created by aaa on 2024/1/19.
// https://www.finnvoorhees.com/words/reading-and-writing-spatial-video-with-avfoundation#reading-spatial-video-using-avassetreader
//import UIKit
//import AVKit
//import AVFoundation
//import CoreImage
//import Foundation
//import Observation
//import VideoToolbox
//import CoreMedia
import UIKit
/// Root demo menu: a table of feature demos; tapping a row presents the
/// corresponding demo controller.
class ViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
    /// Menu entries; the row index selects which controller is presented below.
    let options = ["视频转码", "图片转码", "设备投流","视频导出","普通图片合成空间图片","普通视频合成空间视频","拍摄空间图片","拍摄空间视频","边转边播"]
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor.brown
        let tableView = UITableView(frame: view.bounds, style: .plain)
        tableView.delegate = self
        tableView.dataSource = self
        tableView.register(UITableViewCell.self, forCellReuseIdentifier: "Cell")
        view.addSubview(tableView)
    }
    // MARK: - UITableViewDataSource
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return options.count
    }
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath)
        cell.textLabel?.text = options[indexPath.row]
        return cell
    }
    // MARK: - UITableViewDelegate
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        tableView.deselectRow(at: indexPath, animated: true)
        // Map the tapped row to its demo controller (switch replaces the previous
        // ten-branch if-chain).
        let controller: UIViewController?
        switch indexPath.row {
        case 0: controller = ViewController2()      // video transcoding
        case 1: controller = PlayController()       // image transcoding
        case 2: controller = PlayContoller4()       // device streaming
        case 3: controller = PlayContoller6()       // video export
        case 4: controller = PlayContoller7()       // photo -> spatial photo
        case 5: controller = PlayContoller10()      // video -> spatial video
        case 6: controller = PlayContoller5()       // capture spatial photo
        case 7: controller = PlayContoller9()       // capture spatial video
        case 8: controller = PlayContoller8()       // convert-while-playing
        // Unreachable with the current 9-entry `options` array (rows 0-8); kept so
        // adding a tenth menu entry re-enables it.
        case 9: controller = PlayContoller11()
        default: controller = nil
        }
        if let controller {
            present(controller, animated: true, completion: nil)
        }
    }
}

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict/>
</plist>

View File

@ -0,0 +1,505 @@
//
// PlayController.swift
// tdvideo
//
// Created by aaa on 2024/1/25.
//
import Foundation
import AVKit
import VideoToolbox
import CoreVideo
import UIKit
import ImageIO
import CoreImage
import Photos
//
class PlayController: UIViewController {
var imgData:Data?
///
var leftEyeImage: CVPixelBuffer?
///
var rightEyeImage: CVPixelBuffer?
//
var lvjing = "CIGaussianBlur"
//
var type = 0
// var playerItem:AVPlayerItem?
var playerLay:AVPlayerLayer?
var player:AVPlayer = AVPlayer()
var btn3:UIButton?
var sourceVideoURL:URL?
var outputVideoURL:URL?
var playerLooper: AVPlayerLooper?
var mImgView: UIImageView?
//
// let makerAppleProperties = imageProperties["{HEIF}"]
func isSpatialImage(imageURL: URL) -> Bool {
guard let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else {
return false
}
guard let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 1, nil) as? [CFString: Any] else {
return false
}
print(properties)
/*
[ProfileName: sRGB IEC61966-2.1, {TIFF}: {
Orientation = 1;
TileLength = 512;
TileWidth = 512;
}, PixelWidth: 4032, PixelHeight: 3024, {HEIF}: {
CameraExtrinsics = {
CoordinateSystemID = 0;
Position = (
"-0.019238",
0,
0
);
Rotation = (
1,
0,
0,
0,
1,
0,
0,
0,
1
);
};
}, Depth: 8, Orientation: 1, ColorModel: RGB]
{HEIF}
*/
//gif
// let frameCount = CGImageSourceGetCount(imageSource)
// if(frameCount == 1){
// return false
// }
return true
}
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.brown
let path = Bundle.main.path(forResource: "img3", ofType: "HEIC")
sourceVideoURL = URL.init(filePath: path!)
outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")
let nsdata = NSData(contentsOf: sourceVideoURL!)
imgData = nsdata as? Data
let isSpatial = isSpatialImage(imageURL: sourceVideoURL!)
if !isSpatial {
print("这不是一张空间图片")
return
}
mImgView = UIImageView()
mImgView!.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 180)
self.view.addSubview(mImgView!)
let image = UIImage(contentsOfFile: sourceVideoURL!.path)
mImgView!.image = image
mImgView!.isUserInteractionEnabled = true
let tapGesture = UITapGestureRecognizer(target: self, action: #selector(imageTapped(_:)))
mImgView!.addGestureRecognizer(tapGesture)
//
let segmentedControl = UISegmentedControl(items: ["空间照片", "平行眼", "交叉眼", "红蓝立体"])
//
segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)
//
segmentedControl.selectedSegmentIndex = 0
//
self.view.addSubview(segmentedControl)
segmentedControl.layer.borderWidth = 1.0 //
segmentedControl.layer.borderColor = UIColor.blue.cgColor //
segmentedControl.tintColor = UIColor.blue //
let normalTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white]
let selectedTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.blue]
segmentedControl.setTitleTextAttributes(normalTextAttributes, for: .normal)
segmentedControl.setTitleTextAttributes(selectedTextAttributes, for: .selected)
//
segmentedControl.addTarget(self, action: #selector(segmentedControlValueChanged(_:)), for: .valueChanged)
}
@objc func imageTapped(_ sender: UITapGestureRecognizer) {
let vc:PlayControllerImg = PlayControllerImg()
self.present(vc, animated: true, completion: nil)
vc.mediaSelectedHandler = { [self]data in
print("回调")
print(data)
imgData = data
let image = UIImage(data: imgData!)
mImgView!.image = image
}
}
//"", "", "", ""
    /// Re-renders the preview for the selected display mode:
    /// 0 = original spatial photo, 1 = parallel-eye pair, 2 = cross-eye pair,
    /// 3 = red/blue anaglyph composite.
    @objc func segmentedControlValueChanged(_ sender: UISegmentedControl) {
        let selectedIndex = sender.selectedSegmentIndex
        print("选中了第 \(selectedIndex) 个选项")
        // Stop playback/observers left over from a previous mode before re-rendering.
        player.pause()
        NotificationCenter.default.removeObserver(self)
        mImgView!.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 180)
        // Decode the current image bytes (imgData is replaced by the picker callback).
        guard let imageSource = CGImageSourceCreateWithData(imgData! as CFData, nil) else {
            return
        }
        print(imageSource)
        // A spatial HEIC stores one sub-image per eye; collect them all.
        let frameCount = CGImageSourceGetCount(imageSource)
        var frames: [CGImage] = []
        for index in 0..<frameCount {
            guard let frameImage = CGImageSourceCreateImageAtIndex(imageSource, index, nil) else {
                continue
            }
            frames.append(frameImage)
        }
        // Need both eyes; anything less is not a spatial image.
        if(frames.count < 2){return}
        let lciImage = CIImage(cgImage: frames.first!)
        let rciImage = CIImage(cgImage: frames[1])
        // Mode 0: show the original file as-is.
        if(selectedIndex == 0){
            let image = UIImage(contentsOfFile: sourceVideoURL!.path)
            mImgView!.image = image
        }
        // Mode 1: parallel-eye - left eye on the left, right eye on the right.
        if(selectedIndex == 1){
            mImgView!.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
            let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
            let lastImg = convertCIImageToUIImage(ciImage: newpb)!
            // NOTE(review): `[weak self]` followed by `self!` defeats the weak capture - confirm intent.
            DispatchQueue.main.async { [weak self] in
                self!.mImgView!.image = lastImg
            }
        }
        // Mode 2: cross-eye - same join with the eyes swapped.
        if(selectedIndex == 2){
            mImgView!.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
            let newpb = joinImages( leftImage:rciImage , rightImage:lciImage )
            let lastImg = convertCIImageToUIImage(ciImage: newpb)!
            DispatchQueue.main.async { [weak self] in
                self!.mImgView!.image = lastImg
            }
        }
        // Mode 3: red/blue anaglyph composite of the two eyes.
        if(selectedIndex == 3){
            mImgView!.frame = CGRect.init(x: 0, y: 180, width: self.view.frame.size.width, height: 380)
            // 20-value color matrices packed row-by-row (R, G, B, A; 5 columns each incl. bias).
            // NOTE(review): CIColorMatrix's inputRVector/inputBVector are documented as
            // 4-component CIVectors; passing all 20 values relies on only the leading
            // components being read - verify the intended channel mapping.
            let redColorMatrix: [CGFloat] = [
                0.0, 0.0, 0.0, 0.0, 0.0, // red row
                0.0, 0.0, 0.0, 0.0, 0.0, // green row
                0.0, 0.0, 0.5, 0.0, 0.0, // blue row
                0.0, 0.0, 0.0, 1.0, 0.0 // alpha row
            ]
            let blueColorMatrix: [CGFloat] = [
                0.5, 0.0, 0.0, 0.0, 0.0, // red row
                0.0, 0.0, 0.0, 0.0, 0.0, // green row
                0.0, 0.0, 0.0, 0.0, 0.0, // blue row
                0.0, 0.0, 0.0, 1.0, 0.0 // alpha row
            ]
            let redFilter = CIFilter(name: "CIColorMatrix")!
            redFilter.setValue(lciImage, forKey: kCIInputImageKey)
            redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
            let blueFilter = CIFilter(name: "CIColorMatrix")!
            blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
            blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
            // Blend the two channel-filtered eyes into one frame.
            if let redOutputImage = redFilter.outputImage,
               let blueOutputImage = blueFilter.outputImage {
                // Other blend modes tried here: CIHardLightBlendMode, CILightenBlendMode,
                // CIColorDodgeBlendMode, CIColorBurnBlendMode, CIDarkenBlendMode,
                // CILinearDodgeBlendMode, CIMultiplyBlendMode, CISourceOverCompositing.
                let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
                compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
                compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
                // let sharpenedFilter = CIFilter(name: "CISharpenLuminance")!
                // sharpenedFilter.setValue(compositeFilter.outputImage, forKey: kCIInputImageKey)
                // sharpenedFilter.setValue(2, forKey: kCIInputSharpnessKey)
                // let colorControlsFilter = CIFilter(name: "CIColorControls")!
                // colorControlsFilter.setValue(sharpenedFilter.outputImage, forKey: kCIInputImageKey)
                // colorControlsFilter.setValue(0.7, forKey: kCIInputSaturationKey)
                let lastImg = compositeFilter.outputImage!
                DispatchQueue.main.async { [weak self] in
                    self!.mImgView!.image = UIImage(ciImage: lastImg)
                }
            }
        }
    }
func createCVPixelBuffer(from image: UIImage, with frame: CGRect) -> CVPixelBuffer? {
let options: [String: Any] = [
kCVPixelBufferCGImageCompatibilityKey as String: true,
kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
]
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
Int(frame.width),
Int(frame.height),
kCVPixelFormatType_32BGRA,
options as CFDictionary,
&pixelBuffer)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let colorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pixelData,
width: Int(frame.width),
height: Int(frame.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: colorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) else {
return nil
}
context.translateBy(x: -frame.origin.x, y: -frame.origin.y)
context.draw(image.cgImage!, in: CGRect(origin: .zero, size: image.size))
CVPixelBufferUnlockBaseAddress(buffer, [])
return buffer
}
//
func joinImages2( leftImage:CIImage, rightImage:CIImage) -> CIImage {
let left = UIImage(ciImage: leftImage )
let right = UIImage(ciImage: rightImage )
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
let newImageSize = CGSize(width:imageWidth, height: imageHeight);
UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext();
let ci = CIImage(cgImage: image.cgImage!)
return ci
}
@objc func buttonPressed(sender:UIButton){
if(sender.tag == 10){
let vc:PlayContoller4 = PlayContoller4()
self.navigationController?.pushViewController(vc, animated: true)
}
}
/// Decodes an MV-HEVC (spatial) movie frame-by-frame, composites the left/right
/// eye images with `joinImages`, and writes the result to `outputFile`.
/// - Parameters:
///   - inputFile: source movie URL (expected to be spatial video; a warning is
///     printed but conversion proceeds if it is not).
///   - outputFile: destination; any existing file at that path is removed first.
///   - progress: optional callback receiving a 0...1 completion fraction.
/// - Throws: reader/writer errors from AVFoundation or the VideoWriter.
func convertVideo( inputFile : URL, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
    // Best-effort removal of a stale output file; failure here is non-fatal.
    do {
        try FileManager.default.removeItem(atPath: outputFile.path)
        print("视频文件删除成功")
    } catch {
        print("删除视频文件出错:\(error)")
    }
    // Load the AVAsset
    let asset = AVAsset(url: inputFile)
    let assetReader = try AVAssetReader(asset: asset)
    // Spatial videos carry this QuickTime metadata key; warn (but continue) if absent.
    let userDataItems = try await asset.loadMetadata(for: .quickTimeMetadata)
    let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
    if spacialCharacteristics.isEmpty {
        print("该视频不是空间视频")
    }
    let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
    // `type` is an instance property selecting the output style; type 3 keeps the
    // full height, every other type writes a half-height frame.
    // Fix: `vw` is now non-optional — both branches assign it, so the original
    // `VideoWriter?` declaration and the `vw!` force-unwraps were unnecessary.
    let vw: VideoWriter
    if type == 3 {
        vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
    } else {
        vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
    }
    // Request both MV-HEVC layers (left and right eye) from the decoder.
    let output = try await AVAssetReaderTrackOutput(
        track: asset.loadTracks(withMediaType: .video).first!,
        outputSettings: [
            AVVideoDecompressionPropertiesKey: [
                kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
            ],
        ]
    )
    assetReader.add(output)
    assetReader.startReading()
    let duration = try await asset.load(.duration)
    if let playerItem = player.currentItem {
        // NOTE(review): looks like debug leftover — installs a composition that only
        // prints each source frame; confirm whether it is still needed.
        playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { request in
            print(request.sourceImage)
        }
    }
    while let nextSampleBuffer = output.copyNextSampleBuffer() {
        // Fix: skip untagged samples instead of aborting the whole conversion —
        // the original `return` here left the writer unfinished.
        guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { continue }
        let leftEyeBuffer = taggedBuffers.first(where: {
            $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
        })?.buffer
        let rightEyeBuffer = taggedBuffers.first(where: {
            $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
        })?.buffer
        if let leftEyeBuffer,
           let rightEyeBuffer,
           case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
           case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
            let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
            let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
            let newpb = joinImages(leftImage: lciImage, rightImage: rciImage)
            let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
            _ = vw.add(image: newpb, presentationTime: time)
            // Fix: compare times in seconds. `CMTime.value` is timescale-dependent,
            // so the original Float(time.value)/Float(duration.value) ratio was wrong
            // whenever the sample and duration timescales differed.
            progress?(Float(CMTimeGetSeconds(time) / CMTimeGetSeconds(duration)))
        }
    }
    _ = try await vw.finish()
    print( "Finished")
}
/// Returns the video track's preferred transform together with its display size
/// (the natural size with the transform applied, absolute-valued so rotated
/// videos report positive dimensions).
/// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
    let videoTracks = try await asset.loadTracks(withMediaType: AVMediaType.video)
    guard let videoTrack = videoTracks.first else {
        throw VideoReaderError.invalidVideo
    }
    let rawSize = try await videoTrack.load(.naturalSize)
    let transform = try await videoTrack.load(.preferredTransform)
    let displaySize = rawSize.applying(transform)
    return (transform, CGSize(width: abs(displaySize.width), height: abs(displaySize.height)))
}
/// Renders a CIImage through a CIContext and wraps the result in a UIImage.
/// - Returns: the rendered image, or nil when CGImage creation fails.
func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? {
    let renderContext = CIContext(options: nil)
    guard let cgImage = renderContext.createCGImage(ciImage, from: ciImage.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}
//
/// Composites the two eye images side-by-side into a single half-resolution
/// frame: the output is one source-width wide and half a source-height tall,
/// with each eye squeezed into its own horizontal half.
/// NOTE(review): each eye is also vertically squashed to half height — assumed
/// intentional (half-height SBS layout); confirm against the writer's format.
func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
    let leftUI = UIImage(ciImage: leftImage)
    let rightUI = UIImage(ciImage: rightImage)
    let outWidth = leftUI.size.width / 2 + rightUI.size.width / 2
    let outHeight = leftUI.size.height / 2
    UIGraphicsBeginImageContextWithOptions(CGSize(width: outWidth, height: outHeight), false, 1)
    let halfWidth = outWidth / 2
    leftUI.draw(in: CGRect(x: 0, y: 0, width: halfWidth, height: outHeight))
    rightUI.draw(in: CGRect(x: halfWidth, y: 0, width: halfWidth, height: outHeight))
    let combined = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    return CIImage(cgImage: combined.cgImage!)
}
/// Renders a CIImage into a newly allocated 32ARGB CVPixelBuffer sized to the
/// image's extent.
/// - Returns: the filled buffer, or nil when allocation fails.
func pixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
    let bufferAttributes: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: kCFBooleanTrue,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: kCFBooleanTrue
    ]
    var buffer: CVPixelBuffer?
    let creationStatus = CVPixelBufferCreate(kCFAllocatorDefault,
                                             Int(ciImage.extent.width),
                                             Int(ciImage.extent.height),
                                             kCVPixelFormatType_32ARGB,
                                             bufferAttributes as CFDictionary,
                                             &buffer)
    guard creationStatus == kCVReturnSuccess, let buffer else {
        return nil
    }
    CIContext().render(ciImage, to: buffer)
    return buffer
}
}

View File

@ -0,0 +1,283 @@
//
// PlayController2.swift
// tdvideo
//
// Created by mac on 2024/2/1.
//
/*
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
let image = photos[indexPath.item]
let h = isSpatialImage(from: image)
self.navigationController?.popViewController(animated: true)
}
//
DispatchQueue.main.async { [self] in
collectionView.reloadData()
}
*/
import UIKit
import Photos
import ImageIO
import CoreFoundation
import UIKit
import Photos
import ImageIO
import CoreGraphics
import MobileCoreServices
import AVKit
/// Collection cell showing a photo thumbnail with a small red badge that
/// reports the source image's frame count.
class PhotoCell: UICollectionViewCell {
    /// Thumbnail image, cropped to fill the whole cell.
    let imageView: UIImageView = {
        let view = UIImageView()
        view.contentMode = .scaleAspectFill
        view.clipsToBounds = true
        return view
    }()

    /// Red rounded badge, pinned top-left, for the frame count.
    let frameCountLabel: UILabel = {
        let label = UILabel()
        label.textColor = .white
        label.backgroundColor = .red
        label.textAlignment = .center
        label.font = UIFont.boldSystemFont(ofSize: 12)
        label.layer.cornerRadius = 8
        label.clipsToBounds = true
        return label
    }()

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupViews()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupViews()
    }

    /// Adds both subviews and pins them: the image fills the cell, the badge
    /// sits 8pt in from the top-left at a fixed 40x20 size.
    private func setupViews() {
        for subview in [imageView, frameCountLabel] {
            subview.translatesAutoresizingMaskIntoConstraints = false
            addSubview(subview)
        }
        NSLayoutConstraint.activate([
            imageView.topAnchor.constraint(equalTo: topAnchor),
            imageView.leadingAnchor.constraint(equalTo: leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: bottomAnchor),
            frameCountLabel.topAnchor.constraint(equalTo: topAnchor, constant: 8),
            frameCountLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 8),
            frameCountLabel.widthAnchor.constraint(equalToConstant: 40),
            frameCountLabel.heightAnchor.constraint(equalToConstant: 20)
        ])
    }
}
/// Photo-library picker: shows every image in a 3-column grid; tapping a photo
/// hands its raw data back through `mediaSelectedHandler` and dismisses the
/// controller.
class PlayControllerImg: UIViewController, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
    var collectionView: UICollectionView!
    var fetchResult: PHFetchResult<PHAsset>!   // assets backing the grid, newest first
    var photos: [UIImage] = []                 // decoded images shown in the grid
    var mediaSelectedHandler: ((Data) -> Void)?

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCollectionView()
        fetchPhotos()
        collectionView.dataSource = self
        collectionView.delegate = self
    }

    /// Builds the full-screen collection view and registers the cell class.
    private func setupCollectionView() {
        let layout = UICollectionViewFlowLayout()
        layout.minimumLineSpacing = 10
        layout.minimumInteritemSpacing = 10
        collectionView = UICollectionView(frame: view.bounds, collectionViewLayout: layout)
        collectionView.backgroundColor = .white
        collectionView.register(PhotoCell.self, forCellWithReuseIdentifier: "PhotoCell")
        view.addSubview(collectionView)
        collectionView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            collectionView.topAnchor.constraint(equalTo: view.topAnchor),
            collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
    }

    /// Loads every library image synchronously into `photos`, newest first.
    /// NOTE(review): synchronous full-data requests will block for large
    /// libraries — consider async thumbnail requests.
    func fetchPhotos() {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchResult = PHAsset.fetchAssets(with: .image, options: fetchOptions)
        for index in 0..<fetchResult.count {
            let asset = fetchResult.object(at: index)
            guard asset.mediaType == .image else { continue }
            // Consistency fix: reuse the shared synchronous fetch helper instead
            // of duplicating the PHImageManager request code.
            if let imageData = getImageData(for: asset), let image = UIImage(data: imageData) {
                photos.append(image)
            }
        }
    }

    /// True when the asset carries the QuickTime spatial-format metadata key.
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        let isSpatialVideo = metadata.contains { item in
            if let identifier = item.identifier?.rawValue {
                return identifier == "mdta/com.apple.quicktime.spatial.format-version"
            }
            return false
        }
        return isSpatialVideo
    }

    /// Async variant of the spatial-video check using `loadMetadata`.
    func isSSVideo(asset:AVAsset)async throws->Bool{
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        return !spacialCharacteristics.isEmpty
    }

    // MARK: - UICollectionViewDataSource
    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return photos.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell", for: indexPath) as! PhotoCell
        cell.imageView.image = photos[indexPath.item]
        // The badge is only shown for multi-frame images.
        let frameCount = getFrameCount(for: indexPath.item)
        cell.frameCountLabel.isHidden = frameCount <= 1
        cell.frameCountLabel.text = "\(frameCount)"
        return cell
    }

    // MARK: - UICollectionViewDelegateFlowLayout
    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        let width = collectionView.bounds.width / 3 - 10
        return CGSize(width: width, height: width)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    /// Hands the tapped image's raw data to the caller, then closes the picker.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        let asset = fetchResult.object(at: indexPath.item)
        guard asset.mediaType == .image else { return }
        // Consistency fix: the shared helper is synchronous, so the handler still
        // fires before the dismissal is scheduled, matching the original ordering.
        if let imageData = getImageData(for: asset) {
            mediaSelectedHandler?(imageData)
        }
        DispatchQueue.main.async {
            self.dismiss(animated: true, completion: nil)
        }
    }

    /// Resolves a local file URL for video assets; other media types are ignored.
    private func getMediaURL(from asset: PHAsset, completion: @escaping (URL) -> Void) {
        if asset.mediaType == .video {
            let requestOptions = PHVideoRequestOptions()
            requestOptions.isNetworkAccessAllowed = true
            PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { (avAsset, _, _) in
                if let avAsset = avAsset as? AVURLAsset {
                    completion(avAsset.url)
                }
            }
        } else {
            // Images are delivered through getImageData(for:) instead.
        }
    }

    /// Number of frames contained in the asset's image data (0 on failure).
    private func getFrameCount(for index: Int) -> Int {
        let asset = fetchResult.object(at: index)
        if let imageData = getImageData(for: asset),
           let cgImageSource = CGImageSourceCreateWithData(imageData as CFData, nil) {
            return CGImageSourceGetCount(cgImageSource)
        }
        return 0
    }

    /// Synchronously fetches the asset's full-quality image data.
    private func getImageData(for asset: PHAsset) -> Data? {
        var imageData: Data?
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (data, _, _, _) in
            imageData = data
        }
        return imageData
    }

    /// Encodes a CGImage as PNG into a mutable CFData (nil if allocation fails).
    func convertCGImageToCFData(cgImage: CGImage) -> CFData? {
        let data = CFDataCreateMutable(kCFAllocatorDefault, 0)
        if let data = data {
            if let destination = CGImageDestinationCreateWithData(data, kUTTypePNG, 1, nil) {
                CGImageDestinationAddImage(destination, cgImage, nil)
                CGImageDestinationFinalize(destination)
            }
        }
        return data
    }
}

View File

@ -0,0 +1,299 @@
//
// ViewController2.swift
// tdvideo
//
// Created by mac on 2024/2/4.
//
import UIKit
import AVKit
import AVFoundation
import CoreImage
import Foundation
import Observation
import VideoToolbox
import CoreMedia
import MobileCoreServices
//
/// Demo screen: plays a bundled spatial (MV-HEVC) movie and converts it to
/// several stereo presentation styles via `VideoConvertor2`, selected by a
/// segmented control. A button opens a custom picker to swap the source asset.
class ViewController2: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    // let convertor = VideoConvertor()
    let convertor2 = VideoConvertor2()
    let videoConverter = SpatialVideoConverter()
    var metview: MetalPlayer?
    var videoOriginalAsset: AVAsset?   // the asset as originally bundled/picked
    var videoTempAsset: AVAsset?       // the asset currently being played (may be a converted copy)
    var sourceVideoURL: URL?
    var outputVideoURL: URL?
    var playerLay: AVPlayerLayer?
    var player: AVPlayer = AVPlayer()
    var btn3: UIButton?                // conversion-progress readout

    /// Registers for the "upvideo" notification (currently unused — the call in
    /// viewDidLoad is commented out).
    func startObserving() {
        let nc = NotificationCenter.default
        nc.addObserver(self, selector: #selector(handleNotification(_:)), name: NSNotification.Name(rawValue: "upvideo"), object: nil)
    }

    @objc func handleNotification(_ notification: Notification) {
        // Placeholder: no handling implemented yet.
        if notification.name.rawValue == "upvideo" {
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // startObserving()
        self.view.backgroundColor = UIColor.brown
        // print(VTIsStereoMVHEVCEncodeSupported())
        // Picker button (tag 10).
        let btn1 = UIButton.init(frame: CGRect.init(x: 20, y: 120, width: 180, height: 50))
        btn1.setTitle("从相册选择视频", for: UIControl.State.normal)
        self.view.addSubview(btn1)
        btn1.tag = 10
        btn1.addTarget(self, action: #selector(buttonPressed(sender:)), for: UIControl.Event.touchUpInside)
        // Progress readout button (tag 12, no action attached).
        btn3 = UIButton.init(frame: CGRect.init(x: 150, y: 60, width: 180, height: 50))
        btn3!.setTitle("进度=0.0", for: UIControl.State.normal)
        self.view.addSubview(btn3!)
        btn3!.tag = 12
        // Bundled demo movie and a documents-directory output path.
        let path = Bundle.main.path(forResource: "IMG_0071", ofType: "MOV")
        sourceVideoURL = URL.init(filePath: path!)
        outputVideoURL = URL.documentsDirectory.appending(path: "output1111.mp4")
        videoOriginalAsset = AVAsset(url: sourceVideoURL!)
        videoTempAsset = videoOriginalAsset
        // Output-style selector.
        let segmentedControl = UISegmentedControl(items: ["立体视频","空间视频", "交叉眼", "红蓝立体","高斯模糊"])
        segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)
        segmentedControl.selectedSegmentIndex = 0
        self.view.addSubview(segmentedControl)
        segmentedControl.layer.borderWidth = 1.0
        segmentedControl.layer.borderColor = UIColor.blue.cgColor
        segmentedControl.tintColor = UIColor.blue
        let normalTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white]
        let selectedTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.blue]
        segmentedControl.setTitleTextAttributes(normalTextAttributes, for: .normal)
        segmentedControl.setTitleTextAttributes(selectedTextAttributes, for: .selected)
        segmentedControl.addTarget(self, action: #selector(segmentedControlValueChanged(_:)), for: .valueChanged)
        // Player surface.
        playerLay = AVPlayerLayer()
        playerLay!.backgroundColor = UIColor.black.cgColor
        playerLay!.frame = CGRect.init(x: 10, y: 180, width: 350, height: 380)
        self.view.layer.addSublayer(playerLay!)
        playerLay!.cornerRadius = 6
        play()
    }

    /// Replaces the layer's player with a fresh one for the current asset and starts it.
    func play(){
        let playerItem = AVPlayerItem(asset: videoTempAsset!)
        playerLay!.player = AVPlayer(playerItem: playerItem)
        playerLay!.player!.play()
    }

    @objc func segmentedControlValueChanged(_ sender: UISegmentedControl) {
        let selectedIndex = sender.selectedSegmentIndex
        print("选中了第 \(selectedIndex) 个选项")
        player.pause()
        NotificationCenter.default.removeObserver(self)
        if selectedIndex == 0 {
            // Original stereo video, unconverted.
            videoTempAsset = videoOriginalAsset
            play()
        } else {
            outputVideoURL = URL.documentsDirectory.appending(path: "output11112.mp4")
        }
        if selectedIndex == 1 {
            // Spatial-video conversion via SpatialVideoConverter is currently disabled
            // (the original Task calling convertStereoscopicVideoToSpatialVideo was
            // commented out).
        }
        // Indices 2 (cross-eye), 3 (anaglyph) and 4 (gaussian blur) shared three
        // byte-identical branches; the converter `type` equals the segment index.
        if (2...4).contains(selectedIndex) {
            runConversion(type: selectedIndex)
        }
    }

    /// Runs VideoConvertor2 on the current asset and swaps the player over to the
    /// converted file once progress passes 99%. Extracted from three duplicated
    /// branches; also replaces the original `[self]` capture + `self!` unwraps
    /// with a weak capture to avoid retaining/crashing a deallocated controller.
    private func runConversion(type: Int) {
        Task {
            convertor2.type = type
            try await convertor2.convertVideo(asset: videoTempAsset!, outputFile: outputVideoURL!) { [weak self] progress in
                print(progress)
                DispatchQueue.main.async {
                    guard let self else { return }
                    self.btn3!.setTitle("进度=" + String(progress), for: UIControl.State.normal)
                    if progress > 0.99 {
                        self.videoTempAsset = AVAsset(url: self.outputVideoURL!)
                        self.play()
                    }
                }
            }
        }
    }

    @objc func buttonPressed(sender:UIButton){
        if sender.tag == 10 {
            // Present the custom video picker; the handler swaps in the chosen asset.
            let vc: PlayControllerVideo = PlayControllerVideo()
            self.present(vc, animated: true, completion: nil)
            vc.mediaSelectedHandler = { [weak self] ass in
                print("回调")
                print(ass)
                guard let self else { return }
                self.videoTempAsset = ass
                self.play()
            }
        } else {
        }
    }

    /// UIImagePickerController delegate path (kept for the commented-out system
    /// picker); loads the chosen movie and warns if it is not spatial video.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        if let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String, mediaType == "public.movie" {
            let videoURL = info[.mediaURL] as? URL
            // String(describing:) produces the same printed text while silencing the
            // optional-interpolation warning.
            print("Selected video URL: \(String(describing: videoURL))")
            sourceVideoURL = videoURL
            videoOriginalAsset = AVAsset(url: sourceVideoURL!)
            videoTempAsset = videoOriginalAsset
            if !isSpatialVideo(asset: videoTempAsset!) {
                showTextAlert(title: "提示", message: "当前视频不是空间视频")
            }
            play()
        }
        dismiss(animated: true, completion: nil)
    }

    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        dismiss(animated: true, completion: nil)
    }

    /// True when the asset carries the QuickTime spatial-format metadata key.
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        let isSpatialVideo = metadata.contains { item in
            if let identifier = item.identifier?.rawValue {
                return identifier == "mdta/com.apple.quicktime.spatial.format-version"
            }
            return false
        }
        return isSpatialVideo
    }

    /// Shows a simple OK alert with the given title and message.
    func showTextAlert(title: String, message: String) {
        let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
        let okAction = UIAlertAction(title: "OK", style: .default, handler: nil)
        alertController.addAction(okAction)
        present(alertController, animated: true, completion: nil)
    }
}