diff --git a/OrderScheduling.xcodeproj/project.pbxproj b/OrderScheduling.xcodeproj/project.pbxproj index e507b8d..5169ea4 100644 --- a/OrderScheduling.xcodeproj/project.pbxproj +++ b/OrderScheduling.xcodeproj/project.pbxproj @@ -35,6 +35,20 @@ 791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */; }; 792EE0952AA74E0A00A212AB /* PushNotiCommonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */; }; 792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */; }; + 7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */; }; + 7938A8252E4055800017508A /* YFTimer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A81E2E4055800017508A /* YFTimer.m */; }; + 7938A8292E4055800017508A /* VideoPlayView.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8062E4055800017508A /* VideoPlayView.m */; }; + 7938A82B2E4055800017508A /* YFTimerManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8202E4055800017508A /* YFTimerManager.m */; }; + 7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */; }; + 7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8162E4055800017508A /* PCMStreamPlayer.m */; }; + 7938A82F2E4055800017508A /* SRWebSocket.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8182E4055800017508A /* SRWebSocket.m */; }; + 7938A8312E4055800017508A /* g711.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8102E4055800017508A /* g711.m */; }; + 7938A8332E4055800017508A /* g726.m in Sources */ = {isa = PBXBuildFile; fileRef = 
7938A8122E4055800017508A /* g726.m */; }; + 7938A8352E4055800017508A /* H264DecodeTool.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8142E4055800017508A /* H264DecodeTool.m */; }; + 7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8372E4055D50017508A /* YFProgressHUD.m */; }; + 7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8392E4055D50017508A /* YLGIFImage.m */; }; + 7938A83F2E4055D50017508A /* YLImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A83B2E4055D50017508A /* YLImageView.m */; }; + 7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */; }; 7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */; }; 7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */; }; 7940277E2B43B9B600EC52D4 /* ConditionalSearchTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */; }; @@ -60,7 +74,6 @@ 79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */; }; 79CECC242A8B16D400B95D8B /* VehicleMonitoringListController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */; }; 79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */; }; - 79CECC282A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; 
fileRef = 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */; }; 79DD0DAA2A9481BC00768FE7 /* NotificationAuthTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DA92A9481BC00768FE7 /* NotificationAuthTool.swift */; }; 79DD0DB12A94B3DB00768FE7 /* EmptyView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB02A94B3DB00768FE7 /* EmptyView.swift */; }; 79DD0DB42A95F00B00768FE7 /* Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB32A95F00B00768FE7 /* Extension.swift */; }; @@ -68,6 +81,7 @@ 79E434252AA1919400AEB16C /* CommonAlertView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434242AA1919400AEB16C /* CommonAlertView.swift */; }; 79E434282AA1EFA500AEB16C /* SystemCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434272AA1EFA500AEB16C /* SystemCall.swift */; }; 79E4342A2AA5833F00AEB16C /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434292AA5833F00AEB16C /* CustomPicker.swift */; }; + 79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */; }; 79EAD8142A7B86610036E093 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8132A7B86610036E093 /* Assets.xcassets */; }; 79EAD8172A7B86610036E093 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */; }; 79FB75EC2A988EC000DB00A4 /* MessageCenterTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79FB75EB2A988EC000DB00A4 /* MessageCenterTool.swift */; }; @@ -145,6 +159,32 @@ 791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DispatchOrderController.swift; sourceTree = ""; }; 792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonView.swift; 
sourceTree = ""; }; 792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonTool.swift; sourceTree = ""; }; + 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorHistoryController.swift; sourceTree = ""; }; + 7938A8052E4055800017508A /* VideoPlayView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VideoPlayView.h; sourceTree = ""; }; + 7938A8062E4055800017508A /* VideoPlayView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VideoPlayView.m; sourceTree = ""; }; + 7938A80D2E4055800017508A /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = ""; }; + 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = ""; }; + 7938A80F2E4055800017508A /* g711.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = g711.h; sourceTree = ""; }; + 7938A8102E4055800017508A /* g711.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g711.m; sourceTree = ""; }; + 7938A8112E4055800017508A /* g726.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = g726.h; sourceTree = ""; }; + 7938A8122E4055800017508A /* g726.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g726.m; sourceTree = ""; }; + 7938A8132E4055800017508A /* H264DecodeTool.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = H264DecodeTool.h; sourceTree = ""; }; + 7938A8142E4055800017508A /* H264DecodeTool.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = H264DecodeTool.m; sourceTree = ""; }; + 7938A8152E4055800017508A /* PCMStreamPlayer.h */ = {isa = PBXFileReference; lastKnownFileType 
= sourcecode.c.h; path = PCMStreamPlayer.h; sourceTree = ""; }; + 7938A8162E4055800017508A /* PCMStreamPlayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PCMStreamPlayer.m; sourceTree = ""; }; + 7938A8172E4055800017508A /* SRWebSocket.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SRWebSocket.h; sourceTree = ""; }; + 7938A8182E4055800017508A /* SRWebSocket.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SRWebSocket.m; sourceTree = ""; }; + 7938A81D2E4055800017508A /* YFTimer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimer.h; sourceTree = ""; }; + 7938A81E2E4055800017508A /* YFTimer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimer.m; sourceTree = ""; }; + 7938A81F2E4055800017508A /* YFTimerManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimerManager.h; sourceTree = ""; }; + 7938A8202E4055800017508A /* YFTimerManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimerManager.m; sourceTree = ""; }; + 7938A8362E4055D50017508A /* YFProgressHUD.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFProgressHUD.h; sourceTree = ""; }; + 7938A8372E4055D50017508A /* YFProgressHUD.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFProgressHUD.m; sourceTree = ""; }; + 7938A8382E4055D50017508A /* YLGIFImage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLGIFImage.h; sourceTree = ""; }; + 7938A8392E4055D50017508A /* YLGIFImage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YLGIFImage.m; sourceTree = ""; }; + 7938A83A2E4055D50017508A /* YLImageView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLImageView.h; sourceTree = ""; }; + 7938A83B2E4055D50017508A /* YLImageView.m */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.c.objc; path = YLImageView.m; sourceTree = ""; }; + 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = ""; }; 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringConfigView.swift; sourceTree = ""; }; 7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchView.swift; sourceTree = ""; }; 7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchTool.swift; sourceTree = ""; }; @@ -170,7 +210,6 @@ 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringController.swift; sourceTree = ""; }; 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringListController.swift; sourceTree = ""; }; 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorVideoController.swift; sourceTree = ""; }; - 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = ""; }; 79CECC9D2A8E03C200B95D8B /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; }; 79CECC9F2A8E03CF00B95D8B /* AVFoundation.framework */ = {isa = PBXFileReference; 
lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 79CECCA12A8E03D900B95D8B /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; @@ -196,6 +235,7 @@ 79E434242AA1919400AEB16C /* CommonAlertView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CommonAlertView.swift; sourceTree = ""; }; 79E434272AA1EFA500AEB16C /* SystemCall.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SystemCall.swift; sourceTree = ""; }; 79E434292AA5833F00AEB16C /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; + 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VerticalLoopScrollLabel.swift; sourceTree = ""; }; 79EAD8072A7B86600036E093 /* OrderScheduling.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = OrderScheduling.app; sourceTree = BUILT_PRODUCTS_DIR; }; 79EAD8132A7B86610036E093 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 79EAD8162A7B86610036E093 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; @@ -436,6 +476,77 @@ path = Tool; sourceTree = ""; }; + 7938A80B2E4055800017508A /* view */ = { + isa = PBXGroup; + children = ( + 7938A8052E4055800017508A /* VideoPlayView.h */, + 7938A8062E4055800017508A /* VideoPlayView.m */, + ); + path = view; + sourceTree = ""; + }; + 7938A80C2E4055800017508A /* Video */ = { + isa = PBXGroup; + children = ( + 7938A80B2E4055800017508A /* view */, + ); + path = 
Video; + sourceTree = ""; + }; + 7938A8192E4055800017508A /* VideoTools */ = { + isa = PBXGroup; + children = ( + 7938A80D2E4055800017508A /* AAPLEAGLLayer.h */, + 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */, + 7938A80F2E4055800017508A /* g711.h */, + 7938A8102E4055800017508A /* g711.m */, + 7938A8112E4055800017508A /* g726.h */, + 7938A8122E4055800017508A /* g726.m */, + 7938A8132E4055800017508A /* H264DecodeTool.h */, + 7938A8142E4055800017508A /* H264DecodeTool.m */, + 7938A8152E4055800017508A /* PCMStreamPlayer.h */, + 7938A8162E4055800017508A /* PCMStreamPlayer.m */, + 7938A8172E4055800017508A /* SRWebSocket.h */, + 7938A8182E4055800017508A /* SRWebSocket.m */, + ); + path = VideoTools; + sourceTree = ""; + }; + 7938A8212E4055800017508A /* YFTimerTool */ = { + isa = PBXGroup; + children = ( + 7938A81D2E4055800017508A /* YFTimer.h */, + 7938A81E2E4055800017508A /* YFTimer.m */, + 7938A81F2E4055800017508A /* YFTimerManager.h */, + 7938A8202E4055800017508A /* YFTimerManager.m */, + ); + path = YFTimerTool; + sourceTree = ""; + }; + 7938A8222E4055800017508A /* Video */ = { + isa = PBXGroup; + children = ( + 7938A80C2E4055800017508A /* Video */, + 7938A8192E4055800017508A /* VideoTools */, + 7938A8212E4055800017508A /* YFTimerTool */, + 7938A83C2E4055D50017508A /* YFProgressHUD */, + ); + path = Video; + sourceTree = ""; + }; + 7938A83C2E4055D50017508A /* YFProgressHUD */ = { + isa = PBXGroup; + children = ( + 7938A8362E4055D50017508A /* YFProgressHUD.h */, + 7938A8372E4055D50017508A /* YFProgressHUD.m */, + 7938A8382E4055D50017508A /* YLGIFImage.h */, + 7938A8392E4055D50017508A /* YLGIFImage.m */, + 7938A83A2E4055D50017508A /* YLImageView.h */, + 7938A83B2E4055D50017508A /* YLImageView.m */, + ); + path = YFProgressHUD; + sourceTree = ""; + }; 7949FF122B51093F00B75A21 /* CustomMap */ = { isa = PBXGroup; children = ( @@ -658,6 +769,7 @@ children = ( 79B966372AB0651C00308A8D /* VehicleLogoutView.swift */, 794027792B3BD46B00EC52D4 /* 
VehicleMonitoringConfigView.swift */, + 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */, ); path = View; sourceTree = ""; @@ -668,7 +780,8 @@ 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */, 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */, 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */, - 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */, + 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */, + 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */, ); path = ViewController; sourceTree = ""; @@ -754,6 +867,7 @@ 791887732A7CD633007EA0C1 /* Rescue */, 7918873F2A7CCCCD007EA0C1 /* Main */, 79DD0DAB2A94A0EE00768FE7 /* Source */, + 7938A8222E4055800017508A /* Video */, 79EAD8132A7B86610036E093 /* Assets.xcassets */, 79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */, 79EAD8182A7B86610036E093 /* Info.plist */, @@ -1012,6 +1126,15 @@ 791887952A80C361007EA0C1 /* WebViewController.swift in Sources */, 79CECC192A89EE6A00B95D8B /* ReviewFailedController.swift in Sources */, 791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources */, + 7938A8252E4055800017508A /* YFTimer.m in Sources */, + 7938A8292E4055800017508A /* VideoPlayView.m in Sources */, + 7938A82B2E4055800017508A /* YFTimerManager.m in Sources */, + 7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */, + 7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */, + 7938A82F2E4055800017508A /* SRWebSocket.m in Sources */, + 7938A8312E4055800017508A /* g711.m in Sources */, + 7938A8332E4055800017508A /* g726.m in Sources */, + 7938A8352E4055800017508A /* H264DecodeTool.m in Sources */, 7918877B2A7CDD1A007EA0C1 /* Initial.swift in Sources */, 7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */, 791887C42A84BFDB007EA0C1 /* Tool.swift in Sources */, @@ -1020,6 +1143,7 @@ 7918878F2A809E37007EA0C1 /* TimerStrings.swift in Sources */, 
7918878B2A7CE9E0007EA0C1 /* main.swift in Sources */, 791887A02A80CA10007EA0C1 /* RequestList.swift in Sources */, + 7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */, 79FB761C2A9EEC3700DB00A4 /* GroupData.swift in Sources */, 79FB76172A9DFC9600DB00A4 /* NotificationSetUpController.swift in Sources */, 79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */, @@ -1045,9 +1169,13 @@ 79FB76222A9EEED900DB00A4 /* CommonKeyStrings.swift in Sources */, 79DD0DBB2A971EB300768FE7 /* ZDViewController.swift in Sources */, 791887BF2A839716007EA0C1 /* EntryStrings.swift in Sources */, + 7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */, + 7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */, + 7938A83F2E4055D50017508A /* YLImageView.m in Sources */, 794FBB192A8F4AF000D57BB8 /* MessageCount.swift in Sources */, 791887892A7CE79E007EA0C1 /* LoginController.swift in Sources */, 791887A42A80CA30007EA0C1 /* ResponseModel.swift in Sources */, + 7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */, 794FBB0D2A8F040D00D57BB8 /* HistoryController.swift in Sources */, 7918878D2A8081D4007EA0C1 /* ActionStrings.swift in Sources */, 791887972A80C6CD007EA0C1 /* LocalizedStrings.swift in Sources */, @@ -1055,12 +1183,12 @@ 79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */, 791887452A7CD05B007EA0C1 /* MainTabBarController.swift in Sources */, 791887792A7CD64C007EA0C1 /* RescueController.swift in Sources */, + 79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */, 79CECC122A89BD1A00B95D8B /* MessageCenterController.swift in Sources */, 794FBB1F2A92F7C300D57BB8 /* WebViewTool.swift in Sources */, 791887822A7CE71D007EA0C1 /* AppKeyStrings.swift in Sources */, 7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */, 79CB07CC2AA8465A00154B61 /* UserPermission.swift in Sources */, - 79CECC282A8CADEA00B95D8B /* 
VehicleMonitoringVideoDetailController.swift in Sources */, 792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */, 79FB75F02A98A26C00DB00A4 /* AcceptOrderTool.swift in Sources */, 791887A12A80CA10007EA0C1 /* ApiList.swift in Sources */, diff --git a/OrderScheduling/.DS_Store b/OrderScheduling/.DS_Store index 11fc25c..3c7bad3 100644 Binary files a/OrderScheduling/.DS_Store and b/OrderScheduling/.DS_Store differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json new file mode 100644 index 0000000..01eadcb --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png new file mode 100644 index 0000000..51e78fd Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png new file mode 100644 index 0000000..995b7f7 Binary files /dev/null and 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png new file mode 100644 index 0000000..9c72025 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json new file mode 100644 index 0000000..5da3b52 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm_level1.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm_level1@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm_level1@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png new file mode 100644 index 0000000..52348b1 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png new file mode 100644 index 0000000..74761e9 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@3x.png new file mode 100644 index 0000000..1c6ccaa Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/Contents.json new file mode 100644 index 0000000..61533eb --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm_level2.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm_level2@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm_level2@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2.png new file mode 100644 index 0000000..52e75b0 Binary files /dev/null and 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png new file mode 100644 index 0000000..011e0ca Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png new file mode 100644 index 0000000..b98b30e Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/Contents.json new file mode 100644 index 0000000..91b16e0 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm_level3.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm_level3@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm_level3@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3.png new file mode 100644 index 0000000..6db7509 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png new file mode 100644 index 0000000..4731d71 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@3x.png new file mode 100644 index 0000000..0248135 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/Contents.json new file mode 100644 index 0000000..ec11807 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_channel_alarm_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : 
"vehicleMonitoring_channel_alarm_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_channel_alarm_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon.png new file mode 100644 index 0000000..e453daa Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@2x.png new file mode 100644 index 0000000..7fbc10d Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@3x.png new file mode 100644 index 0000000..be4b902 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json new file mode 100644 index 0000000..29af728 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_history_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_history_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_history_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png new file mode 100644 index 0000000..6a05253 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png new file mode 100644 index 0000000..fc6878c Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png new file mode 100644 index 0000000..b19972b Binary files /dev/null and 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json new file mode 100644 index 0000000..fbfe1a3 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_offline_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_offline_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_offline_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png new file mode 100644 index 0000000..7235678 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@2x.png new file mode 100644 index 0000000..fff5d46 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@2x.png differ diff --git 
a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@3x.png new file mode 100644 index 0000000..9ff0c9d Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/Contents.json new file mode 100644 index 0000000..eea7be6 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_video_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_video_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_video_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon.png new file mode 100644 index 0000000..69826a8 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png new file mode 
100644 index 0000000..408138b Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@3x.png new file mode 100644 index 0000000..8f68be0 Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@3x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/Contents.json new file mode 100644 index 0000000..a903b28 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/Contents.json @@ -0,0 +1,22 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "ww_video_paly@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "ww_video_paly@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@2x.png new file mode 100644 index 0000000..3b80d2b Binary files /dev/null and b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@2x.png differ diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png new file mode 100644 index 0000000..06bb804 Binary files /dev/null and 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png differ diff --git a/OrderScheduling/Common/WebView/WebViewController.swift b/OrderScheduling/Common/WebView/WebViewController.swift index 1e5a7c0..f75d222 100644 --- a/OrderScheduling/Common/WebView/WebViewController.swift +++ b/OrderScheduling/Common/WebView/WebViewController.swift @@ -115,6 +115,14 @@ class WebViewController : ZDViewController { } webView.configuration.userContentController.removeScriptMessageHandler(forName: "nativeObject") } + + override func dd_backActionPop(_ isAnimated: Bool) { + if webView.canGoBack == true { + webView.goBack() + }else{ + super.dd_backActionPop(isAnimated) + } + } } extension WebViewController : WKScriptMessageHandler { @@ -141,6 +149,11 @@ extension WebViewController : WKScriptMessageHandler { let vc = AdditionalPhotoController(userOrderId: Int(userOrderId) ?? 0, orderCode: orderCode, taskOrderId: Int(taskOrderId) ?? 0,canModify: canModify) navigationController?.pushViewController(vc, animated: true) } + }else if action == "goMonitoring" { + let params = dict?["params"] as? [String:Any] + let code = params?["code"] as? 
String + let vc = VehicleMonitorHistoryController(code: code) + navigationController?.pushViewController(vc, animated: true) } } } diff --git a/OrderScheduling/Common/WebView/WebViewTool.swift b/OrderScheduling/Common/WebView/WebViewTool.swift index 9dba6d2..f83930b 100644 --- a/OrderScheduling/Common/WebView/WebViewTool.swift +++ b/OrderScheduling/Common/WebView/WebViewTool.swift @@ -40,6 +40,8 @@ open class WebViewTool : NSObject { case invoiceListInfo = "开票信息" case indexList = "二手车信息" case reportIndex = "报备" + case vehicleAlarmDetail = "报警详情" + case vehicleAlarmList = "车辆报警" } public override init() { @@ -144,6 +146,12 @@ open class WebViewTool : NSObject { case .reportIndex: vc = WebViewController(showNavBar:true, title: WebViewNameEnum.reportIndex.rawValue, url: "\((h5Models?.reportIndex)!)?token=\((USER.token)!)"+(appending ?? "")) break + case .vehicleAlarmList: + vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmList.rawValue, url: "\((h5Models?.vehicleAlarmList)!)?token=\((USER.token)!)"+(appending ?? "")) + break + case .vehicleAlarmDetail: + vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmDetail.rawValue, url: "\((h5Models?.vehicleAlarmDetail)!)?token=\((USER.token)!)"+(appending ?? 
"")) + break } if let vc { diff --git a/OrderScheduling/HttpRequestCenter/ApiList.swift b/OrderScheduling/HttpRequestCenter/ApiList.swift index d3ea816..58d3cc2 100644 --- a/OrderScheduling/HttpRequestCenter/ApiList.swift +++ b/OrderScheduling/HttpRequestCenter/ApiList.swift @@ -34,7 +34,11 @@ open class ApiList { public let vehicleMonitorList = "/supplierAppV2/dispatchApp/order/vehicleMonitorList" - public let getRtspChannel = "/gps/thirdparty-vehicle-position/getRtspUrl" + public let getRealtimeUrl = "/gps/xq-video-monitor/getRealtimeUrl" + + public let getReplayUrl = "/gps/xq-video-monitor/getReplayUrl" + + public let closeHistoryControl = "/gps/xq-video-monitor/closeHistoryControl" public let orderPhotoList = "/supplierAppV2/dispatchApp/order/orderPhotoList" @@ -69,4 +73,8 @@ open class ApiList { public let getConfigByCode = "/base/baseConfig/getConfigByCode" public let thisWeekNumber = "/toc-user/car-admin/thisWeekNumber" + + public let alarmList = "/supplierAppV2/dispatchApp/alarm/alarmList" + + public let getAlarmByCode = "/supplierAppV2/dispatchApp/alarm/getAlarmByCode" } diff --git a/OrderScheduling/HttpRequestCenter/ParametersList.swift b/OrderScheduling/HttpRequestCenter/ParametersList.swift index 715a580..8641e38 100644 --- a/OrderScheduling/HttpRequestCenter/ParametersList.swift +++ b/OrderScheduling/HttpRequestCenter/ParametersList.swift @@ -181,6 +181,14 @@ public struct RtspChannelParameters : Encodable { var external : Int = 1 } +public struct GetVideoUrlParameters : Encodable { + var vehicleId : Int? + var simNumber : String? + var channel : Int? + var startDate : String? + var endDate : String? 
+} + public struct OrderPhotoListParameters : Encodable { var userOrderId : Int var orderCode : String @@ -234,3 +242,18 @@ public struct GiveUpUserOrderParameters : Encodable { public struct ConfigByCodeParameters : Encodable { var code : String } + +public struct AlarmListParameters : Encodable { + var pageNum : Int + var pageSize : Int = 50 + var orderBy : String = "create_time" + var supplierId : Int? + var handStatus : Int + public enum HandStatusEnum : Int { + case pending = 0,dealWithByTechnical,dealWithByOperations + } +} + +public struct GetAlarmByCodeParameters : Encodable { + var code : String? +} diff --git a/OrderScheduling/HttpRequestCenter/RequestList.swift b/OrderScheduling/HttpRequestCenter/RequestList.swift index 554aac8..c38f8f4 100644 --- a/OrderScheduling/HttpRequestCenter/RequestList.swift +++ b/OrderScheduling/HttpRequestCenter/RequestList.swift @@ -75,8 +75,16 @@ open class RequestList { return DDAF.post(urlString: HOST+API.vehicleMonitorList,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) } - func getRtspChannel(prameters:P) -> Single?> { - return DDAF.post(urlString: HOST+API.getRtspChannel,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<[String]>.self) + func getRealtimeUrl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getRealtimeUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) + } + + func getReplayUrl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getReplayUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) + } + + func closeHistoryControl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.closeHistoryControl,parameters: 
prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) } func orderPhotoList(prameters:P) -> Single?> { @@ -144,4 +152,12 @@ open class RequestList { func thisWeekNumber()-> Single?> { return DDAF.get(urlString: HOST+API.thisWeekNumber,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel.self) } + + func alarmList(parameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.alarmList,parameters: parameters,encoding: JSONParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel<[AlarmListDataModel]>.self) + } + + func getAlarmByCode(parameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getAlarmByCode,parameters: parameters,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel.self) + } } diff --git a/OrderScheduling/HttpResponseModel/ResponseModel.swift b/OrderScheduling/HttpResponseModel/ResponseModel.swift index f963123..8a3567d 100644 --- a/OrderScheduling/HttpResponseModel/ResponseModel.swift +++ b/OrderScheduling/HttpResponseModel/ResponseModel.swift @@ -17,6 +17,10 @@ class ResponseModel : Decodable { var total : Int? } +struct CommonError : Error { + +} + class LoginDataModel : Decodable { var accessToken : LoginDataAccessTokenModel var refreshToken : LoginDataRefreshTokenModel @@ -193,7 +197,7 @@ public class VehicleMonitorListDataModel : Decodable { var taskList : [TaskModel]? var isSelected : Bool? = false var zIndex : Int? = 0 - + var number : String? 
public enum TerminalTypeEnum : String,Decodable { case APP = "APP" case GPS = "GPS" @@ -267,6 +271,8 @@ public class DispatchAppH5UrlDataModel : Decodable { var invoiceListInfo : String var indexList : String var reportIndex : String + var vehicleAlarmList : String + var vehicleAlarmDetail : String } public class VersionCheckDataModel : Decodable { @@ -370,3 +376,22 @@ public class JumpPageDataModel : Decodable { var url : String? var content : String? } + +public class AlarmListDataModel : Decodable { + var vehicleName : String? + var alarmTypeString : String? + var code : String? +} + +public class GetAlarmByCodeDataModel : Decodable { + var imei : String? + var channel : Int? + var startTime : String? + var endTime : String? + var vehicleId : Int? +} + +public class GetVideoUrlDataModel : Decodable { + var channelList : [String]? + var realtimeList : [String]? +} diff --git a/OrderScheduling/Main/OrderScheduling-Bridging-Header.h b/OrderScheduling/Main/OrderScheduling-Bridging-Header.h index 7e3f1db..fa69a6a 100644 --- a/OrderScheduling/Main/OrderScheduling-Bridging-Header.h +++ b/OrderScheduling/Main/OrderScheduling-Bridging-Header.h @@ -9,3 +9,5 @@ # ifdef NSFoundationVersionNumber_iOS_9_x_Max # import # endif + +# import "VideoPlayView.h" diff --git a/OrderScheduling/Rescue/View/AcceptOrderTool.swift b/OrderScheduling/Rescue/View/AcceptOrderTool.swift index b965d9c..c95284f 100644 --- a/OrderScheduling/Rescue/View/AcceptOrderTool.swift +++ b/OrderScheduling/Rescue/View/AcceptOrderTool.swift @@ -30,11 +30,13 @@ open class AcceptOrderTool : NSObject { // 来到首页的救援中-待接单 let tabBarVc = UIApplication.shared.dd_keyWindow.rootViewController as? MainTabBarController let currentNav = tabBarVc?.selectedViewController as? UINavigationController - currentNav?.popToRootViewController(animated: false) - tabBarVc?.selectedIndex = 0 - let nav = tabBarVc?.children.first as? UINavigationController - let vc = nav?.children.first as? 
RescueController - vc?.categoryView.selectItem(at: 0) + currentNav?.popToRootViewController(animated: true) + DispatchQueue.main.asyncAfter(deadline: .now()+0.25, execute: { + tabBarVc?.selectedIndex = 0 + let nav = tabBarVc?.children.first as? UINavigationController + let vc = nav?.children.first as? RescueController + vc?.categoryView.selectItem(at: 0) + }) } } }) diff --git a/OrderScheduling/VehicleMonitoring/.DS_Store b/OrderScheduling/VehicleMonitoring/.DS_Store index d48252a..60042e7 100644 Binary files a/OrderScheduling/VehicleMonitoring/.DS_Store and b/OrderScheduling/VehicleMonitoring/.DS_Store differ diff --git a/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift b/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift new file mode 100644 index 0000000..6427661 --- /dev/null +++ b/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift @@ -0,0 +1,93 @@ +// +// VerticalLoopScrollLabel.swift +// OrderScheduling +// +// Created by 中道 on 2025/7/28. +// + +import UIKit + +/// 可点击的纵向轮播标签控件,支持点击获取当前 index +class VerticalLoopScrollLabel: UIView { + private let scrollView = UIScrollView() + private let label1 = UILabel() + private let label2 = UILabel() + private var timer: Timer? + + /// 点击回调,返回当前索引 + var onTap: ((Int) -> Void)? + + var items: [String] = [] { + didSet { + guard !items.isEmpty else { return } + currentIdx = 0 + label1.text = items.first + label2.text = items.count > 1 ? 
items[1] : items.first + setNeedsLayout() + startLoop() + } + } + + var interval: TimeInterval = 2.5 + private var currentIdx = 0 + private var isLabel1OnTop = true + + override init(frame: CGRect) { + super.init(frame: frame) + clipsToBounds = true + scrollView.isScrollEnabled = false + addSubview(scrollView) + + let tap = UITapGestureRecognizer(target: self, action: #selector(handleTap)) + addGestureRecognizer(tap) + + for label in [label1, label2] { + label.font = .systemFont(ofSize: 16, weight: .semibold) + label.textColor = .white + label.textAlignment = .left + label.numberOfLines = 1 + label.lineBreakMode = .byTruncatingTail + scrollView.addSubview(label) + } + } + required init?(coder: NSCoder) { fatalError() } + + override func layoutSubviews() { + super.layoutSubviews() + scrollView.frame = bounds + label1.frame = CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height) + label2.frame = CGRect(x: 0, y: bounds.height, width: bounds.width, height: bounds.height) + scrollView.contentSize = CGSize(width: bounds.width, height: bounds.height * 2) + } + + private func startLoop() { + timer?.invalidate() + guard items.count > 1 else { return } + timer = Timer.scheduledTimer(withTimeInterval: interval, repeats: true, block: { [weak self] _ in + self?.scrollNext() + }) + } + + private func scrollNext() { + let fromLabel = isLabel1OnTop ? label1 : label2 + let toLabel = isLabel1OnTop ? 
label2 : label1 + let nextIdx = (currentIdx + 1) % items.count + toLabel.text = items[nextIdx] + // 动画向上滚动 + UIView.animate(withDuration: 0.35, animations: { + self.scrollView.contentOffset = CGPoint(x: 0, y: self.bounds.height) + }, completion: { _ in + // 滚动完后交换内容和位置 + self.scrollView.contentOffset = .zero + fromLabel.text = toLabel.text + self.currentIdx = nextIdx + self.isLabel1OnTop.toggle() + }) + } + + @objc private func handleTap() { + onTap?(currentIdx) + } + + deinit { timer?.invalidate() } +} diff --git a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift new file mode 100644 index 0000000..32478d6 --- /dev/null +++ b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift @@ -0,0 +1,467 @@ +// +// VehicleMonitorHistoryController.swift +// OrderScheduling +// +// Created by 中道 on 2025/7/31. +// + +import UIKit +import DDAutoUIKit_Private +import SnapKit +import RxSwift +import RxCocoa +import BRPickerView + +class VehicleMonitorHistoryController : ZDViewController { + var code : String? + let timeline = TimelineView() + let dateView = DateSwitcherView() + let realtimeButton = UIButton() + let disposeBag = DisposeBag() + var fromDateString : String? + var toDateString : String? + var alarmResponse : ResponseModel? + var refreshSub = ReplaySubject.create(bufferSize: 1) + var replaySub = ReplaySubject?>.create(bufferSize: 1) + var videoView = VideoPlayView() + var closeSub = ReplaySubject?>.create(bufferSize: 1) + init(code: String?) 
{ + self.code = code + super.init(nibName: nil, bundle: nil) + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + override func viewDidLoad() { + super.viewDidLoad() + dd_navigationItemTitle = "监控回放" + dd_navigationBarBackgroundColor = .hex("354683") + dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] + dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] + + // 回调当前选中时间 + timeline.onTimeSelected = {[weak self] hour, minute in + self?.fromDateString = self?.dateView.getDateString()?.appending(" \(hour):\(minute):00") + if let fromDateString = self?.fromDateString,let dateFormatter = self?.timeline.dateFormatter { + let toDate = Date(timeIntervalSince1970: ((NSDate.br_date(from: fromDateString, dateFormat: dateFormatter)?.timeIntervalSince1970 ?? 0) + 600)) + self?.toDateString = NSDate.br_string(from: toDate, dateFormat: dateFormatter) + + }else{ + self?.toDateString = self?.fromDateString + } + + if let alarmResponse = self?.alarmResponse { + self?.replaySub.onNext(alarmResponse) + } + } + + realtimeButton.setTitleColor(.white, for: .normal) + realtimeButton.titleLabel?.font = .dd_systemFont(ofSize: 16, weight: .semibold) + realtimeButton.backgroundColor = .dd_hex(light: "0273EE", dark: "0273EE") + realtimeButton.setImage(UIImage(named: "vehicleMonitoring_history_icon"), for: .normal) + realtimeButton.layer.cornerRadius = 6 + realtimeButton.setTitle("实时监控", for: .normal) + realtimeButton.dd_customize(with: .ImageLeftPaddingTitleRightWithWholeCenter, padding: 10) + realtimeButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(self?.alarmResponse) + self?.navigationController?.pushViewController(VehicleMonitorVideoController(vehicleId: 
self?.alarmResponse?.data?.vehicleId,simNumber: self?.alarmResponse?.data?.imei), animated: true) + }) + .disposed(by: disposeBag) + + refreshSub + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_showHUD() + }) + .flatMapLatest { code in + return RQ.getAlarmByCode(parameters: GetAlarmByCodeParameters(code: code)) + .flatMap { response in + return Single.create { single in + if response?.success == true { + single(.success(response)) + }else{ + single(.failure(CommonError())) + } + return Disposables.create() + } + } + } + .retry(when: { (rxError: Observable) -> Observable in + return rxError.flatMap({ error in + return Observable.timer(RxTimeInterval.seconds(5), scheduler: MainScheduler.asyncInstance) + }) + }) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] response in + self?.initUI(response: response) + }) + .disposed(by: disposeBag) + + replaySub + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_showHUD() + }) + .flatMap({[weak self] response in + return RQ.getReplayUrl(prameters: GetVideoUrlParameters(vehicleId: response?.data?.vehicleId, simNumber: response?.data?.imei, channel: response?.data?.channel, startDate: self?.fromDateString, endDate: self?.toDateString)) + }) + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_hideHUD() + }) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] response in + if response?.success == true { + if let first = response?.data?.realtimeList?.first { + self?.videoView.wsUrl = first + if self?.videoView.isPlaying == true { + self?.videoView.beginShow() + }else{ + } + } + }else{ + self?.view.dd_makeToast(response?.msg) + } + }).disposed(by: disposeBag) + + closeSub + .flatMapLatest { response in + return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: 
response?.data?.imei,channel:response?.data?.channel)) + } + .subscribe(onNext: { response in + }) + .disposed(by: disposeBag) + + + dateView.prevButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + if let date = self?.getPreDate(),let dateFormat = self?.dateView.dateFormatter { + self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat)) + } + }) + .disposed(by: disposeBag) + + dateView.nextButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + if let date = self?.getNextDate(),let dateFormat = self?.dateView.dateFormatter { + self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat)) + } + }) + .disposed(by: disposeBag) + + dateView.dateButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { _ in + let picker = BRDatePickerView(pickerMode: .YMD) + picker.show() + }) + .disposed(by: disposeBag) + + NotificationCenter.default.rx + .notification(UIApplication.didEnterBackgroundNotification) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { [weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(self?.alarmResponse) + }) + .disposed(by: disposeBag) + } + + override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + if timeline.superview == nil { + view.addSubview(videoView) + videoView.snp.makeConstraints { make in + make.top.equalTo(view.snp.top).offset(view.safeAreaInsets.top) + make.width.equalTo(auto(375)) + make.height.equalTo(auto(300)) + make.centerX.equalToSuperview() + } + + view.addSubview(dateView) + dateView.snp.makeConstraints { make in + make.centerX.equalToSuperview() + make.top.equalTo(videoView.snp.bottom).offset(20) + make.height.equalTo(64) + make.width.equalTo(200) + } + + view.addSubview(timeline) + timeline.snp.makeConstraints { make in + make.left.right.equalToSuperview() + make.height.equalTo(100) + 
make.top.equalTo(dateView.snp.bottom).offset(20) + } + + view.addSubview(realtimeButton) + realtimeButton.snp.makeConstraints { make in + make.left.right.equalToSuperview().inset(30) + make.height.equalTo(48) + make.top.equalTo(timeline.snp.bottom).offset(20) + } + + refreshSub.onNext(code) + } + } + + func initUI(response: ResponseModel?) { + alarmResponse = response + if let startTime = response?.data?.startTime { + dateView.setDate(dateString: startTime.components(separatedBy: " ").first) + let currentDate = (NSDate.br_date(from: startTime, dateFormat: timeline.dateFormatter) as? NSDate) ?? NSDate() + timeline.scrollToHour(hour: currentDate.br_hour,minute: currentDate.br_minute) + } + } + + func getPreDate() -> Date? { + if let dateString = dateView.getDateString() { + let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? NSDate + return date?.br_getNewDate(toDays: -1) + } + return Date() + } + + func getNextDate() -> Date? { + if let dateString = dateView.getDateString(){ + let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? 
NSDate + return date?.br_getNewDate(toDays: 1) + } + return Date() + } + + override func dd_backActionPop(_ isAnimated: Bool) { + super.dd_backActionPop(isAnimated) + closeSub.onNext(alarmResponse) + } +} + +class DateSwitcherView: UIView { + let dateFormatter = "yyyy-MM-dd" + + // MARK: - UI + let prevButton: UIButton = { + let btn = UIButton(type: .system) + btn.setImage(UIImage(systemName: "chevron.left"), for: .normal) + btn.tintColor = .darkGray + return btn + }() + + let nextButton: UIButton = { + let btn = UIButton(type: .system) + btn.setImage(UIImage(systemName: "chevron.right"), for: .normal) + btn.tintColor = .darkGray + return btn + }() + + let dateButton: UIButton = { + let btn = UIButton(type: .system) + btn.titleLabel?.font = .boldSystemFont(ofSize: 18) + btn.setTitleColor(.black, for: .normal) + btn.titleLabel?.textAlignment = .center + btn.backgroundColor = .clear + return btn + }() + + // MARK: - Init + override init(frame: CGRect) { + super.init(frame: frame) + setupUI() + } + required init?(coder: NSCoder) { + super.init(coder: coder) + setupUI() + } + + private func setupUI() { + + addSubview(prevButton) + addSubview(dateButton) + addSubview(nextButton) + + prevButton.snp.makeConstraints { make in + make.left.equalToSuperview().offset(18) + make.centerY.equalToSuperview() + make.width.height.equalTo(28) + } + + nextButton.snp.makeConstraints { make in + make.right.equalToSuperview().inset(18) + make.centerY.equalToSuperview() + make.width.height.equalTo(28) + } + + dateButton.snp.makeConstraints { make in + make.center.equalToSuperview() + } + } + + func setDate(dateString: String?) { + dateButton.setTitle(dateString, for: .normal) + } + + func getDateString() -> String? { + return dateButton.titleLabel?.text + } + +} + +class TimelineView: UIView, UIScrollViewDelegate { + var dateFormatter = "yyyy-MM-dd HH:mm:ss" + var onTimeSelected: ((String, String) -> Void)? 
+ var selectHour : Int = 0 + var selectMinute : Int = 0 + private let scrollView = UIScrollView() + private let contentView = UIView() + private let indicatorView = UIView() + private let quarterWidth: CGFloat = 36 // 每15分钟宽度(1小时=4*36=144) + private let longTickHeight: CGFloat = 10 // 整点、半点高度 + private let shortTickHeight: CGFloat = 5 // 15/45分高度 + private let labelHeight: CGFloat = 18 + private let totalHours = 24 + + override init(frame: CGRect) { + super.init(frame: frame) + setupUI() + } + required init?(coder: NSCoder) { + super.init(coder: coder) + setupUI() + } + + private func setupUI() { + addSubview(scrollView) + scrollView.showsHorizontalScrollIndicator = false + scrollView.delegate = self + scrollView.snp.makeConstraints { $0.edges.equalToSuperview() } + scrollView.addSubview(contentView) + + indicatorView.backgroundColor = .gray + addSubview(indicatorView) + indicatorView.snp.makeConstraints { + $0.centerX.equalToSuperview() + $0.width.equalTo(2) + $0.top.bottom.equalToSuperview() + } + } + + override func layoutSubviews() { + super.layoutSubviews() + let contentWidth = CGFloat(totalHours) * 4 * quarterWidth + contentView.frame = CGRect(x: 0, y: 0, width: contentWidth, height: bounds.height) + scrollView.contentSize = contentView.bounds.size + let inset = bounds.width / 2 + scrollView.contentInset = UIEdgeInsets(top: 0, left: inset, bottom: 0, right: inset) + layoutTicks() + } + + private func layoutTicks() { + contentView.subviews.forEach { $0.removeFromSuperview() } + for hour in 0...create(bufferSize: 1) + private let disposeBag = DisposeBag() + private var videos : [String] = [] + var channels : [Int] = [] + var channel : Int? + var closeSub = ReplaySubject.create(bufferSize: 1) + var videoView = VideoPlayView() + public init(vehicleId:Int?,simNumber: String?) 
{ + self.vehicleId = vehicleId + self.simNumber = simNumber + self.vehicleMonitorVideoView = VehicleMonitorVideoView() + super.init(nibName: nil, bundle: nil) + } + + public required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + open override func viewDidLoad() { + super.viewDidLoad() + dd_navigationItemTitle = "实时监控" + dd_navigationBarBackgroundColor = .hex("354683") + dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] + dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] + + vehicleMonitorVideoView.categoryView.delegate = self + reloadRelay .filter({[weak self] _ in - return self?.vehicleId != nil + return self?.vehicleId != nil && self?.simNumber != nil }) .observe(on: MainScheduler.instance) .do(onNext: {[weak self] _ in self?.view.dd_showHUD() }) .flatMapLatest {[weak self] _ in - return RQ.getRtspChannel(prameters: RtspChannelParameters(vehicleId: (self?.vehicleId)!)) + return RQ.getRealtimeUrl(prameters: GetVideoUrlParameters(vehicleId: (self?.vehicleId)!,simNumber: (self?.simNumber)!)) } .observe(on: MainScheduler.instance) .do(onNext: {[weak self] _ in @@ -37,12 +84,15 @@ extension VehicleMonitorVideoController { .subscribe(onNext: {[weak self] response in if response?.success == true { var channels : [String] = [] - for index in 0..<(response?.data?.count ?? 0) { - channels.append("通道"+"\(index + 1)") + self?.channels.removeAll() + for index in 0..<(response?.data?.channelList?.count ?? 0) { + let channelN = response?.data?.channelList?[index] ?? "" + channels.append("通道"+"\(channelN)") + self?.channels.append(Int(channelN) ?? 1) } if let data = response?.data { self?.videos.removeAll() - self?.videos.append(contentsOf: data) + self?.videos.append(contentsOf: data.realtimeList ?? 
[]) } self?.vehicleMonitorVideoView.categoryView.titles = channels self?.vehicleMonitorVideoView.categoryView.reloadData() @@ -54,69 +104,52 @@ extension VehicleMonitorVideoController { }) .disposed(by: disposeBag) - reloadRelay.accept(nil) - } -} - -extension VehicleMonitorVideoController : JXCategoryViewDelegate { - public func categoryView(_ categoryView: JXCategoryBaseView!, didSelectedItemAt index: Int) { + closeSub + .flatMapLatest {[weak self] response in + return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: self?.simNumber,channel:self?.channel)) + } + .subscribe(onNext: { response in + }) + .disposed(by: disposeBag) - let vc = children.first as? VehicleMonitoringVideoDetailController - vc?.playAssetURL(assetURL: URL(string: videos[index])!) + NotificationCenter.default.rx + .notification(UIApplication.didEnterBackgroundNotification) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { [weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(nil) + }) + .disposed(by: disposeBag) } -} -open class VehicleMonitorVideoController : ZDViewController { - private let vehicleId : Int? - private let vehicleMonitorVideoView : VehicleMonitorVideoView - private let reloadRelay = ReplayRelay.create(bufferSize: 1) - private let disposeBag = DisposeBag() - private var videos : [String] = [] - - public init(vehicleId:Int?) 
{ - self.vehicleId = vehicleId - self.vehicleMonitorVideoView = VehicleMonitorVideoView() - super.init(nibName: nil, bundle: nil) - } - - public required init?(coder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - open override func viewDidLoad() { - super.viewDidLoad() - dd_navigationItemTitle = "视频监控" - dd_navigationBarBackgroundColor = .hex("354683") - dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] - dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] - dd_backBarButtonItem?.tintColor = .hex("000000") - - vehicleMonitorVideoView.categoryView.delegate = self - view.addSubview(vehicleMonitorVideoView) - vehicleMonitorVideoView.snp.makeConstraints { make in - make.top.equalToSuperview().offset(CGRectGetHeight(UIApplication.shared.dd_statusBarFrame)+CGRectGetHeight(navigationController?.navigationBar.frame ?? .zero)) - make.left.right.bottom.equalToSuperview() + open override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + if vehicleMonitorVideoView.superview == nil { + view.addSubview(vehicleMonitorVideoView) + vehicleMonitorVideoView.snp.makeConstraints { make in + make.top.equalToSuperview().offset(view.safeAreaInsets.top) + make.left.right.bottom.equalToSuperview() + } + + vehicleMonitorVideoView.addSubview(videoView) + videoView.snp.makeConstraints { make in + make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10)) + make.width.equalTo(auto(375)) + make.height.equalTo(auto(300)) + make.centerX.equalToSuperview() + } + reloadRelay.accept(nil) } - - let videoDetailVc = VehicleMonitoringVideoDetailController(assetURL: nil) - videoDetailVc.dd_navigationBarBackgroundColor = .white - videoDetailVc.dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.hex("000000"),.font:UIFont.mediumFont(17)] - videoDetailVc.dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.hex("000000")] - - 
addChild(videoDetailVc) - vehicleMonitorVideoView.addSubview(videoDetailVc.view) - videoDetailVc.view.snp.makeConstraints { make in - make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10)) - make.width.equalTo(auto(375)) - make.height.equalTo(auto(300)) - make.centerX.equalToSuperview() - } - - addActions() } - + + open override func dd_backActionPop(_ isAnimated: Bool) { + super.dd_backActionPop(isAnimated) + videoView.endShow() + closeSub.onNext(nil) + } + open override var preferredStatusBarStyle: UIStatusBarStyle { - return .default + return .lightContent } } diff --git a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift index 78d2a6d..011f60b 100644 --- a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift +++ b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift @@ -51,15 +51,22 @@ extension VehicleMonitoringController { .do(onNext: {[weak self] _ in self?.view.dd_showHUD() }) - .flatMapLatest { _ in - return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo()) + .flatMapLatest {[weak self] _ in + guard let self = self else { + return Single.just(( + nil as ResponseModel?, + nil as ResponseModel?, + [] as [AlarmListDataModel] + )) + } + return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo(),self.getAllAlarmList(pageNum: 1, alarmList: [])) } .observe(on: MainScheduler.instance) - .do(onNext: {[weak self] _,_ in + .do(onNext: {[weak self] _,_,_ in self?.view.dd_hideHUD() }) .observe(on: MainScheduler.instance) - .subscribe(onNext: {[weak self] response,generalInfo in + .subscribe(onNext: {[weak self] response,generalInfo,alarmList in if generalInfo?.success == true { /// 如果list列表数量为0的话就显示当前位置 if let lat = generalInfo?.data?.addressLat,let lon = generalInfo?.data?.addressLon { @@ -109,6 +116,22 @@ extension VehicleMonitoringController { 
self?.vehicleMonitoringView.vehicleMonitoringPannelView.categoryView.reloadData() } + if alarmList.count > 0 { + self?.alarmList = alarmList + + var items : [String] = [] + for i in 0.. Single<[AlarmListDataModel]> { + func recursive(pageNum: Int,alarmList: [AlarmListDataModel]) -> Single<[AlarmListDataModel]> { + return RQ.alarmList(parameters: AlarmListParameters(pageNum: pageNum, supplierId: USER.supplierId,handStatus: AlarmListParameters.HandStatusEnum.pending.rawValue)) + .flatMap { response in + if (response?.data?.count ?? 0) == 0 { + return Single.create { single in + single(.success(alarmList)) + return Disposables.create() + } + } + let addAlarmList = alarmList + (response?.data ?? []) + return recursive(pageNum: pageNum + 1, alarmList: addAlarmList) + } + } + + return recursive(pageNum: 1, alarmList: []) + } } extension VehicleMonitoringController : DDMAMapViewDelegate { @@ -721,6 +764,13 @@ extension VehicleMonitoringController { vehicleMonitoringListDetailView.updateData(taskModels: vehicleModel.taskList ?? []) + /// 视频按钮显示规则 + if USER.supplierType == 1 && vehicleModel.terminalType == VehicleMonitorListDataModel.ItemModel.TerminalTypeEnum.GPS.rawValue { + vehicleMonitoringListDetailView.videoButton.isHidden = false + }else{ + vehicleMonitoringListDetailView.videoButton.isHidden = true + } + /// 当为max时收回pannelView if pannelPanGes.panGesValue.expandLevel == .max { previousStateOfPannelView = .max @@ -833,6 +883,7 @@ open class VehicleMonitoringController : ZDViewController { private var vehicleLogoutModel : VehicleMonitorListDataModel.ItemModel? 
private var vehicleLogoutRelay = ReplayRelay.create(bufferSize: 1) + private var alarmList : [AlarmListDataModel] = [] private let disposeBag = DisposeBag() open override func viewDidLoad() { @@ -990,16 +1041,29 @@ open class VehicleMonitoringView : DDView { public let coverView : DDView public let tapGes : UITapGestureRecognizer public let panGes : UIPanGestureRecognizer + public let offlineView : VehicleDeviceOffLineView + public let alarmView : VehicleAlarmView public init(titles: [String]) { vehicleMonitoringPannelView = VehicleMonitoringPannelView(titles:titles) maMapView = DDMAMapView() coverView = DDView() tapGes = UITapGestureRecognizer() panGes = UIPanGestureRecognizer() + offlineView = VehicleDeviceOffLineView() + alarmView = VehicleAlarmView() super.init(frame: .zero) maMapView.maMapView.isRotateCameraEnabled = false addSubview(maMapView) + + offlineView.offlineIconImageView.image = UIImage(named: "vehicleMonitoring_offline_icon") + offlineView.backgroundColor = .dd_hex(light: "FB8958", dark: "FB8958") + offlineView.layer.cornerRadius = 6 + offlineView.isHidden = true + addSubview(offlineView) + alarmView.imageView.image = UIImage(named: "vehicleMonitoring_alarm") + alarmView.isHidden = true + addSubview(alarmView) coverView.addGestureRecognizer(tapGes) coverView.addGestureRecognizer(panGes) coverView.isHidden = true @@ -1009,6 +1073,19 @@ open class VehicleMonitoringView : DDView { coverView.snp.makeConstraints { make in make.edges.equalToSuperview() } + + alarmView.snp.makeConstraints { make in + make.top.equalToSuperview().offset(10) + make.right.equalToSuperview().offset(-10) + make.width.height.equalTo(50) + } + + offlineView.snp.makeConstraints { make in + make.centerY.equalTo(alarmView) + make.centerX.equalToSuperview() + make.height.equalTo(35) + make.width.equalTo(200) + } } required public init?(coder: NSCoder) { @@ -1016,6 +1093,70 @@ open class VehicleMonitoringView : DDView { } } +open class VehicleDeviceOffLineView : DDView { + let 
offlineIconImageView : DDImageView + let offLineLabel : VerticalLoopScrollLabel + public override init(frame: CGRect) { + offlineIconImageView = DDImageView() + offLineLabel = VerticalLoopScrollLabel() + super.init(frame: frame) + addSubview(offlineIconImageView) + addSubview(offLineLabel) + + offlineIconImageView.snp.makeConstraints { make in + make.left.equalTo(15) + make.centerY.equalToSuperview() + } + + offLineLabel.snp.makeConstraints { make in + make.left.equalTo(offlineIconImageView.snp.right).offset(10) + make.right.equalToSuperview().offset(-10) + make.top.bottom.equalToSuperview().inset(8) + } + + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } +} + +open class VehicleAlarmView : DDView { + public let imageView : DDImageView + public let count : DDLabel + + public override init(frame: CGRect) { + self.imageView = DDImageView() + self.count = DDLabel() + super.init(frame: frame) + + addSubview(imageView) + count.layer.cornerRadius = 1 + count.layer.borderColor = UIColor.dd_hex(light: "FFFFFF", dark: "FFFFFF").cgColor + count.layer.borderWidth = 0.8 + count.layer.masksToBounds = true + count.backgroundColor = .dd_hex(light: "F93D3D", dark: "F93D3D") + count.textColor = .dd_hex(light: "FFFFFF", dark: "FFFFFF") + count.font = .dd_systemFont(ofSize: 12, weight: .semibold) + addSubview(count) + + imageView.snp.makeConstraints { make in + make.centerX.centerY.equalToSuperview() + make.width.height.lessThanOrEqualToSuperview() + } + + count.snp.makeConstraints { make in + make.right.equalTo(imageView.snp.right) + make.top.equalTo(imageView.snp.top) + } + + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } +} + open class VehicleMonitoringPannelView : DDView { public let radiusView : DDView public let categoryView : JXCategoryNumberView @@ -1212,6 +1353,7 @@ class VehicleMonitoringListDetailView : DDView, 
JXCategoryListContainerViewDeleg public let stateLabel : DDLabel public let vehicleLabel : DDLabel public let settingButton : DDButton + public let videoButton : DDButton public let nameLabel : DDLabel public let callButton : DDButton public let containerView : DDView @@ -1235,6 +1377,8 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg vehicleLabel = DDLabel.dd_init(withText: "", font: .mediumFont(auto(14)), textColor: .hex("11142F")) settingButton = DDButton.dd_initCustom() settingButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_setting"), for: .normal) + videoButton = DDButton.dd_initCustom() + videoButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_video_icon"), for: .normal) nameLabel = DDLabel.dd_init(withText: "", font: .regularFont(auto(14)), textColor: .hex("11142F")) callButton = DDButton.dd_initCustom() callButton.setBackgroundImage(UIImage(named: "vehicleMonitor_call_cell"), for: .normal) @@ -1258,6 +1402,7 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg icon.addSubview(stateLabel) addSubview(vehicleLabel) addSubview(settingButton) + addSubview(videoButton) addSubview(nameLabel) addSubview(callButton) @@ -1339,12 +1484,19 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg } settingButton.snp.makeConstraints { make in - make.left.equalTo(vehicleLabel.snp.right).offset(auto(2.5)) + make.left.equalTo(vehicleLabel.snp.right).offset(auto(5)) make.centerY.equalTo(icon) make.width.equalTo(auto(16)) make.height.equalTo(auto(14)) } + videoButton.snp.makeConstraints { make in + make.left.equalTo(settingButton.snp.right).offset(auto(10)) + make.centerY.equalTo(icon) + make.width.equalTo(auto(23)) + make.height.equalTo(auto(13)) + } + callButton.snp.makeConstraints { make in make.right.equalTo(-auto(20)) make.centerY.equalTo(backButton) diff --git a/OrderScheduling/Video/Video/view/VideoPlayView.h 
b/OrderScheduling/Video/Video/view/VideoPlayView.h new file mode 100644 index 0000000..8a6b6bf --- /dev/null +++ b/OrderScheduling/Video/Video/view/VideoPlayView.h @@ -0,0 +1,31 @@ +// +// WWVideoReplayView.h +// wanwayInternet +//made in zhongdao Copyright © 2020 liuchao. All rights reserved. +// + +#import +#import "AAPLEAGLLayer.h" + +NS_ASSUME_NONNULL_BEGIN + +UIKIT_EXTERN NSString *WWCarVideoReplayLastChannel; + +@protocol WWVideoReplayViewDelegate + +- (void)viedoReplayView:(UIView *)videoView fullScreenAction:(BOOL)fullScreen; + +@end + +@interface VideoPlayView : UIView + +@property (nonatomic,strong)AAPLEAGLLayer *playLayer; +@property (nonatomic, weak) id repalyDelegate; +@property (nonatomic, strong) NSString *wsUrl; +@property (nonatomic, assign) BOOL isPlaying; + +- (void)beginShow; +- (void)endShow; +@end + +NS_ASSUME_NONNULL_END diff --git a/OrderScheduling/Video/Video/view/VideoPlayView.m b/OrderScheduling/Video/Video/view/VideoPlayView.m new file mode 100644 index 0000000..aaa88f8 --- /dev/null +++ b/OrderScheduling/Video/Video/view/VideoPlayView.m @@ -0,0 +1,314 @@ +// +// WWVideoReplayView.m +// wanwayInternet +//made in zhongdao Copyright © 2020 liuchao. All rights reserved. 
+// + +#import "VideoPlayView.h" +#import "SRWebSocket.h" +#import "H264DecodeTool.h" +#import "g726.h" +#import "g711.h" +#import "PCMStreamPlayer.h" +#import "YFTimerManager.h" +#import "YFProgressHUD.h" + +@interface VideoPlayView () + +@property (nonatomic,strong) SRWebSocket *websocket; +@property (nonatomic,strong) NSMutableData *receivedVideoData; +@property (nonatomic,strong) H264DecodeTool *h264Decoder; +@property (nonatomic, strong) PCMStreamPlayer *pcmPlayer; +@property (nonatomic, assign,getter=isStopPlayBuffer) BOOL stopPlayBuffer; +@property (nonatomic, strong) UIButton *playBtn; +@property (nonatomic, strong) YFProgressHUD *hud; +@end + +@implementation VideoPlayView { + g726_state_t *m_state726; +} + +- (instancetype)initWithFrame:(CGRect)frame { + if (self = [super initWithFrame:frame]) { + self.backgroundColor = [UIColor blackColor]; + + //g726 to pcm + m_state726 = (g726_state_t *)malloc(sizeof(g726_state_t)); + m_state726 = g726_init(m_state726, 8000*5);//2-16kBits 3-24kBits 4-32kBits 5-40kBits + _receivedVideoData = [NSMutableData data]; + + //操作按钮 + [self addSubview:self.playBtn]; + + [self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(endShow)]]; + } + return self; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + self.playBtn.center = CGPointMake(CGRectGetWidth(self.bounds)/2, CGRectGetHeight(self.bounds)/2); +} + +- (void)dealloc { + NSLog(@"***** video replay view dealloc"); + [self stopPlay]; + _pcmPlayer = nil; + _playLayer = nil; + [self.websocket close]; +} + + +//全屏播放 +- (void)repalyFullScreenBtnTouchAction:(UIButton *)btn { + btn.selected = !btn.selected; + if (self.repalyDelegate && [self.repalyDelegate respondsToSelector:@selector(viedoReplayView:fullScreenAction:)]) { + [self.repalyDelegate viedoReplayView:self fullScreenAction:btn.isSelected]; + } +} + +- (void)endShow { + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + + 
self.playBtn.hidden = NO; + [self stopPlay]; + + self.isPlaying = NO; +} + +- (void)stopPlay { + + [YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0]; + + if (_websocket != nil && _websocket.readyState == SR_OPEN) { + [_websocket close]; + + } + if (_websocket != nil) { + _websocket = nil; + } + + if (self.pcmPlayer) { [self.pcmPlayer resetPlay]; } + + if (_playLayer) { + [_playLayer resetRenderBuffer]; + [_playLayer cleanUpTextures]; + self.stopPlayBuffer = YES; + } + + if (_receivedVideoData.length > 0) { + [_receivedVideoData resetBytesInRange:NSMakeRange(0, self.receivedVideoData.length)]; + _receivedVideoData.length = 0; + } +} + +- (void)beginShow { + [self replayBtnTouchWith:self.wsUrl]; +} + +- (void)replayBtnTouchWith:(NSString *)wsUrl { + + if ([wsUrl isKindOfClass:[NSString class]] && wsUrl.length > 0) { + self.stopPlayBuffer = NO; + self.playBtn.hidden = YES; + [self videoShowWithUrl:wsUrl]; + self.isPlaying = YES; + } + +} + +- (void)videoShowWithUrl:(NSString *)url { + NSMutableURLRequest *req = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:url]]; + _websocket = [[SRWebSocket alloc] initWithURLRequest:req]; + _websocket.delegate = self; + + if (self.pcmPlayer) { + [self.pcmPlayer resetPlay]; + } + + [self.playLayer resetRenderBuffer]; + [_websocket open]; + self.hud = [YFProgressHUD showProgressHUDinView:self title:@"努力加载视频中!"]; + +} + +//发送心跳包 +-(void)toDoThingsWhenTimeCome:(NSTimeInterval)interval{ + + if (interval == 5.0) { + if (self.websocket != nil && self.websocket.readyState == SR_OPEN) { + NSLog(@"********* websocket send state %ld", self.websocket.readyState); + [self.websocket send:@"0"]; + } + } + +} + +#pragma mark ================ SRWebSocketDelegate ======================= +- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message { + NSLog(@"收到数据了******* %ld %@ %@",webSocket.readyState, [message class], message); + + if ([message isKindOfClass:[NSData class]] == NO) { return; } + + NSData *data = 
[NSData dataWithData:message]; +// NSString *aString = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]; +// NSLog(@"\n\n%@\n\n",aString) + + + if (data.length < 24) { return; } + //NSLog(@" --- begin --- %@", data); + + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + + //sim 卡号在收到的包头里。对讲用到 +// self.speakSimCardData = [data subdataWithRange:NSMakeRange(8, 6)]; + + __weak typeof(self) weakself = self; +// g726_state_t *weak_m_state726 = m_state726; + + NSInteger dataLength = data.length; + NSInteger dataOffset = 0; + + while (dataOffset < dataLength) { + + //跳过5个字节没用 + NSData *typeData = [data subdataWithRange:NSMakeRange(dataOffset + 5, 1)]; //62 -> 0110 0010 -> 第1位,和后7位 + const Byte *byteData = (Byte *)[typeData bytes]; + + Byte ptByte = byteData[0]; + int isComplete = ptByte >> 7; //标志位,是否是完整数据帧边界,根据这个来拼接 + int loadType = ptByte & 0x7f; //98位视频数据,否则为音频数据 + + NSLog(@"%@ -- %d %d", typeData, isComplete, loadType); + + if (loadType == 98) { + NSData *videoSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 28, 2)]; + const Byte *sizeBytes = (Byte *)[videoSizeData bytes]; + int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff); + NSData *videoPerData = [data subdataWithRange:NSMakeRange(dataOffset + 30, size)]; + + //NSLog(@"---- one data %@", videoPerData); + [weakself.receivedVideoData appendData:videoPerData]; //61 0110 0001 + + dataOffset += 30 + size; + + //NSLog(@"size -- %@ %d %ld ",videoSizeData, size, dataOffset); + if (isComplete > 0) { + //NSLog(@"---- all data %@", self.receivedVideoData); + [weakself.h264Decoder decodeNalu:(uint8_t *)[weakself.receivedVideoData bytes] size:(uint32_t)weakself.receivedVideoData.length alive:NO]; + [weakself.receivedVideoData resetBytesInRange:NSMakeRange(0, weakself.receivedVideoData.length)]; + weakself.receivedVideoData.length = 0; + } + + } else { + + NSData *audioSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 24, 2)]; + const 
Byte *sizeBytes = (Byte *)[audioSizeData bytes]; + int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff);//[self intFromData:videoSizeData]; + NSData *audioData = [data subdataWithRange:NSMakeRange(dataOffset + 26, size)]; + NSLog(@"---- audio = %d %ld-%d", loadType, dataLength,size); + dataOffset += 26 + size; + //[self.fileHandle writeData:audioData]; + + //G726 40k码率 8000采样频率 5bit采样位数 + //86 -- 1000 0110 -- loadType=6-G711A 8-G726 + + //G726 转码 pcm + //ffplay -f s16le -ac 1 -ar 8000 a.pcm + //ffplay -f g726le -ar 8000 -ac 1 -code_size 5 -i test.g726 + int outLen = size*6, iRet = 0; + short *outBuffer = (short *)malloc(outLen); + unsigned char *audioDataBuffer = (unsigned char *)audioData.bytes; + + if (loadType == 6) { + // audio = 6 186-160 + //iRet = 0, out = 320, {length = 320, bytes = 0x008a008e 00a600d9 ... 0801002d 005a007a } + outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ALAW); + } else if (loadType == 7) { + outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ULAW); + } else { + //audio = 8 126-100 + //iRet = 160, out = 320, {length = 320, bytes = 0x0000fcff 1c00f4ff ... 
4816c457 0c12a8da } + iRet = g726_decode(self->m_state726, outBuffer, audioDataBuffer, size); + outLen = iRet*2; + } + + //不播放声音 + if (weakself.pcmPlayer) { + [weakself.pcmPlayer playWithData:(Byte *)outBuffer size:outLen]; + } + free(outBuffer); + } + } + + //NSLog(@" --- end --- "); +} + +- (void)webSocketDidOpen:(SRWebSocket *)webSocket { + [YFTimerManager addTimerDelegate:self forTimeInterval:5.0]; +} + +- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error { + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + [YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0]; + NSLog(@"*********** websocket error %@", error); + [YFProgressHUD showToastTitle:(@"视频播放失败,请重试!")]; + [self endShow]; + +} + +#pragma mark ================ H264DecodeFrameCallbackDelegate ======================= +- (void)gotDecodedFrame:(CVImageBufferRef)imageBuffer { + if(imageBuffer) { + //解码回来的数据绘制播放 + if (!self.isStopPlayBuffer) { + self.playLayer.pixelBuffer = imageBuffer; + CVPixelBufferRelease(imageBuffer); + } + } +} + +#pragma mark - getter + +- (AAPLEAGLLayer *)playLayer { + if (_playLayer == nil) { + _playLayer = [[AAPLEAGLLayer alloc] initWithFrame:self.bounds]; + [self.layer insertSublayer:_playLayer atIndex:0]; + } + return _playLayer;; +} + + +- (H264DecodeTool *)h264Decoder { + if (_h264Decoder == nil) { + _h264Decoder = [[H264DecodeTool alloc] init]; + _h264Decoder.delegate = self; + } + return _h264Decoder; +} + +- (PCMStreamPlayer *)pcmPlayer { + return nil; +} + +- (UIButton *)playBtn{ + if (!_playBtn) { + _playBtn = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 80, 80)]; + _playBtn.center = self.center; + [_playBtn setImage:[UIImage imageNamed:@"ww_video_paly"] forState:UIControlStateNormal]; + [_playBtn addTarget:self action:@selector(beginShow) forControlEvents:UIControlEventTouchUpInside]; + } + return _playBtn; +} + +@end + diff --git a/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h 
b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h new file mode 100755 index 0000000..8c35b9d --- /dev/null +++ b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h @@ -0,0 +1,20 @@ +/* + Copyright (C) 2014 Apple Inc. All Rights Reserved. + See LICENSE.txt for this sample’s licensing information + + Abstract: + + This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. + + */ + +//@import QuartzCore; +#include +#include + +@interface AAPLEAGLLayer : CAEAGLLayer +@property CVPixelBufferRef pixelBuffer; +- (id)initWithFrame:(CGRect)frame; +- (void)resetRenderBuffer; +- (void) cleanUpTextures; +@end diff --git a/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m new file mode 100755 index 0000000..a163446 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m @@ -0,0 +1,595 @@ +/* + Copyright (C) 2014 Apple Inc. All Rights Reserved. + See LICENSE.txt for this sample’s licensing information + + Abstract: + + This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. + + */ + +#import "AAPLEAGLLayer.h" + +#import +#import +#include +#import +#include +#include +#include +#include +#include + +// Uniform index. +enum +{ + UNIFORM_Y, + UNIFORM_UV, + UNIFORM_ROTATION_ANGLE, + UNIFORM_COLOR_CONVERSION_MATRIX, + NUM_UNIFORMS +}; +GLint uniforms[NUM_UNIFORMS]; + +// Attribute index. +enum +{ + ATTRIB_VERTEX, + ATTRIB_TEXCOORD, + NUM_ATTRIBUTES +}; + +// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range) + +// BT.601, which is the standard for SDTV. +static const GLfloat kColorConversion601[] = { + 1.164, 1.164, 1.164, + 0.0, -0.392, 2.017, + 1.596, -0.813, 0.0, +}; + +// BT.709, which is the standard for HDTV. 
+static const GLfloat kColorConversion709[] = { + 1.164, 1.164, 1.164, + 0.0, -0.213, 2.112, + 1.793, -0.533, 0.0, +}; + + + +@interface AAPLEAGLLayer () +{ + // The pixel dimensions of the CAEAGLLayer. + GLint _backingWidth; + GLint _backingHeight; + + EAGLContext *_context; + CVOpenGLESTextureRef _lumaTexture; + CVOpenGLESTextureRef _chromaTexture; + + GLuint _frameBufferHandle; + GLuint _colorBufferHandle; + + const GLfloat *_preferredConversion; +} +@property GLuint program; + +@end +@implementation AAPLEAGLLayer +@synthesize pixelBuffer = _pixelBuffer; + +-(CVPixelBufferRef) pixelBuffer +{ + return _pixelBuffer; +} + +- (void)setPixelBuffer:(CVPixelBufferRef)pb +{ + if(_pixelBuffer) { + CVPixelBufferRelease(_pixelBuffer); + } + _pixelBuffer = CVPixelBufferRetain(pb); + + int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer); + int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer); + [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight]; +} + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super init]; + if (self) { + CGFloat scale = [[UIScreen mainScreen] scale]; + self.contentsScale = scale; + + self.opaque = TRUE; + self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]}; + + [self setFrame:frame]; + + // Set the context into which the frames will be drawn. + _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + + if (!_context) { + return nil; + } + + // Set the default conversion to BT.709, which is the standard for HDTV. 
+ _preferredConversion = kColorConversion709; + + [self setupGL]; + } + + return self; +} + +- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + if(pixelBuffer == NULL) { + NSLog(@"Pixel buffer is null"); + return; + } + + CVReturn err; + + size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); + + /* + Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix. + */ + CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL); + if ( CFStringCompare((CFStringRef)colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) { + _preferredConversion = kColorConversion601; + } + else { + _preferredConversion = kColorConversion709; + } + + /* + CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef. + */ + + /* + Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane. + */ + + CVOpenGLESTextureCacheRef _videoTextureCache; + + // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion. 
+ err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache); + if (err != noErr) { + NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err); + return; + } + + glActiveTexture(GL_TEXTURE0); + + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _videoTextureCache, + pixelBuffer, + NULL, + GL_TEXTURE_2D, + GL_RED_EXT, + frameWidth, + frameHeight, + GL_RED_EXT, + GL_UNSIGNED_BYTE, + 0, + &_lumaTexture); + if (err) { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + if(planeCount == 2) { + // UV-plane. + glActiveTexture(GL_TEXTURE1); + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _videoTextureCache, + pixelBuffer, + NULL, + GL_TEXTURE_2D, + GL_RG_EXT, + frameWidth / 2, + frameHeight / 2, + GL_RG_EXT, + GL_UNSIGNED_BYTE, + 1, + &_chromaTexture); + if (err) { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } + + glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); + + // Set the view port to the entire view. + glViewport(0, 0, _backingWidth, _backingHeight); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + // Use shader program. 
+ glUseProgram(self.program); + // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1); + // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1); + glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); + glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); + + // Set up the quad vertices with respect to the orientation and aspect ratio of the video. + CGRect viewBounds = self.bounds; + CGSize contentSize = CGSizeMake(frameWidth, frameHeight); + CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds); + + // Compute normalized quad coordinates to draw the frame into. + CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0); + CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width, + vertexSamplingRect.size.height/viewBounds.size.height); + + // Normalize the quad vertices. + if (cropScaleAmount.width > cropScaleAmount.height) { + normalizedSamplingSize.width = 1.0; + normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width; + } + else { + normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height; + normalizedSamplingSize.height = 1.0;; + } + + /* + The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers. + Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen. + */ + GLfloat quadVertexData [] = { + (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)(-1 * normalizedSamplingSize.height), + (GLfloat)normalizedSamplingSize.width, (GLfloat)(-1 * normalizedSamplingSize.height), + (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)normalizedSamplingSize.height, + (GLfloat)normalizedSamplingSize.width, (GLfloat)normalizedSamplingSize.height, + }; + + // Update attribute values. 
+ glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData); + glEnableVertexAttribArray(ATTRIB_VERTEX); + + /* + The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system. + */ + CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1); + GLfloat quadTextureData[] = { + (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect), + (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect), + (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect), + (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect) + }; + + glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData); + glEnableVertexAttribArray(ATTRIB_TEXCOORD); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); + [_context presentRenderbuffer:GL_RENDERBUFFER]; + + [self cleanUpTextures]; + // Periodic texture cache flush every frame + CVOpenGLESTextureCacheFlush(_videoTextureCache, 0); + + if(_videoTextureCache) { + CFRelease(_videoTextureCache); + } +} + +# pragma mark - OpenGL setup + +- (void)setupGL +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self setupBuffers]; + [self loadShaders]; + + glUseProgram(self.program); + + // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively. 
+ glUniform1i(uniforms[UNIFORM_Y], 0); + glUniform1i(uniforms[UNIFORM_UV], 1); + glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); + glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); +} + +#pragma mark - Utilities + +- (void)setupBuffers +{ + glDisable(GL_DEPTH_TEST); + + glEnableVertexAttribArray(ATTRIB_VERTEX); + glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); + + glEnableVertexAttribArray(ATTRIB_TEXCOORD); + glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); + + [self createBuffers]; +} + +- (void) createBuffers +{ + glGenFramebuffers(1, &_frameBufferHandle); + glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); + + glGenRenderbuffers(1, &_colorBufferHandle); + glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); + + [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self]; + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth); + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight); + + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle); + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); + } +} + +- (void) releaseBuffers +{ + if(_frameBufferHandle) { + glDeleteFramebuffers(1, &_frameBufferHandle); + _frameBufferHandle = 0; + } + + if(_colorBufferHandle) { + glDeleteRenderbuffers(1, &_colorBufferHandle); + _colorBufferHandle = 0; + } +} + +- (void) resetRenderBuffer +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self releaseBuffers]; + [self createBuffers]; +} + +- (void) cleanUpTextures +{ + if (_lumaTexture) { + CFRelease(_lumaTexture); + _lumaTexture = NULL; + } + + if (_chromaTexture) { + CFRelease(_chromaTexture); + _chromaTexture = NULL; + } +} + 
+#pragma mark - OpenGL ES 2 shader compilation + +const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;" +"precision mediump float;" +"uniform sampler2D SamplerY;" +"uniform sampler2D SamplerUV;" +"uniform mat3 colorConversionMatrix;" +"void main()" +"{" +" mediump vec3 yuv;" +" lowp vec3 rgb;" +// Subtract constants to map the video range start at 0 +" yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));" +" yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));" +" rgb = colorConversionMatrix * yuv;" +" gl_FragColor = vec4(rgb, 1);" +"}"; + +const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;" +"attribute vec2 texCoord;" +"uniform float preferredRotation;" +"varying vec2 texCoordVarying;" +"void main()" +"{" +" mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0," +" sin(preferredRotation), cos(preferredRotation), 0.0, 0.0," +" 0.0, 0.0, 1.0, 0.0," +" 0.0, 0.0, 0.0, 1.0);" +" gl_Position = position * rotationMatrix;" +" texCoordVarying = texCoord;" +"}"; + +- (BOOL)loadShaders +{ + GLuint vertShader = 0, fragShader = 0; + + // Create the shader program. + self.program = glCreateProgram(); + + if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) { + NSLog(@"Failed to compile vertex shader"); + return NO; + } + + if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) { + NSLog(@"Failed to compile fragment shader"); + return NO; + } + + // Attach vertex shader to program. + glAttachShader(self.program, vertShader); + + // Attach fragment shader to program. + glAttachShader(self.program, fragShader); + + // Bind attribute locations. This needs to be done prior to linking. + glBindAttribLocation(self.program, ATTRIB_VERTEX, "position"); + glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord"); + + // Link the program. 
+ if (![self linkProgram:self.program]) { + NSLog(@"Failed to link program: %d", self.program); + + if (vertShader) { + glDeleteShader(vertShader); + vertShader = 0; + } + if (fragShader) { + glDeleteShader(fragShader); + fragShader = 0; + } + if (self.program) { + glDeleteProgram(self.program); + self.program = 0; + } + + return NO; + } + + // Get uniform locations. + uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY"); + uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV"); + // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold"); + // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold"); + uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation"); + uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix"); + + // Release vertex and fragment shaders. + if (vertShader) { + glDetachShader(self.program, vertShader); + glDeleteShader(vertShader); + } + if (fragShader) { + glDetachShader(self.program, fragShader); + glDeleteShader(fragShader); + } + + return YES; +} + +- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString +{ + *shader = glCreateShader(type); + glShaderSource(*shader, 1, &shaderString, NULL); + glCompileShader(*shader); + +#if defined(DEBUG) + GLint logLength; + glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetShaderInfoLog(*shader, logLength, &logLength, log); + NSLog(@"Shader compile log:\n%s", log); + free(log); + } +#endif + + GLint status = 0; + glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); + if (status == 0) { + glDeleteShader(*shader); + return NO; + } + + return YES; +} + +- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL +{ + NSError *error; + NSString *sourceString = [[NSString alloc] 
initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error]; + if (sourceString == nil) { + NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]); + return NO; + } + + const GLchar *source = (GLchar *)[sourceString UTF8String]; + + return [self compileShaderString:shader type:type shaderString:source]; +} + +- (BOOL)linkProgram:(GLuint)prog +{ + GLint status; + glLinkProgram(prog); + +#if defined(DEBUG) + GLint logLength; + glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetProgramInfoLog(prog, logLength, &logLength, log); + NSLog(@"Program link log:\n%s", log); + free(log); + } +#endif + + glGetProgramiv(prog, GL_LINK_STATUS, &status); + if (status == 0) { + return NO; + } + + return YES; +} + +- (BOOL)validateProgram:(GLuint)prog +{ + GLint logLength, status; + + glValidateProgram(prog); + glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetProgramInfoLog(prog, logLength, &logLength, log); + NSLog(@"Program validate log:\n%s", log); + free(log); + } + + glGetProgramiv(prog, GL_VALIDATE_STATUS, &status); + if (status == 0) { + return NO; + } + + return YES; +} + +- (void)dealloc +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self cleanUpTextures]; + + if(_pixelBuffer) { + CVPixelBufferRelease(_pixelBuffer); + } + + if (self.program) { + glDeleteProgram(self.program); + self.program = 0; + } + if(_context) { + //[_context release]; + _context = nil; + } + //[super dealloc]; +} + +@end diff --git a/OrderScheduling/Video/VideoTools/H264DecodeTool.h b/OrderScheduling/Video/VideoTools/H264DecodeTool.h new file mode 100644 index 0000000..264e82f --- /dev/null +++ b/OrderScheduling/Video/VideoTools/H264DecodeTool.h @@ -0,0 +1,29 @@ +// +// H264DecodeTool.h +// VideoToolBoxDecodeH264 +//made in zhongdao Copyright © 2018年 AnDong. All rights reserved. 
+// + +#import +#import +#import + +@protocol H264DecodeFrameCallbackDelegate + +//回调sps和pps数据 +- (void)gotDecodedFrame:(CVImageBufferRef )imageBuffer; + +@end + +@interface H264DecodeTool : NSObject + +-(BOOL)initH264Decoder; + +//解码nalu +-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive; + +- (void)endDecode; + +@property (weak, nonatomic) id delegate; + +@end diff --git a/OrderScheduling/Video/VideoTools/H264DecodeTool.m b/OrderScheduling/Video/VideoTools/H264DecodeTool.m new file mode 100644 index 0000000..dea7173 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/H264DecodeTool.m @@ -0,0 +1,298 @@ +// +// H264DecodeTool.m +// VideoToolBoxDecodeH264 +//made in zhongdao Copyright © 2018年 AnDong. All rights reserved. +// + +#import "H264DecodeTool.h" + +const uint8_t lyStartCode[4] = {0, 0, 0, 1}; + +@interface H264DecodeTool(){ + + //解码session + VTDecompressionSessionRef _decoderSession; + + //解码format 封装了sps和pps + CMVideoFormatDescriptionRef _decoderFormatDescription; + + //sps & pps + uint8_t *_sps; + NSInteger _spsSize; + uint8_t *_pps; + NSInteger _ppsSize; + +} +@property(nonatomic,assign)BOOL isNewValue; + +@end + +@implementation H264DecodeTool + +- (BOOL)initH264Decoder{ + + if(_decoderSession){ + return YES; + } + + + const uint8_t* const parameterSetPointers[2] = { _sps, _pps }; + const size_t parameterSetSizes[2] = { _spsSize, _ppsSize }; + + //NSLog(@"----- init h264 -- sps %@ --- pps %@", [NSData dataWithBytes:_sps length:_spsSize], [NSData dataWithBytes:_pps length:_ppsSize]); + + //用sps 和pps 实例化_decoderFormatDescription + OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, + 2, //参数个数 + parameterSetPointers, + parameterSetSizes, + 4, //nal startcode开始的size + &_decoderFormatDescription); + + if(status == noErr) { + NSDictionary* destinationPixelBufferAttributes = @{ + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], 
+ //硬解必须是 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + // 或者是kCVPixelFormatType_420YpCbCr8Planar + //因为iOS是 nv12 其他是nv21 + (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:1280], + (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:960], + //这里宽高和编码反的 两倍关系 + (id)kCVPixelBufferOpenGLCompatibilityKey : [NSNumber numberWithBool:YES] + }; + + + + VTDecompressionOutputCallbackRecord callBackRecord; + callBackRecord.decompressionOutputCallback = didDecompress; + callBackRecord.decompressionOutputRefCon = (__bridge void *)self; + status = VTDecompressionSessionCreate(kCFAllocatorDefault, + _decoderFormatDescription, + NULL, + (__bridge CFDictionaryRef)destinationPixelBufferAttributes, + &callBackRecord, + &_decoderSession); + VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)[NSNumber numberWithInt:1]); + VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue); + } else { + NSLog(@"IOS8VT: reset decoder session failed status=%d", status); + return NO; + } + + return YES; +} + +//解码回调 +static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){ + CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon; + + //持有pixelBuffer数据,否则会被释放 + *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer); + H264DecodeTool *decoder = (__bridge H264DecodeTool *)decompressionOutputRefCon; + if (decoder.delegate) + { + [decoder.delegate gotDecodedFrame:pixelBuffer]; + } +} + + +//解码nalu裸数据 +-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive +{ + + int flag = 1; + uint8_t * packetBuffer = NULL; + long packetSize = 0; + + long count = 0, location = 0; + for (long i = 0 ; i < frameSize; i++) { + if (frame[i] == 0) { + count++; + } else if (frame[i] == 1 && ((isAlive && count == 3) || ( !isAlive 
&& count >= 3)) && i > 3) { + // 0x00 0x00 0x00 0x01 如果存在多个0x00 以前的count == 3 就有问题 所以用count >= 3 正确,这里专门针对DQ001和除它之外的其他设备的实时和回看适配 + if (packetBuffer) { + free(packetBuffer); + packetBuffer = NULL; + } + packetSize = i - location - 3; + + packetBuffer = (uint8_t *)malloc(packetSize); + memcpy(packetBuffer, frame+location, packetSize); + + location = i - 3; + count = 0; + + flag = 0; + //NSLog(@"1---%@", [NSData dataWithBytes:packetBuffer length:packetSize]); + [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize]; + } else { + count = 0; + } + } + + + if (flag) { + //NSLog(@"2---%@", [NSData dataWithBytes:frame length:frameSize]); + [self oneDecodeNalu:frame size:frameSize]; + } else { + free(packetBuffer); + packetBuffer = NULL; + packetSize = frameSize - location; + + packetBuffer = (uint8_t *)malloc(packetSize); + memcpy(packetBuffer, frame+location, packetSize); + + //NSLog(@"3---%@", [NSData dataWithBytes:packetBuffer length:packetSize]); + [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize]; + } + + //NSLog(@"4---"); + + +} + + +-(void)oneDecodeNalu:(uint8_t *)frame size:(uint32_t)frameSize { + // NSLog(@"------------开始解码"); + + //获取nalu type + int nalu_type = (frame[4] & 0x1F); + CVPixelBufferRef pixelBuffer = NULL; + + //填充nalu size 去掉start code 替换成nalu size + uint32_t nalSize = (uint32_t)(frameSize - 4); + uint8_t *pNalSize = (uint8_t*)(&nalSize); + frame[0] = *(pNalSize + 3); + frame[1] = *(pNalSize + 2); + frame[2] = *(pNalSize + 1); + frame[3] = *(pNalSize); + + switch (nalu_type) + { + case 0x05: + //关键帧 + if([self initH264Decoder]) + { + pixelBuffer = [self decode:frame size:frameSize]; + } + break; + case 0x07: + //sps + _spsSize = frameSize - 4; +// uint8_t *oldsps = _sps; + + _sps = (uint8_t *)malloc(_spsSize); + memcpy(_sps, &frame[4], _spsSize); +// if (oldsps != _sps) { +// self.isNewValue = YES; +// } + break; + case 0x08: + { + //pps +// uint8_t * oldpps = _pps; + + _ppsSize = frameSize - 4; + _pps = (uint8_t *)malloc(_ppsSize); + 
memcpy(_pps, &frame[4], _ppsSize); + +// if (oldpps != _pps) { +// self.isNewValue = YES; +// } + break; + } + default: + { + // B/P frame + if([self initH264Decoder]) + { + pixelBuffer = [self decode:frame size:frameSize]; + } + break; + } + + + } +} + + +//解码帧数据 +- (CVPixelBufferRef)decode:(uint8_t *)frame size:(uint32_t)frameSize{ + CVPixelBufferRef outputPixelBuffer = NULL; + + CMBlockBufferRef blockBuffer = NULL; + + //创建CMBlockBufferRef + OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, + (void *)frame, + frameSize, + kCFAllocatorNull, + NULL, + 0, + frameSize, + FALSE, + &blockBuffer); + if (status == kCMBlockBufferNoErr) { + + CMSampleBufferRef sampleBuffer = NULL; + const size_t sampleSizeArray[] = {frameSize}; + + //创建sampleBuffer + status = CMSampleBufferCreateReady(kCFAllocatorDefault, + blockBuffer, + _decoderFormatDescription , + 1, 0, NULL, 1, sampleSizeArray, + &sampleBuffer); + + if (status == kCMBlockBufferNoErr && sampleBuffer) { + VTDecodeFrameFlags flags = 0; + VTDecodeInfoFlags flagOut = 0; + //CMSampleBufferRef丢进去解码 + OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession, + sampleBuffer, + flags, + &outputPixelBuffer, + &flagOut); + + if(decodeStatus == kVTInvalidSessionErr) { + NSLog(@"IOS8VT: Invalid session, reset decoder session"); + } else if(decodeStatus == kVTVideoDecoderBadDataErr) { + NSLog(@"IOS8VT: decode failed status=%d(Bad data)", decodeStatus); + } else if(decodeStatus != noErr) { + NSLog(@"IOS8VT: decode failed status=%d", decodeStatus); + } + CFRelease(sampleBuffer); + } + CFRelease(blockBuffer); + } + //返回pixelBuffer数据 + return outputPixelBuffer; +} + +- (void)endDecode{ + + if(_decoderSession) { + VTDecompressionSessionInvalidate(_decoderSession); + CFRelease(_decoderSession); + _decoderSession = NULL; + } + + if(_decoderFormatDescription) { + CFRelease(_decoderFormatDescription); + _decoderFormatDescription = NULL; + } + + if (_sps) { + free(_sps); + } + + if (_pps) { + free(_pps); + } + + 
_ppsSize = _spsSize = 0; +} + + + +@end diff --git a/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h new file mode 100644 index 0000000..fbbc943 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h @@ -0,0 +1,18 @@ +// +// PCMStreamPlayer.h +// LinePlayer +//made in zhongdao Copyright © 2020 myz. All rights reserved. +// + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface PCMStreamPlayer : NSObject + +-(void)playWithData:(Byte *)pcmData size:(int)length; +- (void)resetPlay; +//-(void)stop; +@end + +NS_ASSUME_NONNULL_END diff --git a/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m new file mode 100644 index 0000000..ff85fb0 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m @@ -0,0 +1,131 @@ +// +// PCMStreamPlayer.m +// LinePlayer +//made in zhongdao Copyright © 2020 myz. All rights reserved. +// + +#import "PCMStreamPlayer.h" +#import + + +#define QUEUE_BUFFER_SIZE 6 //队列缓冲个数 +#define MIN_SIZE_PER_FRAME 600 //每帧最小数据长度 + +@interface PCMStreamPlayer() { + NSLock *synlock ;//同步控制 + AudioQueueRef audioQueue;//音频播放队列 + BOOL audioQueueUsed[QUEUE_BUFFER_SIZE]; //音频缓存是否在使用中 + AudioStreamBasicDescription audioDescription;//音频参数 + AudioQueueBufferRef audioQueueBuffers[QUEUE_BUFFER_SIZE];//音频缓冲 + + int bufferSizeCount; +} + +@end + +@implementation PCMStreamPlayer + +- (instancetype)init { + if (self=[super init]) { + bufferSizeCount = 1; + synlock = [[NSLock alloc] init]; + [self reset]; + } + return self; +} + +- (void)reset { + [self stop]; + + ///设置音频参数 + audioDescription.mSampleRate = 8000; //采样率 + audioDescription.mFormatID = kAudioFormatLinearPCM; + audioDescription.mFormatFlags = (kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked); + audioDescription.mChannelsPerFrame = 1; ///单声道 + audioDescription.mFramesPerPacket = 1; //每一个packet一侦数据 + audioDescription.mBitsPerChannel = 16; 
//每个采样点16bit量化 + audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel / 8) * audioDescription.mChannelsPerFrame; + audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame; + AudioQueueNewOutput(&audioDescription, audioPlayerAQInputCallback, (__bridge void*)self, nil, nil, 0, &audioQueue); //使用player的内部线程播放 + //AudioQueueSetParameter(audioQueue, kAudioQueueParam_Volume, 1.0); + //初始化音频缓冲区 + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + AudioQueueAllocateBuffer(audioQueue, MIN_SIZE_PER_FRAME, &audioQueueBuffers[i]); + } + +} + +- (void)dealloc { + NSLog(@"***** pcmstream player dealloc"); +} + +- (void)stop { + if (audioQueue) { + AudioQueueStop(audioQueue, true); + AudioQueueReset(audioQueue); + audioQueue = nil; + } +} + +- (void)resetPlay { + [self stop]; +} + +-(void)playWithData:(Byte *)pcmData size:(int)length { + if (audioQueue == nil) { //|| ![self checkBufferHasUsed] + // 第一次使用 + [self reset]; + AudioQueueStart(audioQueue, NULL); + } + + [synlock lock]; + AudioQueueBufferRef audioQueueBuffer = NULL; + while (true) { + audioQueueBuffer = [self getNotUsedBuffer]; + if (audioQueueBuffer != NULL) { + break; + } + } + + audioQueueBuffer->mAudioDataByteSize = length; + memcpy(audioQueueBuffer->mAudioData, pcmData, length); + AudioQueueEnqueueBuffer(audioQueue, audioQueueBuffer, 0, NULL); + [synlock unlock]; +} +static void audioPlayerAQInputCallback(void *input, AudioQueueRef audioQueue, AudioQueueBufferRef audioQueueBuffers) { + PCMStreamPlayer *player = (__bridge PCMStreamPlayer*)input; + [player playerCallback:audioQueueBuffers]; +} + +// 是不是有缓冲在使用中 +- (BOOL)checkBufferHasUsed +{ + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (YES == audioQueueUsed[i]) { + return YES; + } + } + return NO; +} +// 获取没有在使用的缓冲 +- (AudioQueueBufferRef)getNotUsedBuffer +{ + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (NO == audioQueueUsed[i]) { + audioQueueUsed[i] = YES; + return audioQueueBuffers[i]; + } + } + return NULL; +} + +// 标志缓冲空闲中 +- 
(void)playerCallback:(AudioQueueBufferRef)outQB { + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (outQB == audioQueueBuffers[i]) { + audioQueueUsed[i] = NO; + } + } +} + +@end diff --git a/OrderScheduling/Video/VideoTools/SRWebSocket.h b/OrderScheduling/Video/VideoTools/SRWebSocket.h new file mode 100644 index 0000000..ca3a2c0 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/SRWebSocket.h @@ -0,0 +1,154 @@ +// +// Copyright 2012 Square Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import +#import + +typedef NS_ENUM(NSInteger, SRReadyState) { + SR_CONNECTING = 0, + SR_OPEN = 1, + SR_CLOSING = 2, + SR_CLOSED = 3, +}; + +typedef enum SRStatusCode : NSInteger { + // 0–999: Reserved and not used. + SRStatusCodeNormal = 1000, + SRStatusCodeGoingAway = 1001, + SRStatusCodeProtocolError = 1002, + SRStatusCodeUnhandledType = 1003, + // 1004 reserved. + SRStatusNoStatusReceived = 1005, + SRStatusCodeAbnormal = 1006, + SRStatusCodeInvalidUTF8 = 1007, + SRStatusCodePolicyViolated = 1008, + SRStatusCodeMessageTooBig = 1009, + SRStatusCodeMissingExtension = 1010, + SRStatusCodeInternalError = 1011, + SRStatusCodeServiceRestart = 1012, + SRStatusCodeTryAgainLater = 1013, + // 1014: Reserved for future use by the WebSocket standard. + SRStatusCodeTLSHandshake = 1015, + // 1016–1999: Reserved for future use by the WebSocket standard. + // 2000–2999: Reserved for use by WebSocket extensions. 
+ // 3000–3999: Available for use by libraries and frameworks. May not be used by applications. Available for registration at the IANA via first-come, first-serve. + // 4000–4999: Available for use by applications. +} SRStatusCode; + +@class SRWebSocket; + +extern NSString *const SRWebSocketErrorDomain; +extern NSString *const SRHTTPResponseErrorKey; + +#pragma mark - SRWebSocketDelegate + +@protocol SRWebSocketDelegate; + +#pragma mark - SRWebSocket + +@interface SRWebSocket : NSObject + +@property (nonatomic, weak) id delegate; + +@property (nonatomic, readonly) SRReadyState readyState; +@property (nonatomic, readonly, retain) NSURL *url; + + +@property (nonatomic, readonly) CFHTTPMessageRef receivedHTTPHeaders; + +// Optional array of cookies (NSHTTPCookie objects) to apply to the connections +@property (nonatomic, readwrite) NSArray * requestCookies; + +// This returns the negotiated protocol. +// It will be nil until after the handshake completes. +@property (nonatomic, readonly, copy) NSString *protocol; + +// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol. +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols; +- (id)initWithURLRequest:(NSURLRequest *)request; + +// Some helper constructors. +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols; +- (id)initWithURL:(NSURL *)url; + +// Delegate queue will be dispatch_main_queue by default. +// You cannot set both OperationQueue and dispatch_queue. +- (void)setDelegateOperationQueue:(NSOperationQueue*) queue; +- (void)setDelegateDispatchQueue:(dispatch_queue_t) queue; + +// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes. 
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; +- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; + +// SRWebSockets are intended for one-time-use only. Open should be called once and only once. +- (void)open; + +- (void)close; +- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason; + +// Send a UTF8 String or Data. +- (void)send:(id)data; + +// Send Data (can be nil) in a ping message. +- (void)sendPing:(NSData *)data; + +@end + +#pragma mark - SRWebSocketDelegate + +@protocol SRWebSocketDelegate + +// message will either be an NSString if the server is using text +// or NSData if the server is using binary. +- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message; + +@optional + +- (void)webSocketDidOpen:(SRWebSocket *)webSocket; +- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error; +- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean; +- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload; + +// Return YES to convert messages sent as Text to an NSString. Return NO to skip NSData -> NSString conversion for Text messages. Defaults to YES. 
+- (BOOL)webSocketShouldConvertTextFrameToString:(SRWebSocket *)webSocket; + +@end + +#pragma mark - NSURLRequest (SRCertificateAdditions) + +@interface NSURLRequest (SRCertificateAdditions) + +@property (nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates; + +@end + +#pragma mark - NSMutableURLRequest (SRCertificateAdditions) + +@interface NSMutableURLRequest (SRCertificateAdditions) + +@property (nonatomic, retain) NSArray *SR_SSLPinnedCertificates; + +@end + +#pragma mark - NSRunLoop (SRWebSocket) + +@interface NSRunLoop (SRWebSocket) + ++ (NSRunLoop *)SR_networkRunLoop; + +@end diff --git a/OrderScheduling/Video/VideoTools/SRWebSocket.m b/OrderScheduling/Video/VideoTools/SRWebSocket.m new file mode 100644 index 0000000..8673d10 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/SRWebSocket.m @@ -0,0 +1,1921 @@ +// +// Copyright 2012 Square Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + + +#import "SRWebSocket.h" + +#if TARGET_OS_IPHONE +#define HAS_ICU +#endif + +#ifdef HAS_ICU +#import +#endif + +#if TARGET_OS_IPHONE +#import +#else +#import +#endif + +#import +#import + +#if OS_OBJECT_USE_OBJC_RETAIN_RELEASE +#define sr_dispatch_retain(x) +#define sr_dispatch_release(x) +#define maybe_bridge(x) ((__bridge void *) x) +#else +#define sr_dispatch_retain(x) dispatch_retain(x) +#define sr_dispatch_release(x) dispatch_release(x) +#define maybe_bridge(x) (x) +#endif + +#if !__has_feature(objc_arc) +#error SocketRocket must be compiled with ARC enabled +#endif + + +typedef enum { + SROpCodeTextFrame = 0x1, + SROpCodeBinaryFrame = 0x2, + // 3-7 reserved. + SROpCodeConnectionClose = 0x8, + SROpCodePing = 0x9, + SROpCodePong = 0xA, + // B-F reserved. +} SROpCode; + +typedef struct { + BOOL fin; +// BOOL rsv1; +// BOOL rsv2; +// BOOL rsv3; + uint8_t opcode; + BOOL masked; + uint64_t payload_length; +} frame_header; + +static NSString *const SRWebSocketAppendToSecKeyString = @"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; + +static inline int32_t validate_dispatch_data_partial_string(NSData *data); +static inline void SRFastLog(NSString *format, ...); + + +static NSString *newSHA1String(const char *bytes, size_t length) { + uint8_t md[CC_SHA1_DIGEST_LENGTH]; + + assert(length >= 0); + assert(length <= UINT32_MAX); + CC_SHA1(bytes, (CC_LONG)length, md); + + NSData *data = [NSData dataWithBytes:md length:CC_SHA1_DIGEST_LENGTH]; + + if ([data respondsToSelector:@selector(base64EncodedStringWithOptions:)]) { + return [data base64EncodedStringWithOptions:0]; + } + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + return [data base64Encoding]; +#pragma clang diagnostic pop +} + + +@interface NSData (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; + +@end + +@implementation NSData (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; { + return newSHA1String((char *)self.bytes, self.length); +} + 
+@end + + + + +@interface NSString (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; + +@end + + +@interface NSURL (SRWebSocket) + +// The origin isn't really applicable for a native application. +// So instead, just map ws -> http and wss -> https. +- (NSString *)SR_origin; + +@end + + +@interface _SRRunLoopThread : NSThread + +@property (nonatomic, readonly) NSRunLoop *runLoop; + +@end + + + + + +@implementation NSString (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; +{ + return newSHA1String(self.UTF8String, self.length); +} + +@end + +NSString *const SRWebSocketErrorDomain = @"SRWebSocketErrorDomain"; +NSString *const SRHTTPResponseErrorKey = @"HTTPResponseStatusCode"; + +// Returns number of bytes consumed. Returning 0 means you didn't match. +// Sends bytes to callback handler; +typedef size_t (^stream_scanner)(NSData *collected_data); + +typedef void (^data_callback)(SRWebSocket *webSocket, NSData *data); + +@interface SRIOConsumer : NSObject { + stream_scanner _scanner; + data_callback _handler; + size_t _bytesNeeded; + BOOL _readToCurrentFrame; + BOOL _unmaskBytes; +} +@property (nonatomic, copy, readonly) stream_scanner consumer; +@property (nonatomic, copy, readonly) data_callback handler; +@property (nonatomic, assign) size_t bytesNeeded; +@property (nonatomic, assign, readonly) BOOL readToCurrentFrame; +@property (nonatomic, assign, readonly) BOOL unmaskBytes; + +@end + +// This class is not thread-safe, and is expected to always be run on the same queue. 
+@interface SRIOConsumerPool : NSObject + +- (id)initWithBufferCapacity:(NSUInteger)poolSize; + +- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes; +- (void)returnConsumer:(SRIOConsumer *)consumer; + +@end + +@interface SRWebSocket () + +@property (nonatomic) SRReadyState readyState; + +@property (nonatomic) NSOperationQueue *delegateOperationQueue; +@property (nonatomic) dispatch_queue_t delegateDispatchQueue; + +// Specifies whether SSL trust chain should NOT be evaluated. +// By default this flag is set to NO, meaning only secure SSL connections are allowed. +// For DEBUG builds this flag is ignored, and SSL connections are allowed regardless +// of the certificate trust configuration +@property (nonatomic, readwrite) BOOL allowsUntrustedSSLCertificates; + +@end + + +@implementation SRWebSocket { + NSInteger _webSocketVersion; + + NSOperationQueue *_delegateOperationQueue; + dispatch_queue_t _delegateDispatchQueue; + + dispatch_queue_t _workQueue; + NSMutableArray *_consumers; + + NSInputStream *_inputStream; + NSOutputStream *_outputStream; + + NSMutableData *_readBuffer; + NSUInteger _readBufferOffset; + + NSMutableData *_outputBuffer; + NSUInteger _outputBufferOffset; + + uint8_t _currentFrameOpcode; + size_t _currentFrameCount; + size_t _readOpCount; + uint32_t _currentStringScanPosition; + NSMutableData *_currentFrameData; + + NSString *_closeReason; + + NSString *_secKey; + NSString *_basicAuthorizationString; + + BOOL _pinnedCertFound; + + uint8_t _currentReadMaskKey[4]; + size_t _currentReadMaskOffset; + + BOOL _consumerStopped; + + BOOL _closeWhenFinishedWriting; + BOOL _failed; + + BOOL _secure; + NSURLRequest *_urlRequest; + + BOOL _sentClose; + BOOL _didFail; + BOOL _cleanupScheduled; + int _closeCode; + + BOOL _isPumping; + + NSMutableSet *_scheduledRunloops; + + // We use this to retain ourselves. 
+ __strong SRWebSocket *_selfRetain; + + NSArray *_requestedProtocols; + SRIOConsumerPool *_consumerPool; +} + +@synthesize delegate = _delegate; +@synthesize url = _url; +@synthesize readyState = _readyState; +@synthesize protocol = _protocol; + +static __strong NSData *CRLFCRLF; + ++ (void)initialize; +{ + CRLFCRLF = [[NSData alloc] initWithBytes:"\r\n\r\n" length:4]; +} + +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +{ + self = [super init]; + if (self) { + assert(request.URL); + _url = request.URL; + _urlRequest = request; + _allowsUntrustedSSLCertificates = allowsUntrustedSSLCertificates; + + _requestedProtocols = [protocols copy]; + + [self _SR_commonInit]; + } + + return self; +} + +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols; +{ + return [self initWithURLRequest:request protocols:protocols allowsUntrustedSSLCertificates:NO]; +} + +- (id)initWithURLRequest:(NSURLRequest *)request; +{ + return [self initWithURLRequest:request protocols:nil]; +} + +- (id)initWithURL:(NSURL *)url; +{ + return [self initWithURL:url protocols:nil]; +} + +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols; +{ + NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url]; + return [self initWithURLRequest:request protocols:protocols]; +} + +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +{ + NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url]; + return [self initWithURLRequest:request protocols:protocols allowsUntrustedSSLCertificates:allowsUntrustedSSLCertificates]; +} + +- (void)_SR_commonInit; +{ + NSString *scheme = _url.scheme.lowercaseString; + assert([scheme isEqualToString:@"ws"] || [scheme isEqualToString:@"http"] || [scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]); + + if 
([scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]) { + _secure = YES; + } + + _readyState = SR_CONNECTING; + _consumerStopped = YES; + _webSocketVersion = 13; + + _workQueue = dispatch_queue_create(NULL, DISPATCH_QUEUE_SERIAL); + + // Going to set a specific on the queue so we can validate we're on the work queue + dispatch_queue_set_specific(_workQueue, (__bridge void *)self, maybe_bridge(_workQueue), NULL); + + _delegateDispatchQueue = dispatch_get_main_queue(); + sr_dispatch_retain(_delegateDispatchQueue); + + _readBuffer = [[NSMutableData alloc] init]; + _outputBuffer = [[NSMutableData alloc] init]; + + _currentFrameData = [[NSMutableData alloc] init]; + + _consumers = [[NSMutableArray alloc] init]; + + _consumerPool = [[SRIOConsumerPool alloc] init]; + + _scheduledRunloops = [[NSMutableSet alloc] init]; + + [self _initializeStreams]; + + // default handlers +} + +- (void)assertOnWorkQueue; +{ + assert(dispatch_get_specific((__bridge void *)self) == maybe_bridge(_workQueue)); +} + +- (void)dealloc +{ + _inputStream.delegate = nil; + _outputStream.delegate = nil; + + [_inputStream close]; + [_outputStream close]; + + if (_workQueue) { + sr_dispatch_release(_workQueue); + _workQueue = NULL; + } + + if (_receivedHTTPHeaders) { + CFRelease(_receivedHTTPHeaders); + _receivedHTTPHeaders = NULL; + } + + if (_delegateDispatchQueue) { + sr_dispatch_release(_delegateDispatchQueue); + _delegateDispatchQueue = NULL; + } +} + +#ifndef NDEBUG + +- (void)setReadyState:(SRReadyState)aReadyState; +{ + assert(aReadyState > _readyState); + _readyState = aReadyState; +} + +#endif + +- (void)open; +{ + assert(_url); + NSAssert(_readyState == SR_CONNECTING, @"Cannot call -(void)open on SRWebSocket more than once"); + + _selfRetain = self; + + if (_urlRequest.timeoutInterval > 0) + { + dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, _urlRequest.timeoutInterval * NSEC_PER_SEC); + dispatch_after(popTime, dispatch_get_main_queue(), ^(void){ + if 
(self.readyState == SR_CONNECTING) + [self _failWithError:[NSError errorWithDomain:@"com.squareup.SocketRocket" code:504 userInfo:@{NSLocalizedDescriptionKey: @"Timeout Connecting to Server"}]]; + }); + } + + [self openConnection]; +} + +// Calls block on delegate queue +- (void)_performDelegateBlock:(dispatch_block_t)block; +{ + if (_delegateOperationQueue) { + [_delegateOperationQueue addOperationWithBlock:block]; + } else { + assert(_delegateDispatchQueue); + dispatch_async(_delegateDispatchQueue, block); + } +} + +- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue; +{ + if (queue) { + sr_dispatch_retain(queue); + } + + if (_delegateDispatchQueue) { + sr_dispatch_release(_delegateDispatchQueue); + } + + _delegateDispatchQueue = queue; +} + +- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage; +{ + NSString *acceptHeader = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(httpMessage, CFSTR("Sec-WebSocket-Accept"))); + + if (acceptHeader == nil) { + return NO; + } + + NSString *concattedString = [_secKey stringByAppendingString:SRWebSocketAppendToSecKeyString]; + NSString *expectedAccept = [concattedString stringBySHA1ThenBase64Encoding]; + + return [acceptHeader isEqualToString:expectedAccept]; +} + +- (void)_HTTPHeadersDidFinish; +{ + NSInteger responseCode = CFHTTPMessageGetResponseStatusCode(_receivedHTTPHeaders); + + if (responseCode >= 400) { + SRFastLog(@"Request failed with response code %d", responseCode); + [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2132 userInfo:@{NSLocalizedDescriptionKey:[NSString stringWithFormat:@"received bad response code from server %ld", (long)responseCode], SRHTTPResponseErrorKey:@(responseCode)}]]; + return; + } + + if(![self _checkHandshake:_receivedHTTPHeaders]) { + [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Invalid Sec-WebSocket-Accept response"] 
forKey:NSLocalizedDescriptionKey]]]; + return; + } + + NSString *negotiatedProtocol = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_receivedHTTPHeaders, CFSTR("Sec-WebSocket-Protocol"))); + if (negotiatedProtocol) { + // Make sure we requested the protocol + if ([_requestedProtocols indexOfObject:negotiatedProtocol] == NSNotFound) { + [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Server specified Sec-WebSocket-Protocol that wasn't requested"] forKey:NSLocalizedDescriptionKey]]]; + return; + } + + _protocol = negotiatedProtocol; + } + + self.readyState = SR_OPEN; + + if (!_didFail) { + [self _readFrameNew]; + } + + [self _performDelegateBlock:^{ + if ([self.delegate respondsToSelector:@selector(webSocketDidOpen:)]) { + [self.delegate webSocketDidOpen:self]; + }; + }]; +} + + +- (void)_readHTTPHeader; +{ + if (_receivedHTTPHeaders == NULL) { + _receivedHTTPHeaders = CFHTTPMessageCreateEmpty(NULL, NO); + } + + [self _readUntilHeaderCompleteWithCallback:^(SRWebSocket *self, NSData *data) { + CFHTTPMessageAppendBytes(_receivedHTTPHeaders, (const UInt8 *)data.bytes, data.length); + + if (CFHTTPMessageIsHeaderComplete(_receivedHTTPHeaders)) { + SRFastLog(@"Finished reading headers %@", CFBridgingRelease(CFHTTPMessageCopyAllHeaderFields(_receivedHTTPHeaders))); + [self _HTTPHeadersDidFinish]; + } else { + [self _readHTTPHeader]; + } + }]; +} + +- (void)didConnect; +{ + SRFastLog(@"Connected"); + CFHTTPMessageRef request = CFHTTPMessageCreateRequest(NULL, CFSTR("GET"), (__bridge CFURLRef)_url, kCFHTTPVersion1_1); + + // Set host first so it defaults + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Host"), (__bridge CFStringRef)(_url.port ? 
[NSString stringWithFormat:@"%@:%@", _url.host, _url.port] : _url.host)); + + NSMutableData *keyBytes = [[NSMutableData alloc] initWithLength:16]; + SecRandomCopyBytes(kSecRandomDefault, keyBytes.length, keyBytes.mutableBytes); + + if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) { + _secKey = [keyBytes base64EncodedStringWithOptions:0]; + } else { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + _secKey = [keyBytes base64Encoding]; +#pragma clang diagnostic pop + } + + assert([_secKey length] == 24); + + // Apply cookies if any have been provided + NSDictionary * cookies = [NSHTTPCookie requestHeaderFieldsWithCookies:[self requestCookies]]; + for (NSString * cookieKey in cookies) { + NSString * cookieValue = [cookies objectForKey:cookieKey]; + if ([cookieKey length] && [cookieValue length]) { + CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)cookieKey, (__bridge CFStringRef)cookieValue); + } + } + + // set header for http basic auth + if (_url.user.length && _url.password.length) { + NSData *userAndPassword = [[NSString stringWithFormat:@"%@:%@", _url.user, _url.password] dataUsingEncoding:NSUTF8StringEncoding]; + NSString *userAndPasswordBase64Encoded; + if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) { + userAndPasswordBase64Encoded = [userAndPassword base64EncodedStringWithOptions:0]; + } else { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + userAndPasswordBase64Encoded = [userAndPassword base64Encoding]; +#pragma clang diagnostic pop + } + _basicAuthorizationString = [NSString stringWithFormat:@"Basic %@", userAndPasswordBase64Encoded]; + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Authorization"), (__bridge CFStringRef)_basicAuthorizationString); + } + + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Upgrade"), CFSTR("websocket")); + CFHTTPMessageSetHeaderFieldValue(request, 
CFSTR("Connection"), CFSTR("Upgrade")); + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Key"), (__bridge CFStringRef)_secKey); + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Version"), (__bridge CFStringRef)[NSString stringWithFormat:@"%ld", (long)_webSocketVersion]); + + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Origin"), (__bridge CFStringRef)_url.SR_origin); + + if (_requestedProtocols) { + CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Protocol"), (__bridge CFStringRef)[_requestedProtocols componentsJoinedByString:@", "]); + } + + [_urlRequest.allHTTPHeaderFields enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) { + CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj); + }]; + + NSData *message = CFBridgingRelease(CFHTTPMessageCopySerializedMessage(request)); + + CFRelease(request); + + [self _writeData:message]; + [self _readHTTPHeader]; +} + +- (void)_initializeStreams; +{ + assert(_url.port.unsignedIntValue <= UINT32_MAX); + uint32_t port = _url.port.unsignedIntValue; + if (port == 0) { + if (!_secure) { + port = 80; + } else { + port = 443; + } + } + NSString *host = _url.host; + + CFReadStreamRef readStream = NULL; + CFWriteStreamRef writeStream = NULL; + + CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)host, port, &readStream, &writeStream); + + _outputStream = CFBridgingRelease(writeStream); + _inputStream = CFBridgingRelease(readStream); + + _inputStream.delegate = self; + _outputStream.delegate = self; +} + +- (void)_updateSecureStreamOptions; +{ + if (_secure) { + NSMutableDictionary *SSLOptions = [[NSMutableDictionary alloc] init]; + + [_outputStream setProperty:(__bridge id)kCFStreamSocketSecurityLevelNegotiatedSSL forKey:(__bridge id)kCFStreamPropertySocketSecurityLevel]; + + // If we're using pinned certs, don't validate the certificate chain + if ([_urlRequest SR_SSLPinnedCertificates].count) { + [SSLOptions 
setValue:@NO forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain]; + } + +#if DEBUG + self.allowsUntrustedSSLCertificates = YES; +#endif + + if (self.allowsUntrustedSSLCertificates) { + [SSLOptions setValue:@NO forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain]; + SRFastLog(@"Allowing connection to any root cert"); + } + + [_outputStream setProperty:SSLOptions + forKey:(__bridge id)kCFStreamPropertySSLSettings]; + } + + _inputStream.delegate = self; + _outputStream.delegate = self; + + [self setupNetworkServiceType:_urlRequest.networkServiceType]; +} + +- (void)setupNetworkServiceType:(NSURLRequestNetworkServiceType)requestNetworkServiceType +{ + NSString *networkServiceType; + switch (requestNetworkServiceType) { + case NSURLNetworkServiceTypeDefault: + break; + case NSURLNetworkServiceTypeVoIP: { + networkServiceType = NSStreamNetworkServiceTypeVoIP; +#if TARGET_OS_IPHONE && __IPHONE_9_0 + if (floor(NSFoundationVersionNumber) > NSFoundationVersionNumber_iOS_8_3) { + static dispatch_once_t predicate; + dispatch_once(&predicate, ^{ + NSLog(@"SocketRocket: %@ - this service type is deprecated in favor of using PushKit for VoIP control", networkServiceType); + }); + } +#endif + break; + } + case NSURLNetworkServiceTypeVideo: + networkServiceType = NSStreamNetworkServiceTypeVideo; + break; + case NSURLNetworkServiceTypeBackground: + networkServiceType = NSStreamNetworkServiceTypeBackground; + break; + case NSURLNetworkServiceTypeVoice: + networkServiceType = NSStreamNetworkServiceTypeVoice; + break; + } + + if (networkServiceType != nil) { + [_inputStream setProperty:networkServiceType forKey:NSStreamNetworkServiceType]; + [_outputStream setProperty:networkServiceType forKey:NSStreamNetworkServiceType]; + } +} + +- (void)openConnection; +{ + [self _updateSecureStreamOptions]; + + if (!_scheduledRunloops.count) { + [self scheduleInRunLoop:[NSRunLoop SR_networkRunLoop] forMode:NSDefaultRunLoopMode]; + } + + + [_outputStream open]; + [_inputStream open]; +} 
+ +- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; +{ + [_outputStream scheduleInRunLoop:aRunLoop forMode:mode]; + [_inputStream scheduleInRunLoop:aRunLoop forMode:mode]; + + [_scheduledRunloops addObject:@[aRunLoop, mode]]; +} + +- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; +{ + [_outputStream removeFromRunLoop:aRunLoop forMode:mode]; + [_inputStream removeFromRunLoop:aRunLoop forMode:mode]; + + [_scheduledRunloops removeObject:@[aRunLoop, mode]]; +} + +- (void)close; +{ + [self closeWithCode:SRStatusCodeNormal reason:nil]; +} + +- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason; +{ + assert(code); + dispatch_async(_workQueue, ^{ + if (self.readyState == SR_CLOSING || self.readyState == SR_CLOSED) { + return; + } + + BOOL wasConnecting = self.readyState == SR_CONNECTING; + + self.readyState = SR_CLOSING; + + SRFastLog(@"Closing with code %d reason %@", code, reason); + + if (wasConnecting) { + [self closeConnection]; + return; + } + + size_t maxMsgSize = [reason maximumLengthOfBytesUsingEncoding:NSUTF8StringEncoding]; + NSMutableData *mutablePayload = [[NSMutableData alloc] initWithLength:sizeof(uint16_t) + maxMsgSize]; + NSData *payload = mutablePayload; + + ((uint16_t *)mutablePayload.mutableBytes)[0] = EndianU16_BtoN(code); + + if (reason) { + NSRange remainingRange = {0}; + + NSUInteger usedLength = 0; + + BOOL success = [reason getBytes:(char *)mutablePayload.mutableBytes + sizeof(uint16_t) maxLength:payload.length - sizeof(uint16_t) usedLength:&usedLength encoding:NSUTF8StringEncoding options:NSStringEncodingConversionExternalRepresentation range:NSMakeRange(0, reason.length) remainingRange:&remainingRange]; + #pragma unused (success) + + assert(success); + assert(remainingRange.length == 0); + + if (usedLength != maxMsgSize) { + payload = [payload subdataWithRange:NSMakeRange(0, usedLength + sizeof(uint16_t))]; + } + } + + + [self _sendFrameWithOpcode:SROpCodeConnectionClose 
data:payload]; + }); +} + +- (void)_closeWithProtocolError:(NSString *)message; +{ + // Need to shunt this on the _callbackQueue first to see if they received any messages + [self _performDelegateBlock:^{ + [self closeWithCode:SRStatusCodeProtocolError reason:message]; + dispatch_async(_workQueue, ^{ + [self closeConnection]; + }); + }]; +} + +- (void)_failWithError:(NSError *)error; +{ + dispatch_async(_workQueue, ^{ + if (self.readyState != SR_CLOSED) { + _failed = YES; + [self _performDelegateBlock:^{ + if ([self.delegate respondsToSelector:@selector(webSocket:didFailWithError:)]) { + [self.delegate webSocket:self didFailWithError:error]; + } + }]; + + self.readyState = SR_CLOSED; + + SRFastLog(@"Failing with error %@", error.localizedDescription); + + [self closeConnection]; + [self _scheduleCleanup]; + } + }); +} + +- (void)_writeData:(NSData *)data; +{ + [self assertOnWorkQueue]; + + if (_closeWhenFinishedWriting) { + return; + } + [_outputBuffer appendData:data]; + [self _pumpWriting]; +} + +- (void)send:(id)data; +{ + NSAssert(self.readyState != SR_CONNECTING, @"Invalid State: Cannot call send: until connection is open"); + // TODO: maybe not copy this for performance + data = [data copy]; + dispatch_async(_workQueue, ^{ + if ([data isKindOfClass:[NSString class]]) { + [self _sendFrameWithOpcode:SROpCodeTextFrame data:[(NSString *)data dataUsingEncoding:NSUTF8StringEncoding]]; + } else if ([data isKindOfClass:[NSData class]]) { + [self _sendFrameWithOpcode:SROpCodeBinaryFrame data:data]; + } else if (data == nil) { + [self _sendFrameWithOpcode:SROpCodeTextFrame data:data]; + } else { + assert(NO); + } + }); +} + +- (void)sendPing:(NSData *)data; +{ + NSAssert(self.readyState == SR_OPEN, @"Invalid State: Cannot call send: until connection is open"); + // TODO: maybe not copy this for performance + data = [data copy] ?: [NSData data]; // It's okay for a ping to be empty + dispatch_async(_workQueue, ^{ + [self _sendFrameWithOpcode:SROpCodePing data:data]; + 
}); +} + +- (void)handlePing:(NSData *)pingData; +{ + // Need to pingpong this off _callbackQueue first to make sure messages happen in order + [self _performDelegateBlock:^{ + dispatch_async(_workQueue, ^{ + [self _sendFrameWithOpcode:SROpCodePong data:pingData]; + }); + }]; +} + +- (void)handlePong:(NSData *)pongData; +{ + SRFastLog(@"Received pong"); + [self _performDelegateBlock:^{ + if ([self.delegate respondsToSelector:@selector(webSocket:didReceivePong:)]) { + [self.delegate webSocket:self didReceivePong:pongData]; + } + }]; +} + +- (void)_handleMessage:(id)message +{ + SRFastLog(@"Received message"); + [self _performDelegateBlock:^{ + [self.delegate webSocket:self didReceiveMessage:message]; + }]; +} + + +static inline BOOL closeCodeIsValid(int closeCode) { + if (closeCode < 1000) { + return NO; + } + + if (closeCode >= 1000 && closeCode <= 1011) { + if (closeCode == 1004 || + closeCode == 1005 || + closeCode == 1006) { + return NO; + } + return YES; + } + + if (closeCode >= 3000 && closeCode <= 3999) { + return YES; + } + + if (closeCode >= 4000 && closeCode <= 4999) { + return YES; + } + + return NO; +} + +// Note from RFC: +// +// If there is a body, the first two +// bytes of the body MUST be a 2-byte unsigned integer (in network byte +// order) representing a status code with value /code/ defined in +// Section 7.4. Following the 2-byte integer the body MAY contain UTF-8 +// encoded data with value /reason/, the interpretation of which is not +// defined by this specification. 
+ +- (void)handleCloseWithData:(NSData *)data; +{ + size_t dataSize = data.length; + __block uint16_t closeCode = 0; + + SRFastLog(@"Received close frame"); + + if (dataSize == 1) { + // TODO handle error + [self _closeWithProtocolError:@"Payload for close must be larger than 2 bytes"]; + return; + } else if (dataSize >= 2) { + [data getBytes:&closeCode length:sizeof(closeCode)]; + _closeCode = EndianU16_BtoN(closeCode); + if (!closeCodeIsValid(_closeCode)) { + [self _closeWithProtocolError:[NSString stringWithFormat:@"Cannot have close code of %d", _closeCode]]; + return; + } + if (dataSize > 2) { + _closeReason = [[NSString alloc] initWithData:[data subdataWithRange:NSMakeRange(2, dataSize - 2)] encoding:NSUTF8StringEncoding]; + if (!_closeReason) { + [self _closeWithProtocolError:@"Close reason MUST be valid UTF-8"]; + return; + } + } + } else { + _closeCode = SRStatusNoStatusReceived; + } + + [self assertOnWorkQueue]; + + if (self.readyState == SR_OPEN) { + [self closeWithCode:1000 reason:nil]; + } + dispatch_async(_workQueue, ^{ + [self closeConnection]; + }); +} + +- (void)closeConnection; +{ + [self assertOnWorkQueue]; + SRFastLog(@"Trying to disconnect"); + _closeWhenFinishedWriting = YES; + [self _pumpWriting]; +} + +- (void)_handleFrameWithData:(NSData *)frameData opCode:(NSInteger)opcode; +{ + // Check that the current data is valid UTF8 + + BOOL isControlFrame = (opcode == SROpCodePing || opcode == SROpCodePong || opcode == SROpCodeConnectionClose); + if (!isControlFrame) { + [self _readFrameNew]; + } else { + dispatch_async(_workQueue, ^{ + [self _readFrameContinue]; + }); + } + + //frameData will be copied before passing to handlers + //otherwise there can be misbehaviours when value at the pointer is changed + switch (opcode) { + case SROpCodeTextFrame: { + if ([self.delegate respondsToSelector:@selector(webSocketShouldConvertTextFrameToString:)] && ![self.delegate webSocketShouldConvertTextFrameToString:self]) { + [self _handleMessage:[frameData 
copy]]; + } else { + NSString *str = [[NSString alloc] initWithData:frameData encoding:NSUTF8StringEncoding]; + if (str == nil && frameData) { + [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"]; + dispatch_async(_workQueue, ^{ + [self closeConnection]; + }); + return; + } + [self _handleMessage:str]; + } + break; + } + case SROpCodeBinaryFrame: + [self _handleMessage:[frameData copy]]; + break; + case SROpCodeConnectionClose: + [self handleCloseWithData:[frameData copy]]; + break; + case SROpCodePing: + [self handlePing:[frameData copy]]; + break; + case SROpCodePong: + [self handlePong:[frameData copy]]; + break; + default: + [self _closeWithProtocolError:[NSString stringWithFormat:@"Unknown opcode %ld", (long)opcode]]; + // TODO: Handle invalid opcode + break; + } +} + +- (void)_handleFrameHeader:(frame_header)frame_header curData:(NSData *)curData; +{ + assert(frame_header.opcode != 0); + + if (self.readyState == SR_CLOSED) { + return; + } + + + BOOL isControlFrame = (frame_header.opcode == SROpCodePing || frame_header.opcode == SROpCodePong || frame_header.opcode == SROpCodeConnectionClose); + + if (isControlFrame && !frame_header.fin) { + [self _closeWithProtocolError:@"Fragmented control frames not allowed"]; + return; + } + + if (isControlFrame && frame_header.payload_length >= 126) { + [self _closeWithProtocolError:@"Control frames cannot have payloads larger than 126 bytes"]; + return; + } + + if (!isControlFrame) { + _currentFrameOpcode = frame_header.opcode; + _currentFrameCount += 1; + } + + if (frame_header.payload_length == 0) { + if (isControlFrame) { + [self _handleFrameWithData:curData opCode:frame_header.opcode]; + } else { + if (frame_header.fin) { + [self _handleFrameWithData:_currentFrameData opCode:frame_header.opcode]; + } else { + // TODO add assert that opcode is not a control; + [self _readFrameContinue]; + } + } + } else { + assert(frame_header.payload_length <= SIZE_T_MAX); + [self 
_addConsumerWithDataLength:(size_t)frame_header.payload_length callback:^(SRWebSocket *self, NSData *newData) { + if (isControlFrame) { + [self _handleFrameWithData:newData opCode:frame_header.opcode]; + } else { + if (frame_header.fin) { + [self _handleFrameWithData:self->_currentFrameData opCode:frame_header.opcode]; + } else { + // TODO add assert that opcode is not a control; + [self _readFrameContinue]; + } + + } + } readToCurrentFrame:!isControlFrame unmaskBytes:frame_header.masked]; + } +} + +/* From RFC: + + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-------+-+-------------+-------------------------------+ + |F|R|R|R| opcode|M| Payload len | Extended payload length | + |I|S|S|S| (4) |A| (7) | (16/64) | + |N|V|V|V| |S| | (if payload len==126/127) | + | |1|2|3| |K| | | + +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - + + | Extended payload length continued, if payload len == 127 | + + - - - - - - - - - - - - - - - +-------------------------------+ + | |Masking-key, if MASK set to 1 | + +-------------------------------+-------------------------------+ + | Masking-key (continued) | Payload Data | + +-------------------------------- - - - - - - - - - - - - - - - + + : Payload Data continued ... : + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + | Payload Data continued ... 
| + +---------------------------------------------------------------+ + */ + +static const uint8_t SRFinMask = 0x80; +static const uint8_t SROpCodeMask = 0x0F; +static const uint8_t SRRsvMask = 0x70; +static const uint8_t SRMaskMask = 0x80; +static const uint8_t SRPayloadLenMask = 0x7F; + + +- (void)_readFrameContinue; +{ + assert((_currentFrameCount == 0 && _currentFrameOpcode == 0) || (_currentFrameCount > 0 && _currentFrameOpcode > 0)); + + [self _addConsumerWithDataLength:2 callback:^(SRWebSocket *self, NSData *data) { + __block frame_header header = {0}; + + const uint8_t *headerBuffer = (uint8_t *)data.bytes; + assert(data.length >= 2); + + if (headerBuffer[0] & SRRsvMask) { + [self _closeWithProtocolError:@"Server used RSV bits"]; + return; + } + + uint8_t receivedOpcode = (SROpCodeMask & headerBuffer[0]); + + BOOL isControlFrame = (receivedOpcode == SROpCodePing || receivedOpcode == SROpCodePong || receivedOpcode == SROpCodeConnectionClose); + + if (!isControlFrame && receivedOpcode != 0 && self->_currentFrameCount > 0) { + [self _closeWithProtocolError:@"all data frames after the initial data frame must have opcode 0"]; + return; + } + + if (receivedOpcode == 0 && self->_currentFrameCount == 0) { + [self _closeWithProtocolError:@"cannot continue a message"]; + return; + } + + header.opcode = receivedOpcode == 0 ? self->_currentFrameOpcode : receivedOpcode; + + header.fin = !!(SRFinMask & headerBuffer[0]); + + + header.masked = !!(SRMaskMask & headerBuffer[1]); + header.payload_length = SRPayloadLenMask & headerBuffer[1]; + + headerBuffer = NULL; + + if (header.masked) { + [self _closeWithProtocolError:@"Client must receive unmasked data"]; + } + + size_t extra_bytes_needed = header.masked ? 
sizeof(_currentReadMaskKey) : 0; + + if (header.payload_length == 126) { + extra_bytes_needed += sizeof(uint16_t); + } else if (header.payload_length == 127) { + extra_bytes_needed += sizeof(uint64_t); + } + + if (extra_bytes_needed == 0) { + [self _handleFrameHeader:header curData:self->_currentFrameData]; + } else { + [self _addConsumerWithDataLength:extra_bytes_needed callback:^(SRWebSocket *self, NSData *data) { + size_t mapped_size = data.length; + #pragma unused (mapped_size) + const void *mapped_buffer = data.bytes; + size_t offset = 0; + + if (header.payload_length == 126) { + assert(mapped_size >= sizeof(uint16_t)); + uint16_t newLen = EndianU16_BtoN(*(uint16_t *)(mapped_buffer)); + header.payload_length = newLen; + offset += sizeof(uint16_t); + } else if (header.payload_length == 127) { + assert(mapped_size >= sizeof(uint64_t)); + header.payload_length = EndianU64_BtoN(*(uint64_t *)(mapped_buffer)); + offset += sizeof(uint64_t); + } else { + assert(header.payload_length < 126 && header.payload_length >= 0); + } + + if (header.masked) { + assert(mapped_size >= sizeof(_currentReadMaskOffset) + offset); + memcpy(self->_currentReadMaskKey, ((uint8_t *)mapped_buffer) + offset, sizeof(self->_currentReadMaskKey)); + } + + [self _handleFrameHeader:header curData:self->_currentFrameData]; + } readToCurrentFrame:NO unmaskBytes:NO]; + } + } readToCurrentFrame:NO unmaskBytes:NO]; +} + +- (void)_readFrameNew; +{ + dispatch_async(_workQueue, ^{ + [_currentFrameData setLength:0]; + + _currentFrameOpcode = 0; + _currentFrameCount = 0; + _readOpCount = 0; + _currentStringScanPosition = 0; + + [self _readFrameContinue]; + }); +} + +- (void)_pumpWriting; +{ + [self assertOnWorkQueue]; + + NSUInteger dataLength = _outputBuffer.length; + if (dataLength - _outputBufferOffset > 0 && _outputStream.hasSpaceAvailable) { + NSInteger bytesWritten = [_outputStream write:(uint8_t *)_outputBuffer.bytes + _outputBufferOffset maxLength:dataLength - _outputBufferOffset]; + if 
(bytesWritten == -1) { + [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2145 userInfo:[NSDictionary dictionaryWithObject:@"Error writing to stream" forKey:NSLocalizedDescriptionKey]]]; + return; + } + + _outputBufferOffset += bytesWritten; + + if (_outputBufferOffset > 4096 && _outputBufferOffset > (_outputBuffer.length >> 1)) { + _outputBuffer = [[NSMutableData alloc] initWithBytes:(char *)_outputBuffer.bytes + _outputBufferOffset length:_outputBuffer.length - _outputBufferOffset]; + _outputBufferOffset = 0; + } + } + + if (_closeWhenFinishedWriting && + _outputBuffer.length - _outputBufferOffset == 0 && + (_inputStream.streamStatus != NSStreamStatusNotOpen && + _inputStream.streamStatus != NSStreamStatusClosed) && + !_sentClose) { + _sentClose = YES; + + @synchronized(self) { + [_outputStream close]; + [_inputStream close]; + + + for (NSArray *runLoop in [_scheduledRunloops copy]) { + [self unscheduleFromRunLoop:[runLoop objectAtIndex:0] forMode:[runLoop objectAtIndex:1]]; + } + } + + if (!_failed) { + [self _performDelegateBlock:^{ + if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) { + [self.delegate webSocket:self didCloseWithCode:_closeCode reason:_closeReason wasClean:YES]; + } + }]; + } + + [self _scheduleCleanup]; + } +} + +- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback; +{ + [self assertOnWorkQueue]; + [self _addConsumerWithScanner:consumer callback:callback dataLength:0]; +} + +- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes; +{ + [self assertOnWorkQueue]; + assert(dataLength); + + [_consumers addObject:[_consumerPool consumerWithScanner:nil handler:callback bytesNeeded:dataLength readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes]]; + [self _pumpScanner]; +} + +- (void)_addConsumerWithScanner:(stream_scanner)consumer 
callback:(data_callback)callback dataLength:(size_t)dataLength; +{ + [self assertOnWorkQueue]; + [_consumers addObject:[_consumerPool consumerWithScanner:consumer handler:callback bytesNeeded:dataLength readToCurrentFrame:NO unmaskBytes:NO]]; + [self _pumpScanner]; +} + + +- (void)_scheduleCleanup +{ + @synchronized(self) { + if (_cleanupScheduled) { + return; + } + + _cleanupScheduled = YES; + + // Cleanup NSStream delegate's in the same RunLoop used by the streams themselves: + // This way we'll prevent race conditions between handleEvent and SRWebsocket's dealloc + NSTimer *timer = [NSTimer timerWithTimeInterval:(0.0f) target:self selector:@selector(_cleanupSelfReference:) userInfo:nil repeats:NO]; + [[NSRunLoop SR_networkRunLoop] addTimer:timer forMode:NSDefaultRunLoopMode]; + } +} + +- (void)_cleanupSelfReference:(NSTimer *)timer +{ + @synchronized(self) { + // Nuke NSStream delegate's + _inputStream.delegate = nil; + _outputStream.delegate = nil; + + // Remove the streams, right now, from the networkRunLoop + [_inputStream close]; + [_outputStream close]; + } + + // Cleanup selfRetain in the same GCD queue as usual + dispatch_async(_workQueue, ^{ + _selfRetain = nil; + }); +} + + +static const char CRLFCRLFBytes[] = {'\r', '\n', '\r', '\n'}; + +- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler; +{ + [self _readUntilBytes:CRLFCRLFBytes length:sizeof(CRLFCRLFBytes) callback:dataHandler]; +} + +- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler; +{ + // TODO optimize so this can continue from where we last searched + stream_scanner consumer = ^size_t(NSData *data) { + __block size_t found_size = 0; + __block size_t match_count = 0; + + size_t size = data.length; + const unsigned char *buffer = (unsigned char *)data.bytes; + for (size_t i = 0; i < size; i++ ) { + if (((const unsigned char *)buffer)[i] == ((const unsigned char *)bytes)[match_count]) { + match_count += 1; + if (match_count == 
length) { + found_size = i + 1; + break; + } + } else { + match_count = 0; + } + } + return found_size; + }; + [self _addConsumerWithScanner:consumer callback:dataHandler]; +} + + +// Returns true if did work +- (BOOL)_innerPumpScanner { + + BOOL didWork = NO; + + if (self.readyState >= SR_CLOSED) { + return didWork; + } + + if (!_consumers.count) { + return didWork; + } + + size_t curSize = _readBuffer.length - _readBufferOffset; + if (!curSize) { + return didWork; + } + + SRIOConsumer *consumer = [_consumers objectAtIndex:0]; + + size_t bytesNeeded = consumer.bytesNeeded; + + size_t foundSize = 0; + if (consumer.consumer) { + NSData *tempView = [NSData dataWithBytesNoCopy:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset freeWhenDone:NO]; + foundSize = consumer.consumer(tempView); + } else { + assert(consumer.bytesNeeded); + if (curSize >= bytesNeeded) { + foundSize = bytesNeeded; + } else if (consumer.readToCurrentFrame) { + foundSize = curSize; + } + } + + NSData *slice = nil; + if (consumer.readToCurrentFrame || foundSize) { + NSRange sliceRange = NSMakeRange(_readBufferOffset, foundSize); + slice = [_readBuffer subdataWithRange:sliceRange]; + + _readBufferOffset += foundSize; + + if (_readBufferOffset > 4096 && _readBufferOffset > (_readBuffer.length >> 1)) { + _readBuffer = [[NSMutableData alloc] initWithBytes:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset]; _readBufferOffset = 0; + } + + if (consumer.unmaskBytes) { + NSMutableData *mutableSlice = [slice mutableCopy]; + + NSUInteger len = mutableSlice.length; + uint8_t *bytes = (uint8_t *)mutableSlice.mutableBytes; + + for (NSUInteger i = 0; i < len; i++) { + bytes[i] = bytes[i] ^ _currentReadMaskKey[_currentReadMaskOffset % sizeof(_currentReadMaskKey)]; + _currentReadMaskOffset += 1; + } + + slice = mutableSlice; + } + + if (consumer.readToCurrentFrame) { + [_currentFrameData appendData:slice]; + + _readOpCount += 1; + + 
if (_currentFrameOpcode == SROpCodeTextFrame) { + // Validate UTF8 stuff. + size_t currentDataSize = _currentFrameData.length; + if (_currentFrameOpcode == SROpCodeTextFrame && currentDataSize > 0) { + // TODO: Optimize the crap out of this. Don't really have to copy all the data each time + + size_t scanSize = currentDataSize - _currentStringScanPosition; + + NSData *scan_data = [_currentFrameData subdataWithRange:NSMakeRange(_currentStringScanPosition, scanSize)]; + int32_t valid_utf8_size = validate_dispatch_data_partial_string(scan_data); + + if (valid_utf8_size == -1) { + [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"]; + dispatch_async(_workQueue, ^{ + [self closeConnection]; + }); + return didWork; + } else { + _currentStringScanPosition += valid_utf8_size; + } + } + + } + + consumer.bytesNeeded -= foundSize; + + if (consumer.bytesNeeded == 0) { + [_consumers removeObjectAtIndex:0]; + consumer.handler(self, nil); + [_consumerPool returnConsumer:consumer]; + didWork = YES; + } + } else if (foundSize) { + [_consumers removeObjectAtIndex:0]; + consumer.handler(self, slice); + [_consumerPool returnConsumer:consumer]; + didWork = YES; + } + } + return didWork; +} + +-(void)_pumpScanner; +{ + [self assertOnWorkQueue]; + + if (!_isPumping) { + _isPumping = YES; + } else { + return; + } + + while ([self _innerPumpScanner]) { + + } + + _isPumping = NO; +} + +//#define NOMASK + +static const size_t SRFrameHeaderOverhead = 32; + +- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data; +{ + [self assertOnWorkQueue]; + + if (nil == data) { + return; + } + + NSAssert([data isKindOfClass:[NSData class]] || [data isKindOfClass:[NSString class]], @"NSString or NSData"); + + size_t payloadLength = [data isKindOfClass:[NSString class]] ? 
[(NSString *)data lengthOfBytesUsingEncoding:NSUTF8StringEncoding] : [data length]; + + NSMutableData *frame = [[NSMutableData alloc] initWithLength:payloadLength + SRFrameHeaderOverhead]; + if (!frame) { + [self closeWithCode:SRStatusCodeMessageTooBig reason:@"Message too big"]; + return; + } + uint8_t *frame_buffer = (uint8_t *)[frame mutableBytes]; + + // set fin + frame_buffer[0] = SRFinMask | opcode; + + BOOL useMask = YES; +#ifdef NOMASK + useMask = NO; +#endif + + if (useMask) { + // set the mask and header + frame_buffer[1] |= SRMaskMask; + } + + size_t frame_buffer_size = 2; + + const uint8_t *unmasked_payload = NULL; + if ([data isKindOfClass:[NSData class]]) { + unmasked_payload = (uint8_t *)[data bytes]; + } else if ([data isKindOfClass:[NSString class]]) { + unmasked_payload = (const uint8_t *)[data UTF8String]; + } else { + return; + } + + if (payloadLength < 126) { + frame_buffer[1] |= payloadLength; + } else if (payloadLength <= UINT16_MAX) { + frame_buffer[1] |= 126; + *((uint16_t *)(frame_buffer + frame_buffer_size)) = EndianU16_BtoN((uint16_t)payloadLength); + frame_buffer_size += sizeof(uint16_t); + } else { + frame_buffer[1] |= 127; + *((uint64_t *)(frame_buffer + frame_buffer_size)) = EndianU64_BtoN((uint64_t)payloadLength); + frame_buffer_size += sizeof(uint64_t); + } + + if (!useMask) { + for (size_t i = 0; i < payloadLength; i++) { + frame_buffer[frame_buffer_size] = unmasked_payload[i]; + frame_buffer_size += 1; + } + } else { + uint8_t *mask_key = frame_buffer + frame_buffer_size; + SecRandomCopyBytes(kSecRandomDefault, sizeof(uint32_t), (uint8_t *)mask_key); + frame_buffer_size += sizeof(uint32_t); + + // TODO: could probably optimize this with SIMD + for (size_t i = 0; i < payloadLength; i++) { + frame_buffer[frame_buffer_size] = unmasked_payload[i] ^ mask_key[i % sizeof(uint32_t)]; + frame_buffer_size += 1; + } + } + + assert(frame_buffer_size <= [frame length]); + frame.length = frame_buffer_size; + + [self _writeData:frame]; +} + +- 
(void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode; +{ + __weak typeof(self) weakSelf = self; + + if (_secure && !_pinnedCertFound && (eventCode == NSStreamEventHasBytesAvailable || eventCode == NSStreamEventHasSpaceAvailable)) { + + NSArray *sslCerts = [_urlRequest SR_SSLPinnedCertificates]; + if (sslCerts) { + SecTrustRef secTrust = (__bridge SecTrustRef)[aStream propertyForKey:(__bridge id)kCFStreamPropertySSLPeerTrust]; + if (secTrust) { + NSInteger numCerts = SecTrustGetCertificateCount(secTrust); + for (NSInteger i = 0; i < numCerts && !_pinnedCertFound; i++) { + SecCertificateRef cert = SecTrustGetCertificateAtIndex(secTrust, i); + NSData *certData = CFBridgingRelease(SecCertificateCopyData(cert)); + + for (id ref in sslCerts) { + SecCertificateRef trustedCert = (__bridge SecCertificateRef)ref; + NSData *trustedCertData = CFBridgingRelease(SecCertificateCopyData(trustedCert)); + + if ([trustedCertData isEqualToData:certData]) { + _pinnedCertFound = YES; + break; + } + } + } + } + + if (!_pinnedCertFound) { + dispatch_async(_workQueue, ^{ + NSDictionary *userInfo = @{ NSLocalizedDescriptionKey : @"Invalid server cert" }; + [weakSelf _failWithError:[NSError errorWithDomain:@"org.lolrus.SocketRocket" code:23556 userInfo:userInfo]]; + }); + return; + } else if (aStream == _outputStream) { + dispatch_async(_workQueue, ^{ + [self didConnect]; + }); + } + } + } + + dispatch_async(_workQueue, ^{ + [weakSelf safeHandleEvent:eventCode stream:aStream]; + }); +} + +- (void)safeHandleEvent:(NSStreamEvent)eventCode stream:(NSStream *)aStream +{ + switch (eventCode) { + case NSStreamEventOpenCompleted: { + SRFastLog(@"NSStreamEventOpenCompleted %@", aStream); + if (self.readyState >= SR_CLOSING) { + return; + } + assert(_readBuffer); + + // didConnect fires after certificate verification if we're using pinned certificates. 
+ BOOL usingPinnedCerts = [[_urlRequest SR_SSLPinnedCertificates] count] > 0; + if ((!_secure || !usingPinnedCerts) && self.readyState == SR_CONNECTING && aStream == _inputStream) { + [self didConnect]; + } + [self _pumpWriting]; + [self _pumpScanner]; + break; + } + + case NSStreamEventErrorOccurred: { + SRFastLog(@"NSStreamEventErrorOccurred %@ %@", aStream, [[aStream streamError] copy]); + /// TODO specify error better! + [self _failWithError:aStream.streamError]; + _readBufferOffset = 0; + [_readBuffer setLength:0]; + break; + + } + + case NSStreamEventEndEncountered: { + [self _pumpScanner]; + SRFastLog(@"NSStreamEventEndEncountered %@", aStream); + if (aStream.streamError) { + [self _failWithError:aStream.streamError]; + } else { + dispatch_async(_workQueue, ^{ + if (self.readyState != SR_CLOSED) { + self.readyState = SR_CLOSED; + [self _scheduleCleanup]; + } + + if (!_sentClose && !_failed) { + _sentClose = YES; + // If we get closed in this state it's probably not clean because we should be sending this when we send messages + [self _performDelegateBlock:^{ + if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) { + [self.delegate webSocket:self didCloseWithCode:SRStatusCodeGoingAway reason:@"Stream end encountered" wasClean:NO]; + } + }]; + } + }); + } + + break; + } + + case NSStreamEventHasBytesAvailable: { + SRFastLog(@"NSStreamEventHasBytesAvailable %@", aStream); + const int bufferSize = 2048; + uint8_t buffer[bufferSize]; + + while (_inputStream.hasBytesAvailable) { + NSInteger bytes_read = [_inputStream read:buffer maxLength:bufferSize]; + + if (bytes_read > 0) { + [_readBuffer appendBytes:buffer length:bytes_read]; + } else if (bytes_read < 0) { + [self _failWithError:_inputStream.streamError]; + } + + if (bytes_read != bufferSize) { + break; + } + }; + [self _pumpScanner]; + break; + } + + case NSStreamEventHasSpaceAvailable: { + SRFastLog(@"NSStreamEventHasSpaceAvailable %@", aStream); + [self 
_pumpWriting]; + break; + } + + default: + SRFastLog(@"(default) %@", aStream); + break; + } +} + +@end + + +@implementation SRIOConsumer + +@synthesize bytesNeeded = _bytesNeeded; +@synthesize consumer = _scanner; +@synthesize handler = _handler; +@synthesize readToCurrentFrame = _readToCurrentFrame; +@synthesize unmaskBytes = _unmaskBytes; + +- (void)setupWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes; +{ + _scanner = [scanner copy]; + _handler = [handler copy]; + _bytesNeeded = bytesNeeded; + _readToCurrentFrame = readToCurrentFrame; + _unmaskBytes = unmaskBytes; + assert(_scanner || _bytesNeeded); +} + + +@end + + +@implementation SRIOConsumerPool { + NSUInteger _poolSize; + NSMutableArray *_bufferedConsumers; +} + +- (id)initWithBufferCapacity:(NSUInteger)poolSize; +{ + self = [super init]; + if (self) { + _poolSize = poolSize; + _bufferedConsumers = [[NSMutableArray alloc] initWithCapacity:poolSize]; + } + return self; +} + +- (id)init +{ + return [self initWithBufferCapacity:8]; +} + +- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes; +{ + SRIOConsumer *consumer = nil; + if (_bufferedConsumers.count) { + consumer = [_bufferedConsumers lastObject]; + [_bufferedConsumers removeLastObject]; + } else { + consumer = [[SRIOConsumer alloc] init]; + } + + [consumer setupWithScanner:scanner handler:handler bytesNeeded:bytesNeeded readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes]; + + return consumer; +} + +- (void)returnConsumer:(SRIOConsumer *)consumer; +{ + if (_bufferedConsumers.count < _poolSize) { + [_bufferedConsumers addObject:consumer]; + } +} + +@end + + +@implementation NSURLRequest (SRCertificateAdditions) + +- (NSArray *)SR_SSLPinnedCertificates; +{ + return [NSURLProtocol 
propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self]; +} + +@end + +@implementation NSMutableURLRequest (SRCertificateAdditions) + +- (NSArray *)SR_SSLPinnedCertificates; +{ + return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self]; +} + +- (void)setSR_SSLPinnedCertificates:(NSArray *)SR_SSLPinnedCertificates; +{ + [NSURLProtocol setProperty:SR_SSLPinnedCertificates forKey:@"SR_SSLPinnedCertificates" inRequest:self]; +} + +@end + +@implementation NSURL (SRWebSocket) + +- (NSString *)SR_origin; +{ + NSString *scheme = [self.scheme lowercaseString]; + + if ([scheme isEqualToString:@"wss"]) { + scheme = @"https"; + } else if ([scheme isEqualToString:@"ws"]) { + scheme = @"http"; + } + + BOOL portIsDefault = !self.port || + ([scheme isEqualToString:@"http"] && self.port.integerValue == 80) || + ([scheme isEqualToString:@"https"] && self.port.integerValue == 443); + + if (!portIsDefault) { + return [NSString stringWithFormat:@"%@://%@:%@", scheme, self.host, self.port]; + } else { + return [NSString stringWithFormat:@"%@://%@", scheme, self.host]; + } +} + +@end + +//#define SR_ENABLE_LOG + +static inline void SRFastLog(NSString *format, ...) { +#ifdef SR_ENABLE_LOG + __block va_list arg_list; + va_start (arg_list, format); + + NSString *formattedString = [[NSString alloc] initWithFormat:format arguments:arg_list]; + + va_end(arg_list); + + NSLog(@"[SR] %@", formattedString); +#endif +} + + +#ifdef HAS_ICU + +static inline int32_t validate_dispatch_data_partial_string(NSData *data) { + if ([data length] > INT32_MAX) { + // INT32_MAX is the limit so long as this Framework is using 32 bit ints everywhere. 
+ return -1; + } + + int32_t size = (int32_t)[data length]; + + const void * contents = [data bytes]; + const uint8_t *str = (const uint8_t *)contents; + + UChar32 codepoint = 1; + int32_t offset = 0; + int32_t lastOffset = 0; + while(offset < size && codepoint > 0) { + lastOffset = offset; + U8_NEXT(str, offset, size, codepoint); + } + + if (codepoint == -1) { + // Check to see if the last byte is valid or whether it was just continuing + if (!U8_IS_LEAD(str[lastOffset]) || U8_COUNT_TRAIL_BYTES(str[lastOffset]) + lastOffset < (int32_t)size) { + + size = -1; + } else { + uint8_t leadByte = str[lastOffset]; + U8_MASK_LEAD_BYTE(leadByte, U8_COUNT_TRAIL_BYTES(leadByte)); + + for (int i = lastOffset + 1; i < offset; i++) { + if (U8_IS_SINGLE(str[i]) || U8_IS_LEAD(str[i]) || !U8_IS_TRAIL(str[i])) { + size = -1; + } + } + + if (size != -1) { + size = lastOffset; + } + } + } + + if (size != -1 && ![[NSString alloc] initWithBytesNoCopy:(char *)[data bytes] length:size encoding:NSUTF8StringEncoding freeWhenDone:NO]) { + size = -1; + } + + return size; +} + +#else + +// This is a hack, and probably not optimal +static inline int32_t validate_dispatch_data_partial_string(NSData *data) { + static const int maxCodepointSize = 3; + + for (int i = 0; i < maxCodepointSize; i++) { + NSString *str = [[NSString alloc] initWithBytesNoCopy:(char *)data.bytes length:data.length - i encoding:NSUTF8StringEncoding freeWhenDone:NO]; + if (str) { + return (int32_t)data.length - i; + } + } + + return -1; +} + +#endif + +static _SRRunLoopThread *networkThread = nil; +static NSRunLoop *networkRunLoop = nil; + +@implementation NSRunLoop (SRWebSocket) + ++ (NSRunLoop *)SR_networkRunLoop { + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + networkThread = [[_SRRunLoopThread alloc] init]; + networkThread.name = @"com.squareup.SocketRocket.NetworkThread"; + [networkThread start]; + networkRunLoop = networkThread.runLoop; + }); + + return networkRunLoop; +} + +@end + + 
+@implementation _SRRunLoopThread { + dispatch_group_t _waitGroup; +} + +@synthesize runLoop = _runLoop; + +- (void)dealloc +{ + sr_dispatch_release(_waitGroup); +} + +- (id)init +{ + self = [super init]; + if (self) { + _waitGroup = dispatch_group_create(); + dispatch_group_enter(_waitGroup); + } + return self; +} + +- (void)main; +{ + @autoreleasepool { + _runLoop = [NSRunLoop currentRunLoop]; + dispatch_group_leave(_waitGroup); + + // Add an empty run loop source to prevent runloop from spinning. + CFRunLoopSourceContext sourceCtx = { + .version = 0, + .info = NULL, + .retain = NULL, + .release = NULL, + .copyDescription = NULL, + .equal = NULL, + .hash = NULL, + .schedule = NULL, + .cancel = NULL, + .perform = NULL + }; + CFRunLoopSourceRef source = CFRunLoopSourceCreate(NULL, 0, &sourceCtx); + CFRunLoopAddSource(CFRunLoopGetCurrent(), source, kCFRunLoopDefaultMode); + CFRelease(source); + + while ([_runLoop runMode:NSDefaultRunLoopMode beforeDate:[NSDate distantFuture]]) { + + } + assert(NO); + } +} + +- (NSRunLoop *)runLoop; +{ + dispatch_group_wait(_waitGroup, DISPATCH_TIME_FOREVER); + return _runLoop; +} + +@end diff --git a/OrderScheduling/Video/VideoTools/g711.h b/OrderScheduling/Video/VideoTools/g711.h new file mode 100644 index 0000000..646b4b6 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g711.h @@ -0,0 +1,30 @@ +/* + Copyright (c) 2013-2016 EasyDarwin.ORG. All rights reserved. 
+ Github: https://github.com/EasyDarwin + WEChat: EasyDarwin + Website: http://www.easydarwin.org +*/ + +#ifndef __G_711_H_ +#define __G_711_H_ + +#include <stdio.h> + +enum _e_g711_tp +{ + TP_ALAW, //G711A + TP_ULAW //G711U +}; + +unsigned char linear2alaw(int pcm_val); /* 2's complement (16-bit range) */ +int alaw2linear(unsigned char a_val); + +unsigned char linear2ulaw(int pcm_val); /* 2's complement (16-bit range) */ +int ulaw2linear(unsigned char u_val); + +unsigned char alaw2ulaw(unsigned char aval); +unsigned char ulaw2alaw(unsigned char uval); + +int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type); + +#endif diff --git a/OrderScheduling/Video/VideoTools/g711.m b/OrderScheduling/Video/VideoTools/g711.m new file mode 100644 index 0000000..102ef1c --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g711.m @@ -0,0 +1,306 @@ +/* + * g711.c + * + * u-law, A-law and linear PCM conversions. + */ + +//#include "stdafx.h" +#include <stdio.h> +#include <stdlib.h> +#include "g711.h" + +#define SIGN_BIT (0x80) /* Sign bit for a A-law byte. */ +#define QUANT_MASK (0xf) /* Quantization field mask. */ +#define NSEGS (8) /* Number of A-law segments. */ +#define SEG_SHIFT (4) /* Left shift for segment number. */ +#define SEG_MASK (0x70) /* Segment field mask. 
*/ + +static short seg_end[8] = {0xFF, 0x1FF, 0x3FF, 0x7FF, + 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF}; + +/* copy from CCITT G.711 specifications */ +unsigned char _u2a[128] = { /* u- to A-law conversions */ + 1, 1, 2, 2, 3, 3, 4, 4, + 5, 5, 6, 6, 7, 7, 8, 8, + 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, + 25, 27, 29, 31, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, + 46, 48, 49, 50, 51, 52, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, + 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, + 81, 82, 83, 84, 85, 86, 87, 88, + 89, 90, 91, 92, 93, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, + 121, 122, 123, 124, 125, 126, 127, 128}; + +unsigned char _a2u[128] = { /* A- to u-law conversions */ + 1, 3, 5, 7, 9, 11, 13, 15, + 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, + 32, 32, 33, 33, 34, 34, 35, 35, + 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 48, 49, 49, + 50, 51, 52, 53, 54, 55, 56, 57, + 58, 59, 60, 61, 62, 63, 64, 64, + 65, 66, 67, 68, 69, 70, 71, 72, + 73, 74, 75, 76, 77, 78, 79, 79, + 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, + 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, + 120, 121, 122, 123, 124, 125, 126, 127}; + +static int +search( + int val, + short *table, + int size) +{ + int i; + + for (i = 0; i < size; i++) { + if (val <= *table++) + return (i); + } + return (size); +} + +/* + * linear2alaw() - Convert a 16-bit linear PCM value to 8-bit A-law + * + * linear2alaw() accepts an 16-bit integer and encodes it as A-law data. 
+ * + * Linear Input Code Compressed Code + * ------------------------ --------------- + * 0000000wxyza 000wxyz + * 0000001wxyza 001wxyz + * 000001wxyzab 010wxyz + * 00001wxyzabc 011wxyz + * 0001wxyzabcd 100wxyz + * 001wxyzabcde 101wxyz + * 01wxyzabcdef 110wxyz + * 1wxyzabcdefg 111wxyz + * + * For further information see John C. Bellamy's Digital Telephony, 1982, + * John Wiley & Sons, pps 98-111 and 472-476. + */ +unsigned char +linear2alaw( + int pcm_val) /* 2's complement (16-bit range) */ +{ + int mask; + int seg; + unsigned char aval; + + if (pcm_val >= 0) { + mask = 0xD5; /* sign (7th) bit = 1 */ + } else { + mask = 0x55; /* sign bit = 0 */ + pcm_val = -pcm_val - 8; + } + + /* Convert the scaled magnitude to segment number. */ + seg = search(pcm_val, seg_end, 8); + + /* Combine the sign, segment, and quantization bits. */ + + if (seg >= 8) /* out of range, return maximum value. */ + return (0x7F ^ mask); + else { + aval = seg << SEG_SHIFT; + if (seg < 2) + aval |= (pcm_val >> 4) & QUANT_MASK; + else + aval |= (pcm_val >> (seg + 3)) & QUANT_MASK; + return (aval ^ mask); + } +} + +/* + * alaw2linear() - Convert an A-law value to 16-bit linear PCM + * + */ +int +alaw2linear( + unsigned char a_val) +{ + int t; + int seg; + + a_val ^= 0x55; + + t = (a_val & QUANT_MASK) << 4; + seg = ((unsigned)a_val & SEG_MASK) >> SEG_SHIFT; + switch (seg) { + case 0: + t += 8; + break; + case 1: + t += 0x108; + break; + default: + t += 0x108; + t <<= seg - 1; + } + return ((a_val & SIGN_BIT) ? t : -t); +} + +#define BIAS (0x84) /* Bias for linear code. */ + +/* + * linear2ulaw() - Convert a linear PCM value to u-law + * + * In order to simplify the encoding process, the original linear magnitude + * is biased by adding 33 which shifts the encoding range from (0 - 8158) to + * (33 - 8191). 
The result can be seen in the following encoding table: + * + * Biased Linear Input Code Compressed Code + * ------------------------ --------------- + * 00000001wxyza 000wxyz + * 0000001wxyzab 001wxyz + * 000001wxyzabc 010wxyz + * 00001wxyzabcd 011wxyz + * 0001wxyzabcde 100wxyz + * 001wxyzabcdef 101wxyz + * 01wxyzabcdefg 110wxyz + * 1wxyzabcdefgh 111wxyz + * + * Each biased linear code has a leading 1 which identifies the segment + * number. The value of the segment number is equal to 7 minus the number + * of leading 0's. The quantization interval is directly available as the + * four bits wxyz. * The trailing bits (a - h) are ignored. + * + * Ordinarily the complement of the resulting code word is used for + * transmission, and so the code word is complemented before it is returned. + * + * For further information see John C. Bellamy's Digital Telephony, 1982, + * John Wiley & Sons, pps 98-111 and 472-476. + */ +unsigned char +linear2ulaw( + int pcm_val) /* 2's complement (16-bit range) */ +{ + int mask; + int seg; + unsigned char uval; + + /* Get the sign and the magnitude of the value. */ + if (pcm_val < 0) { + pcm_val = BIAS - pcm_val; + mask = 0x7F; + } else { + pcm_val += BIAS; + mask = 0xFF; + } + + /* Convert the scaled magnitude to segment number. */ + seg = search(pcm_val, seg_end, 8); + + /* + * Combine the sign, segment, quantization bits; + * and complement the code word. + */ + if (seg >= 8) /* out of range, return maximum value. */ + return (0x7F ^ mask); + else { + uval = (seg << 4) | ((pcm_val >> (seg + 3)) & 0xF); + return (uval ^ mask); + } + +} + +/* + * ulaw2linear() - Convert a u-law value to 16-bit linear PCM + * + * First, a biased linear code is derived from the code word. An unbiased + * output can then be obtained by subtracting 33 from the biased code. + * + * Note that this function expects to be passed the complement of the + * original code word. This is in keeping with ISDN conventions. 
+ */ +int +ulaw2linear( + unsigned char u_val) +{ + int t; + + /* Complement to obtain normal u-law value. */ + u_val = ~u_val; + + /* + * Extract and bias the quantization bits. Then + * shift up by the segment number and subtract out the bias. + */ + t = ((u_val & QUANT_MASK) << 3) + BIAS; + t <<= ((unsigned)u_val & SEG_MASK) >> SEG_SHIFT; + + return ((u_val & SIGN_BIT) ? (BIAS - t) : (t - BIAS)); +} + +/* A-law to u-law conversion */ +unsigned char +alaw2ulaw( + unsigned char aval) +{ + aval &= 0xff; + return ((aval & 0x80) ? (0xFF ^ _a2u[aval ^ 0xD5]) : + (0x7F ^ _a2u[aval ^ 0x55])); +} + +/* u-law to A-law conversion */ +unsigned char +ulaw2alaw( + unsigned char uval) +{ + uval &= 0xff; + return ((uval & 0x80) ? (0xD5 ^ (_u2a[0xFF ^ uval] - 1)) : + (0x55 ^ (_u2a[0x7F ^ uval] - 1))); +} + +int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type) +{ + int16_t *dst = (int16_t *) pout_buf; + uint8_t *src = (uint8_t *) pin_buf; + uint32_t i = 0; + int Ret = 0; + + if ((NULL == pout_buf) || \ + (NULL == pout_len) || \ + (NULL == pin_buf) || \ + (0 == in_len)) + { + return -1; + } + + if (*pout_len < 2 * in_len) + { + return -2; + } + //---{{{ + if (TP_ALAW == type) + { + for (i = 0; i < in_len; i++) + { + //*(dst++) = alawtos16[*(src++)]; + *(dst++) = (int16_t)alaw2linear(*(src++)); + } + }else + { + for (i = 0; i < in_len; i++) + { + //*(dst++) = alawtos16[*(src++)]; + *(dst++) = (int16_t)ulaw2linear(*(src++)); + } + } + + //---}}} + *pout_len = 2 * in_len; + + Ret = 2 * in_len; + return Ret; +} \ No newline at end of file diff --git a/OrderScheduling/Video/VideoTools/g726.h b/OrderScheduling/Video/VideoTools/g726.h new file mode 100644 index 0000000..db7afbb --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g726.h @@ -0,0 +1,188 @@ + +/*! Bitstream handler state */ +typedef struct bitstream_state_s +{ + /*! The bit stream. */ + unsigned int bitstream; + /*! The residual bits in bitstream. 
*/ + int residue; +}bitstream_state_t; + +typedef struct g726_state_s g726_state_t; +typedef short (*g726_decoder_func_t)(g726_state_t *s, unsigned char code); +typedef unsigned char (*g726_encoder_func_t)(g726_state_t *s, short amp); + + +/*! +* The following is the definition of the state structure +* used by the G.726 encoder and decoder to preserve their internal +* state between successive calls. The meanings of the majority +* of the state structure fields are explained in detail in the +* CCITT Recommendation G.726. The field names are essentially indentical +* to variable names in the bit level description of the coding algorithm +* included in this recommendation. +*/ +struct g726_state_s +{ + /*! The bit rate */ + int rate; + /*! The external coding, for tandem operation */ + //int ext_coding; + /*! The number of bits per sample */ + int bits_per_sample; + /*! One of the G.726_PACKING_xxx options */ + //int packing; + + /*! Locked or steady state step size multiplier. */ + int yl; + /*! Unlocked or non-steady state step size multiplier. */ + short yu; + /*! short term energy estimate. */ + short dms; + /*! Long term energy estimate. */ + short dml; + /*! Linear weighting coefficient of 'yl' and 'yu'. */ + short ap; + + /*! Coefficients of pole portion of prediction filter. */ + short a[2]; + /*! Coefficients of zero portion of prediction filter. */ + short b[6]; + /*! Signs of previous two samples of a partially reconstructed signal. */ + short pk[2]; + /*! Previous 6 samples of the quantized difference signal represented in + an internal floating point format. */ + short dq[6]; + /*! Previous 2 samples of the quantized difference signal represented in an + internal floating point format. */ + short sr[2]; + /*! Delayed tone detect */ + int td; + + /*! \brief The bit stream processing context. */ + bitstream_state_t bs; + + /*! \brief The current encoder function. */ + g726_encoder_func_t enc_func; + /*! \brief The current decoder function. 
*/ + g726_decoder_func_t dec_func; +}; + +/* +* Maps G.726_16 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_16_dqlntab[4] = +{ + 116, 365, 365, 116 +}; + +/* Maps G.726_16 code word to log of scale factor multiplier. */ +static const int g726_16_witab[4] = +{ + -704, 14048, 14048, -704 +}; + +/* +* Maps G.726_16 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_16_fitab[4] = +{ + 0x000, 0xE00, 0xE00, 0x000 +}; + +/* +* Maps G.726_24 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_24_dqlntab[8] = +{ + -2048, 135, 273, 373, 373, 273, 135, -2048 +}; + +/* Maps G.726_24 code word to log of scale factor multiplier. */ +static const int g726_24_witab[8] = +{ + -128, 960, 4384, 18624, 18624, 4384, 960, -128 +}; + +/* +* Maps G.726_24 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_24_fitab[8] = +{ + 0x000, 0x200, 0x400, 0xE00, 0xE00, 0x400, 0x200, 0x000 +}; + +/* +* Maps G.726_32 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_32_dqlntab[16] = +{ + -2048, 4, 135, 213, 273, 323, 373, 425, + 425, 373, 323, 273, 213, 135, 4, -2048 +}; + +/* Maps G.726_32 code word to log of scale factor multiplier. */ +static const int g726_32_witab[16] = +{ + -384, 576, 1312, 2048, 3584, 6336, 11360, 35904, + 35904, 11360, 6336, 3584, 2048, 1312, 576, -384 +}; + +/* +* Maps G.726_32 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. 
+*/ +static const int g726_32_fitab[16] = +{ + 0x000, 0x000, 0x000, 0x200, 0x200, 0x200, 0x600, 0xE00, + 0xE00, 0x600, 0x200, 0x200, 0x200, 0x000, 0x000, 0x000 +}; + +/* +* Maps G.726_40 code word to ructeconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_40_dqlntab[32] = +{ + -2048, -66, 28, 104, 169, 224, 274, 318, + 358, 395, 429, 459, 488, 514, 539, 566, + 566, 539, 514, 488, 459, 429, 395, 358, + 318, 274, 224, 169, 104, 28, -66, -2048 +}; + +/* Maps G.726_40 code word to log of scale factor multiplier. */ +static const int g726_40_witab[32] = +{ + 448, 448, 768, 1248, 1280, 1312, 1856, 3200, + 4512, 5728, 7008, 8960, 11456, 14080, 16928, 22272, + 22272, 16928, 14080, 11456, 8960, 7008, 5728, 4512, + 3200, 1856, 1312, 1280, 1248, 768, 448, 448 +}; + +/* +* Maps G.726_40 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_40_fitab[32] = +{ + 0x000, 0x000, 0x000, 0x000, 0x000, 0x200, 0x200, 0x200, + 0x200, 0x200, 0x400, 0x600, 0x800, 0xA00, 0xC00, 0xC00, + 0xC00, 0xC00, 0xA00, 0x800, 0x600, 0x400, 0x200, 0x200, + 0x200, 0x200, 0x200, 0x000, 0x000, 0x000, 0x000, 0x000 +}; + + +g726_state_t *g726_init(g726_state_t *s, int bit_rate); + +int g726_decode(g726_state_t *s, short amp[], const unsigned char g726_data[], int g726_bytes); + +int g726_encode(g726_state_t *s, unsigned char g726_data[], const short amp[], int len); diff --git a/OrderScheduling/Video/VideoTools/g726.m b/OrderScheduling/Video/VideoTools/g726.m new file mode 100644 index 0000000..ba6920f --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g726.m @@ -0,0 +1,889 @@ +/* + Copyright (c) 2013-2016 EasyDarwin.ORG. All rights reserved. 
+ Github: https://github.com/EasyDarwin + WEChat: EasyDarwin + Website: http://www.easydarwin.org +*/ + +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include "g726.h" + +static const int qtab_726_16[1] = +{ + 261 +}; + +static const int qtab_726_24[3] = +{ + 8, 218, 331 +}; + +static const int qtab_726_32[7] = +{ + -124, 80, 178, 246, 300, 349, 400 +}; + +static const int qtab_726_40[15] = +{ + -122, -16, 68, 139, 198, 250, 298, 339, + 378, 413, 445, 475, 502, 528, 553 +}; + + +static __inline int top_bit(unsigned int bits) +{ +#if defined(__i386__) || defined(__x86_64__) + int res; + + __asm__ (" xorl %[res],%[res];\n" + " decl %[res];\n" + " bsrl %[bits],%[res]\n" + : [res] "=&r" (res) + : [bits] "rm" (bits)); + return res; +#elif defined(__ppc__) || defined(__powerpc__) + int res; + + __asm__ ("cntlzw %[res],%[bits];\n" + : [res] "=&r" (res) + : [bits] "r" (bits)); + return 31 - res; +#elif defined(_M_IX86) // Visual Studio x86 + __asm + { + xor eax, eax + dec eax + bsr eax, bits + } +#else + int res; + + if (bits == 0) + return -1; + res = 0; + if (bits & 0xFFFF0000) + { + bits &= 0xFFFF0000; + res += 16; + } + if (bits & 0xFF00FF00) + { + bits &= 0xFF00FF00; + res += 8; + } + if (bits & 0xF0F0F0F0) + { + bits &= 0xF0F0F0F0; + res += 4; + } + if (bits & 0xCCCCCCCC) + { + bits &= 0xCCCCCCCC; + res += 2; + } + if (bits & 0xAAAAAAAA) + { + bits &= 0xAAAAAAAA; + res += 1; + } + return res; +#endif +} + + +static bitstream_state_t *bitstream_init(bitstream_state_t *s) +{ + if (s == NULL) + return NULL; + s->bitstream = 0; + s->residue = 0; + return s; +} + +/* + * Given a raw sample, 'd', of the difference signal and a + * quantization step size scale factor, 'y', this routine returns the + * ADPCM codeword to which that sample gets quantized. The step + * size scale factor division operation is done in the log base 2 domain + * as a subtraction. 
+ */ +static short quantize(int d, /* Raw difference signal sample */ + int y, /* Step size multiplier */ + const int table[], /* quantization table */ + int quantizer_states) /* table size of short integers */ +{ + short dqm; /* Magnitude of 'd' */ + short exp; /* Integer part of base 2 log of 'd' */ + short mant; /* Fractional part of base 2 log */ + short dl; /* Log of magnitude of 'd' */ + short dln; /* Step size scale factor normalized log */ + int i; + int size; + + /* + * LOG + * + * Compute base 2 log of 'd', and store in 'dl'. + */ + dqm = (short) abs(d); + exp = (short) (top_bit(dqm >> 1) + 1); + /* Fractional portion. */ + mant = ((dqm << 7) >> exp) & 0x7F; + dl = (exp << 7) + mant; + + /* + * SUBTB + * + * "Divide" by step size multiplier. + */ + dln = dl - (short) (y >> 2); + + /* + * QUAN + * + * Search for codeword i for 'dln'. + */ + size = (quantizer_states - 1) >> 1; + for (i = 0; i < size; i++) + { + if (dln < table[i]) + break; + } + if (d < 0) + { + /* Take 1's complement of i */ + return (short) ((size << 1) + 1 - i); + } + if (i == 0 && (quantizer_states & 1)) + { + /* Zero is only valid if there are an even number of states, so + take the 1's complement if the code is zero. */ + return (short) quantizer_states; + } + return (short) i; +} +/*- End of function --------------------------------------------------------*/ + + +/* +* returns the integer product of the 14-bit integer "an" and +* "floating point" representation (4-bit exponent, 6-bit mantissa) "srn". +*/ +static short fmult(short an, short srn) +{ + short anmag; + short anexp; + short anmant; + short wanexp; + short wanmant; + short retval; + + anmag = (an > 0) ? an : ((-an) & 0x1FFF); + anexp = (short) (top_bit(anmag) - 5); + anmant = (anmag == 0) ? 32 : (anexp >= 0) ? (anmag >> anexp) : (anmag << -anexp); + wanexp = anexp + ((srn >> 6) & 0xF) - 13; + + wanmant = (anmant*(srn & 0x3F) + 0x30) >> 4; + retval = (wanexp >= 0) ? 
((wanmant << wanexp) & 0x7FFF) : (wanmant >> -wanexp); + + return (((an ^ srn) < 0) ? -retval : retval); +} + +/* +* Compute the estimated signal from the 6-zero predictor. +*/ +static __inline short predictor_zero(g726_state_t *s) +{ + int i; + int sezi; + + sezi = fmult(s->b[0] >> 2, s->dq[0]); + /* ACCUM */ + for (i = 1; i < 6; i++) + sezi += fmult(s->b[i] >> 2, s->dq[i]); + return (short) sezi; +} +/*- End of function --------------------------------------------------------*/ + +/* +* Computes the estimated signal from the 2-pole predictor. +*/ +static __inline short predictor_pole(g726_state_t *s) +{ + return (fmult(s->a[1] >> 2, s->sr[1]) + fmult(s->a[0] >> 2, s->sr[0])); +} + +/* +* Computes the quantization step size of the adaptive quantizer. +*/ +static int step_size(g726_state_t *s) +{ + int y; + int dif; + int al; + + if (s->ap >= 256) + return s->yu; + y = s->yl >> 6; + dif = s->yu - y; + al = s->ap >> 2; + if (dif > 0) + y += (dif*al) >> 6; + else if (dif < 0) + y += (dif*al + 0x3F) >> 6; + return y; +} +/*- End of function --------------------------------------------------------*/ + +/* +* Returns reconstructed difference signal 'dq' obtained from +* codeword 'i' and quantization step size scale factor 'y'. +* Multiplication is performed in log base 2 domain as addition. +*/ +static short reconstruct(int sign, /* 0 for non-negative value */ + int dqln, /* G.72x codeword */ + int y) /* Step size multiplier */ +{ + short dql; /* Log of 'dq' magnitude */ + short dex; /* Integer part of log */ + short dqt; + short dq; /* Reconstructed difference signal sample */ + + dql = (short) (dqln + (y >> 2)); /* ADDA */ + + if (dql < 0) + return ((sign) ? -0x8000 : 0); + /* ANTILOG */ + dex = (dql >> 7) & 15; + dqt = 128 + (dql & 127); + dq = (dqt << 7) >> (14 - dex); + return ((sign) ? 
(dq - 0x8000) : dq); +} +/*- End of function --------------------------------------------------------*/ + +/* +* updates the state variables for each output code +*/ +static void update(g726_state_t *s, + int y, /* quantizer step size */ + int wi, /* scale factor multiplier */ + int fi, /* for long/short term energies */ + int dq, /* quantized prediction difference */ + int sr, /* reconstructed signal */ + int dqsez) /* difference from 2-pole predictor */ +{ + short mag; + short exp; + short a2p; /* LIMC */ + short a1ul; /* UPA1 */ + short pks1; /* UPA2 */ + short fa1; + short ylint; + short dqthr; + short ylfrac; + short thr; + short pk0; + int i; + int tr; + + a2p = 0; + /* Needed in updating predictor poles */ + pk0 = (dqsez < 0) ? 1 : 0; + + /* prediction difference magnitude */ + mag = (short) (dq & 0x7FFF); + /* TRANS */ + ylint = (short) (s->yl >> 15); /* exponent part of yl */ + ylfrac = (short) ((s->yl >> 10) & 0x1F); /* fractional part of yl */ + /* Limit threshold to 31 << 10 */ + thr = (ylint > 9) ? (31 << 10) : ((32 + ylfrac) << ylint); + dqthr = (thr + (thr >> 1)) >> 1; /* dqthr = 0.75 * thr */ + if (!s->td) /* signal supposed voice */ + tr = 0; + else if (mag <= dqthr) /* supposed data, but small mag */ + tr = 0; /* treated as voice */ + else /* signal is data (modem) */ + tr = 1; + + /* + * Quantizer scale factor adaptation. + */ + + /* FUNCTW & FILTD & DELAY */ + /* update non-steady state step size multiplier */ + s->yu = (short) (y + ((wi - y) >> 5)); + + /* LIMB */ + if (s->yu < 544) + s->yu = 544; + else if (s->yu > 5120) + s->yu = 5120; + + /* FILTE & DELAY */ + /* update steady state step size multiplier */ + s->yl += s->yu + ((-s->yl) >> 6); + + /* + * Adaptive predictor coefficients. 
+ */ + if (tr) + { + /* Reset the a's and b's for a modem signal */ + s->a[0] = 0; + s->a[1] = 0; + s->b[0] = 0; + s->b[1] = 0; + s->b[2] = 0; + s->b[3] = 0; + s->b[4] = 0; + s->b[5] = 0; + } + else + { + /* Update the a's and b's */ + /* UPA2 */ + pks1 = pk0 ^ s->pk[0]; + + /* Update predictor pole a[1] */ + a2p = s->a[1] - (s->a[1] >> 7); + if (dqsez != 0) + { + fa1 = (pks1) ? s->a[0] : -s->a[0]; + /* a2p = function of fa1 */ + if (fa1 < -8191) + a2p -= 0x100; + else if (fa1 > 8191) + a2p += 0xFF; + else + a2p += fa1 >> 5; + + if (pk0 ^ s->pk[1]) + { + /* LIMC */ + if (a2p <= -12160) + a2p = -12288; + else if (a2p >= 12416) + a2p = 12288; + else + a2p -= 0x80; + } + else if (a2p <= -12416) + a2p = -12288; + else if (a2p >= 12160) + a2p = 12288; + else + a2p += 0x80; + } + + /* TRIGB & DELAY */ + s->a[1] = a2p; + + /* UPA1 */ + /* Update predictor pole a[0] */ + s->a[0] -= s->a[0] >> 8; + if (dqsez != 0) + { + if (pks1 == 0) + s->a[0] += 192; + else + s->a[0] -= 192; + } + /* LIMD */ + a1ul = 15360 - a2p; + if (s->a[0] < -a1ul) + s->a[0] = -a1ul; + else if (s->a[0] > a1ul) + s->a[0] = a1ul; + + /* UPB : update predictor zeros b[6] */ + for (i = 0; i < 6; i++) + { + /* Distinguish 40Kbps mode from the others */ + s->b[i] -= s->b[i] >> ((s->bits_per_sample == 5) ? 9 : 8); + if (dq & 0x7FFF) + { + /* XOR */ + if ((dq ^ s->dq[i]) >= 0) + s->b[i] += 128; + else + s->b[i] -= 128; + } + } + } + + for (i = 5; i > 0; i--) + s->dq[i] = s->dq[i - 1]; + /* FLOAT A : convert dq[0] to 4-bit exp, 6-bit mantissa f.p. */ + if (mag == 0) + { + s->dq[0] = (dq >= 0) ? 0x20 : 0xFC20; + } + else + { + exp = (short) (top_bit(mag) + 1); + s->dq[0] = (dq >= 0) + ? ((exp << 6) + ((mag << 6) >> exp)) + : ((exp << 6) + ((mag << 6) >> exp) - 0x400); + } + + s->sr[1] = s->sr[0]; + /* FLOAT B : convert sr to 4-bit exp., 6-bit mantissa f.p. 
*/ + if (sr == 0) + { + s->sr[0] = 0x20; + } + else if (sr > 0) + { + exp = (short) (top_bit(sr) + 1); + s->sr[0] = (short) ((exp << 6) + ((sr << 6) >> exp)); + } + else if (sr > -32768) + { + mag = (short) -sr; + exp = (short) (top_bit(mag) + 1); + s->sr[0] = (exp << 6) + ((mag << 6) >> exp) - 0x400; + } + else + { + s->sr[0] = (short) 0xFC20; + } + + /* DELAY A */ + s->pk[1] = s->pk[0]; + s->pk[0] = pk0; + + /* TONE */ + if (tr) /* this sample has been treated as data */ + s->td = 0; /* next one will be treated as voice */ + else if (a2p < -11776) /* small sample-to-sample correlation */ + s->td = 1; /* signal may be data */ + else /* signal is voice */ + s->td = 0; + + /* Adaptation speed control. */ + /* FILTA */ + s->dms += ((short) fi - s->dms) >> 5; + /* FILTB */ + s->dml += (((short) (fi << 2) - s->dml) >> 7); + + if (tr) + s->ap = 256; + else if (y < 1536) /* SUBTC */ + s->ap += (0x200 - s->ap) >> 4; + else if (s->td) + s->ap += (0x200 - s->ap) >> 4; + else if (abs((s->dms << 2) - s->dml) >= (s->dml >> 3)) + s->ap += (0x200 - s->ap) >> 4; + else + s->ap += (-s->ap) >> 4; +} + +/* +* Decodes a 2-bit CCITT G.726_16 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_16_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x03; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 2, g726_16_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? 
(se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_16_witab[code], g726_16_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a linear PCM, A-law or u-law input sample and returns its 2-bit code. + */ +static unsigned char g726_16_encoder(g726_state_t *s, short amp) +{ + int y; + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_16, 4); + dq = reconstruct(i & 2, g726_16_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_16_witab[i], g726_16_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +/* +* Decodes a 3-bit CCITT G.726_24 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_24_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x07; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 4, g726_24_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_24_witab[code], g726_24_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a linear PCM, A-law or u-law input sample and returns its 3-bit code. 
+ */ +static unsigned char g726_24_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_24, 7); + dq = reconstruct(i & 4, g726_24_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_24_witab[i], g726_24_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + + +/* +* Decodes a 4-bit CCITT G.726_32 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_32_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x0F; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 8, g726_32_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_32_witab[code], g726_32_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + +/* + * Encodes a linear input sample and returns its 4-bit code. 
+ */ +static unsigned char g726_32_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize the prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_32, 15); + dq = reconstruct(i & 8, g726_32_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_32_witab[i], g726_32_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +/* +* Decodes a 5-bit CCITT G.726 40Kbps code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_40_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x1F; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 0x10, g726_40_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_40_witab[code], g726_40_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a 16-bit linear PCM, A-law or u-law input sample and returns + * the resulting 5-bit CCITT G.726 40Kbps code. 
+ */ +static unsigned char g726_40_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_40, 31); + dq = reconstruct(i & 0x10, g726_40_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_40_witab[i], g726_40_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +g726_state_t *g726_init(g726_state_t *s, int bit_rate) +{ + int i; + + if (bit_rate != 16000 && bit_rate != 24000 && bit_rate != 32000 && bit_rate != 40000) + return NULL; + + s->yl = 34816; + s->yu = 544; + s->dms = 0; + s->dml = 0; + s->ap = 0; + s->rate = bit_rate; + + for (i = 0; i < 2; i++) + { + s->a[i] = 0; + s->pk[i] = 0; + s->sr[i] = 32; + } + for (i = 0; i < 6; i++) + { + s->b[i] = 0; + s->dq[i] = 32; + } + s->td = 0; + switch (bit_rate) + { + case 16000: + s->enc_func = g726_16_encoder; + s->dec_func = g726_16_decoder; + s->bits_per_sample = 2; + break; + case 24000: + s->enc_func = g726_24_encoder; + s->dec_func = g726_24_decoder; + s->bits_per_sample = 3; + break; + case 32000: + default: + s->enc_func = g726_32_encoder; + s->dec_func = g726_32_decoder; + s->bits_per_sample = 4; + break; + case 40000: + s->enc_func = g726_40_encoder; + s->dec_func = g726_40_decoder; + s->bits_per_sample = 5; + break; + } + bitstream_init(&s->bs); + return s; +} + +int g726_decode(g726_state_t *s, + short amp[], + const unsigned char g726_data[], + int g726_bytes) +{ + int i; + int samples; + unsigned char code; + int sl; + + for (samples = i = 0; ; ) + { + if (s->bs.residue < s->bits_per_sample) + { + if (i >= g726_bytes) + break; + s->bs.bitstream = (s->bs.bitstream << 8) | 
g726_data[g726_bytes-i-1]; i++; +// s->bs.bitstream = (s->bs.bitstream << 8) | g726_data[i++]; + s->bs.residue += 8; + } + code = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - s->bits_per_sample)) & ((1 << s->bits_per_sample) - 1)); + + s->bs.residue -= s->bits_per_sample; + + sl = s->dec_func(s, code); + + amp[samples++] = (short) sl; + } + return samples; +} + + +int g726_encode(g726_state_t *s, + unsigned char g726_data[], + const short amp[], + int len) +{ + int i; + int g726_bytes; + short sl; + unsigned char code; + + for (g726_bytes = i = 0; i < len; i++) + { + sl = amp[i] >> 2; + + code = s->enc_func(s, sl); + + s->bs.bitstream = (s->bs.bitstream << s->bits_per_sample) | code; + s->bs.residue += s->bits_per_sample; + if (s->bs.residue >= 8) + { + g726_data[g726_bytes++] = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - 8)) & 0xFF); + s->bs.residue -= 8; + } + + } + + int j = 0, k = g726_bytes - 1; + unsigned char temp = 0; + while (j < k) { + temp = g726_data[j]; g726_data[j] = g726_data[k]; g726_data[k] = temp; + j++; k--; + } + + return g726_bytes; +} + diff --git a/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h new file mode 100644 index 0000000..c5788a2 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h @@ -0,0 +1,85 @@ +// +// YFProgressHUD.h +// LoadingViewAnimation +//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved. 
+// + +#import + +@interface YFProgressHUD : UIView + +#pragma mark ====== 添加在window上 ======= + ++(YFProgressHUD *) showToastTitle:(NSString *)titleString; + +/** + 显示title + + @param titleString 加载时展示的文字(可选) + @param block 加载完成后的操作 + */ ++(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block; + +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString; + +/** + 带有下落动画HUD + + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr; + + +/** + gif动画HUD + + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName; + +/** + 移除HUD + */ ++(void) hiddenProgressHUD; + +#pragma mark ====== 添加在view上 ======= +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString; + +/** + 带有下落动画HUD + + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr; + +/** + gif动画HUD + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName; +/** + 移除HUD + */ ++(void) hiddenProgressHUDforView:(UIView *)view; + +/** + 设置文字 + */ ++ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view; +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m new file mode 100644 index 0000000..e6b11ba --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m @@ -0,0 +1,564 @@ +// +// YFProgressHUD.m 
+// LoadingViewAnimation +//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved. +// + +#import "YFProgressHUD.h" + +#import "YLImageView.h" +#import "YLGIFImage.h" + + + +#define ANIMATION_DURATION_SECS 0.5f + +#define KW 120 +#define KH 120 + +typedef NS_ENUM(NSUInteger, YFProgressHUDType) { + YFProgressHUDTypeGif, // gifHUD + YFProgressHUDTypeRotAni,// 旋转下落HUD + YFProgressHUDTypeNormal,// 普通的HUD + YFProgressHUDTypeToast,// Toast +}; + +@interface YFProgressHUD () +// 动画处理的定时器 +@property (nonatomic, strong) NSTimer *timer; +// 动画的view +@property(nonatomic,strong)UIImageView * shapView; +// 阴影view +@property(nonatomic,strong)UIImageView * shadowView; +// gif +@property (nonatomic, copy) NSString *gifName; +// 加载的文字 +@property (nonatomic, copy) NSString *titleString; +// 加载的文字的label +@property (nonatomic, strong) UILabel *titleLabel; +// 加载的图片数组 +@property(nonatomic,strong)NSArray *imagesArr; +// 切换不同的图片 +@property (nonatomic, assign) int stepNumber; +// 是否正在动画中 +@property (nonatomic, assign) BOOL isAnimating; +// 记录下降动画开始的位置 +@property(nonatomic,assign)float fromValue; +// 记录下降动画结束的位置 +@property(nonatomic,assign)float toValue; +// 记录阴影缩放开始的值 +@property(nonatomic,assign)float scalefromValue; +// 记录阴影缩放结束的值 +@property(nonatomic,assign)float scaletoValue; +// HUD动画类型 +@property(nonatomic,assign)YFProgressHUDType hudType; +@property(nonatomic,strong)UIWindow *window; +@end + +@implementation YFProgressHUD + ++(UIWindow *)getWindow{ + + static __weak UIWindow *cachedKeyWindow = nil; + /* (Bug ID: #23, #25, #73) */ + UIWindow *originalKeyWindow = nil; + + #if __IPHONE_OS_VERSION_MAX_ALLOWED >= 130000 + if (@available(iOS 13.0, *)) { + NSSet *connectedScenes = [UIApplication sharedApplication].connectedScenes; + for (UIScene *scene in connectedScenes) { + if (scene.activationState == UISceneActivationStateForegroundActive && [scene isKindOfClass:[UIWindowScene class]]) { + UIWindowScene *windowScene = (UIWindowScene *)scene; + for (UIWindow *window in 
windowScene.windows) { + if (window.isKeyWindow) { + originalKeyWindow = window; + break; + } + } + } + } + } else + #endif + { + #if __IPHONE_OS_VERSION_MIN_REQUIRED < 130000 + originalKeyWindow = [UIApplication sharedApplication].keyWindow; + #endif + } + + //If original key window is not nil and the cached keywindow is also not original keywindow then changing keywindow. + if (originalKeyWindow) + { + cachedKeyWindow = originalKeyWindow; + } + + return cachedKeyWindow; +} + + +#pragma mark ====== 添加在window上 ======= ++(YFProgressHUD *) showToastTitle:(NSString *)titleString{ + + __block YFProgressHUD *hud; + __block UIWindow *window; + + dispatch_async(dispatch_get_main_queue(), ^{ + window = [YFProgressHUD getWindow]; + // [YFProgressHUD hiddenProgressHUDforView:window]; + + hud = [[YFProgressHUD alloc] initWithFrame:window.bounds]; + hud.hudType = YFProgressHUDTypeToast; + hud.titleString = titleString; + [hud setupView]; + [window addSubview:hud]; + }); + + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ + [hud removeFromSuperview]; + [YFProgressHUD hiddenProgressHUDforView:window]; + }); + + return hud; +} + ++(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block +{ + __block YFProgressHUD *hud; + __block UIWindow *window; + + dispatch_async(dispatch_get_main_queue(), ^{ + window = [YFProgressHUD getWindow]; + // [YFProgressHUD hiddenProgressHUDforView:window]; + + hud = [[YFProgressHUD alloc] initWithFrame:window.bounds]; + hud.hudType = YFProgressHUDTypeToast; + hud.titleString = titleString; + [hud setupView]; + [window addSubview:hud]; + }); + + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ + [hud removeFromSuperview]; + [YFProgressHUD hiddenProgressHUDforView:window]; + if (block) { + block(); + } + }); + + return hud; +} +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 
加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString{ + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window title:titleString]; +} + +/** + 带有下落动画HUD + + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr{ + + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window title:titleString imagesArr:arr]; +} + +/** + gif动画HUD + + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName{ + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window withTitle:titleString gifImg:gifName]; +} + +/** + 移除HUD + */ ++(void) hiddenProgressHUD{ + UIWindow *window = [YFProgressHUD getWindow]; + [YFProgressHUD hiddenProgressHUDforView:window]; +} + +#pragma mark ====== 添加在View上 ======= +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeNormal; + hud.titleString = titleString; + [hud setupView]; + dispatch_async(dispatch_get_main_queue(), ^{ + [view addSubview:hud]; + }); + [hud startAnimating]; + + return hud; + +} + +/** + 带有下落动画HUD + + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeRotAni; + hud.titleString = titleString; + hud.imagesArr = arr; + 
[hud setupView]; + [view addSubview:hud]; + [hud startAnimating]; + return hud; +} + +/** + gif动画HUD + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeGif; + hud.titleString = titleString; + hud.gifName = gifName; + [hud setupView]; + [view addSubview:hud]; + [hud startAnimating]; + return hud; +} + +/** + 移除HUD + */ ++(void) hiddenProgressHUDforView:(UIView *)view{ + if (!view && [view isMemberOfClass:[UIWindow class]]) { + view = [YFProgressHUD getWindow]; + } + dispatch_async(dispatch_get_main_queue(), ^{ + for (UIView *subView in view.subviews) { + if ([subView isKindOfClass:[YFProgressHUD class]]) { + [subView removeFromSuperview]; + break; + } + } + }); + +} + ++ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view; +{ + if (!view && [view isMemberOfClass:[UIWindow class]]) { + view = [YFProgressHUD getWindow]; + } + dispatch_async(dispatch_get_main_queue(), ^{ + for (UIView *subView in view.subviews) { + if ([subView isKindOfClass:[YFProgressHUD class]]) { + YFProgressHUD *hud = (YFProgressHUD *)subView; + hud.titleString = titleString; + hud.titleLabel.text = titleString; + } + } + }); +} + +#pragma mark ====== 初始化======= +-(void)setupView +{ + + self.userInteractionEnabled = YES; + + switch (self.hudType) { + case YFProgressHUDTypeToast: + [self setUpToast]; + break; + case YFProgressHUDTypeNormal: + [self setupNormal]; + break; + case YFProgressHUDTypeGif: + [self setupGif]; + break; + case YFProgressHUDTypeRotAni: + [self setupRotAni]; + break; + } + +} + + + +-(void)setUpToast{ + + CGFloat strW = [self.titleString boundingRectWithSize:CGSizeMake(10000, 30) options:NSStringDrawingUsesLineFragmentOrigin 
attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.width; + +// CGFloat width = [UIScreen mainScreen].bounds.size.width; +// CGFloat scale = width/375; +// CGFloat w = 80 * scale; +// +// UIView *centerView = [UIView new]; +// centerView.bounds = CGRectMake(0, 0, w, w); +// centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20); +// centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3]; +// centerView.layer.cornerRadius = 4; +// centerView.layer.masksToBounds = YES; +// + + UILabel *label=[[UILabel alloc] init]; + if (strW + 40 > [UIScreen mainScreen].bounds.size.width - 60) { + CGFloat strH = [self.titleString boundingRectWithSize:CGSizeMake([UIScreen mainScreen].bounds.size.width - 60, 10000) options:NSStringDrawingUsesLineFragmentOrigin attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.height; + label.frame=CGRectMake(0, 0 , [UIScreen mainScreen].bounds.size.width - 60 , strH + 20); + }else{ + label.frame=CGRectMake(0, 0 , strW + 20 , 40); + } + label.backgroundColor = [UIColor colorWithWhite:0 alpha:1.0]; + label.textColor = [UIColor whiteColor]; + label.textAlignment=NSTextAlignmentCenter; + label.numberOfLines = 0; + label.layer.cornerRadius = 4.0; + label.layer.masksToBounds = YES; + label.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 40); + label.text=_titleString; + label.font=[UIFont systemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + +} + +-(void)setupNormal{ + + CGFloat width = [UIScreen mainScreen].bounds.size.width; + CGFloat scale = width/375; + CGFloat w = 80 * scale; + UIView *centerView = [UIView new]; + centerView.bounds = CGRectMake(0, 0, w, w); + centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20); + centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3]; + 
centerView.layer.cornerRadius = 4; + centerView.layer.masksToBounds = YES; + + UIActivityIndicatorView *indicatorView = [[UIActivityIndicatorView alloc]init]; + indicatorView.frame = CGRectMake(0, 0, w, w); + indicatorView.center = CGPointMake(w/2, w/2); + indicatorView.color = [UIColor colorWithWhite:1.0 alpha:0.8]; + indicatorView.transform = CGAffineTransformMakeScale(1.6 * scale, 1.6 * scale); + indicatorView.hidesWhenStopped = NO; + [centerView addSubview:indicatorView]; + [indicatorView startAnimating]; + [self addSubview:centerView]; + + if (_titleString.length != 0) { + UILabel *label=[[UILabel alloc] init]; + label.frame=CGRectMake(0, 0 , KW , 35); + label.textColor=[UIColor grayColor]; + label.numberOfLines = 2; + label.textAlignment=NSTextAlignmentCenter; + label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2 + w/2); + label.text=_titleString; + label.font=[UIFont boldSystemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + } +} + +-(void)setupRotAni{ + + _shapView=[[UIImageView alloc] init]; + _shapView.frame = CGRectMake(KW/2-31/2, 0, 31, 31); + _shapView.image = [UIImage imageNamed:self.imagesArr[0]]; + _shapView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2-100); + _shapView.contentMode = UIViewContentModeScaleAspectFit; + [self addSubview:_shapView]; + + //阴影 + _shadowView = [[UIImageView alloc] init]; + _shadowView.frame = CGRectMake(KW/2-37/2, KH-2.5-30, 37, 2.5); + _shadowView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2); + _shadowView.image = [UIImage imageNamed:@"loading_shadow"]; + [self addSubview:_shadowView]; + + if (_titleString.length != 0) { + UILabel *_label=[[UILabel alloc] init]; + _label.frame=CGRectMake(0, 0 , KW , 20); + _label.textColor=[UIColor grayColor]; + _label.textAlignment=NSTextAlignmentCenter; + _label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2+20); + _label.text=_titleString; + _label.font=[UIFont 
systemFontOfSize:14.0f]; + [self addSubview:_label]; + self.titleLabel = _label; + } + + _fromValue=self.frame.size.height/2-100; + _toValue=self.frame.size.height/2.0-37/2.0; + _scalefromValue=0.1f; + _scaletoValue=1.0f; +} + +-(void)setupGif{ + +// NSString *url = [[NSBundle mainBundle] pathForResource:self.gifName ofType:@""]; + UIImage *gifImg = [YLGIFImage imageNamed:self.gifName];//[UIImage sd_animatedGIFWithData:[NSData dataWithContentsOfFile:url]]; + + CGSize size = gifImg.size; + YLImageView *gifView=[[YLImageView alloc] init]; + gifView.frame = CGRectMake(0, 0, size.width, size.height); + gifView.image = [YLGIFImage imageNamed:self.gifName]; + gifView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 30); + gifView.contentMode = UIViewContentModeScaleAspectFit; + [self addSubview:gifView]; + + if (_titleString.length != 0) { + UILabel *label=[[UILabel alloc] init]; + label.frame=CGRectMake(0, gifView.bounds.size.height , KW , 20); + label.textColor=[UIColor grayColor]; + label.textAlignment=NSTextAlignmentCenter; + label.center = CGPointMake(gifView.center.x, gifView.center.y + size.height/2 + 20); + label.text=_titleString; + label.font=[UIFont systemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + } +} + +#pragma mark ====== 动画处理 ======= +// 开始动画 +-(void) startAnimating +{ + if (!_isAnimating) + { + _isAnimating = YES; + if (self.hudType == YFProgressHUDTypeRotAni) { + _timer = [NSTimer scheduledTimerWithTimeInterval:ANIMATION_DURATION_SECS target:self selector:@selector(animateNextStep) userInfo:nil repeats:YES]; + [[NSRunLoop mainRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode]; + [self animateNextStep]; + } + + } + +} + +// 结束动画 +-(void) stopAnimating +{ + _isAnimating = NO; + if (self.hudType == YFProgressHUDTypeRotAni) { + [_timer invalidate]; + _timer=nil; + _stepNumber = 0; + [_shapView.layer removeAllAnimations]; + [_shadowView.layer removeAllAnimations]; + } +} + +// 动画方法 
+-(void)animateNextStep +{ + + if (_stepNumber%2==0) { + [self loadingAnimation:_toValue toValue:_fromValue timingFunction:kCAMediaTimingFunctionEaseOut]; + [self scaleAnimation:_scaletoValue toValue:_scalefromValue timingFunction:kCAMediaTimingFunctionEaseIn]; + _shapView.image=[UIImage imageNamed:self.imagesArr[_stepNumber]]; + }else { + [self loadingAnimation:_fromValue toValue:_toValue timingFunction:kCAMediaTimingFunctionEaseIn]; + [self scaleAnimation:_scalefromValue toValue:_scaletoValue timingFunction:kCAMediaTimingFunctionEaseOut]; + } + + if (_stepNumber==self.imagesArr.count-1) { + _stepNumber = -1; + } + _stepNumber++; +} + +// 下落动画 +-(void) loadingAnimation:(float)fromValue toValue:(float)toValue timingFunction:(NSString * const)tf +{ + //位置 + CABasicAnimation *panimation = [CABasicAnimation animation]; + panimation.keyPath = @"position.y"; + panimation.fromValue =@(fromValue); + panimation.toValue = @(toValue); + panimation.duration = ANIMATION_DURATION_SECS; + panimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + + //旋转 + CABasicAnimation *ranimation = [CABasicAnimation animation]; + ranimation.keyPath = @"transform.rotation"; + ranimation.fromValue =@(0); + ranimation.toValue = @(M_PI_2); + ranimation.duration = ANIMATION_DURATION_SECS; + + ranimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + + //组合 + CAAnimationGroup *group = [[CAAnimationGroup alloc] init]; + group.animations = @[panimation,ranimation]; + group.duration = ANIMATION_DURATION_SECS; + group.beginTime = 0; + group.fillMode=kCAFillModeForwards; + group.removedOnCompletion = NO; + + [_shapView.layer addAnimation:group forKey:@"basic"]; + +} + +// 缩放动画 +-(void) scaleAnimation:(float) fromeValue toValue:(float)toValue timingFunction:(NSString * const)tf +{ + + CABasicAnimation *sanimation = [CABasicAnimation animation]; + sanimation.keyPath = @"transform.scale"; + sanimation.fromValue =@(fromeValue); + sanimation.toValue = @(toValue); + 
sanimation.duration = ANIMATION_DURATION_SECS; + sanimation.fillMode = kCAFillModeForwards; + sanimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + sanimation.removedOnCompletion = NO; + [_shadowView.layer addAnimation:sanimation forKey:@"shadow"]; + +} + +#pragma mark ====== setter ======= +-(void)setImagesArr:(NSArray *)imagesArr{ + NSMutableArray *arr=[NSMutableArray array]; + for (int i=0; i + +@interface YLGIFImage : UIImage + +///----------------------- +/// @name Image Attributes +///----------------------- + +/** + A C array containing the frame durations. + + The number of frames is defined by the count of the `images` array property. + */ +@property (nonatomic, readonly) NSTimeInterval *frameDurations; + +/** + Total duration of the animated image. + */ +@property (nonatomic, readonly) NSTimeInterval totalDuration; + +/** + Number of loops the image can do before it stops + */ +@property (nonatomic, readonly) NSUInteger loopCount; + +- (UIImage*)getFrameWithIndex:(NSUInteger)idx; + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m b/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m new file mode 100755 index 0000000..030fb02 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m @@ -0,0 +1,305 @@ +// +// YLGIFImage.m +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. +// + +#import "YLGIFImage.h" +#import +#import + + +//Define FLT_EPSILON because, reasons. 
+//Actually, I don't know why but it seems under certain circumstances it is not defined +#ifndef FLT_EPSILON +#define FLT_EPSILON __FLT_EPSILON__ +#endif + +inline static NSTimeInterval CGImageSourceGetGifFrameDelay(CGImageSourceRef imageSource, NSUInteger index) +{ + NSTimeInterval frameDuration = 0; + CFDictionaryRef theImageProperties; + if ((theImageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, index, NULL))) { + CFDictionaryRef gifProperties; + if (CFDictionaryGetValueIfPresent(theImageProperties, kCGImagePropertyGIFDictionary, (const void **)&gifProperties)) { + const void *frameDurationValue; + if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFUnclampedDelayTime, &frameDurationValue)) { + frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue]; + if (frameDuration <= 0) { + if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFDelayTime, &frameDurationValue)) { + frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue]; + } + } + } + } + CFRelease(theImageProperties); + } + +#ifndef OLExactGIFRepresentation + //Implement as Browsers do. 
+ //See: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser-compatibility + //Also: http://blogs.msdn.com/b/ieinternals/archive/2010/06/08/animated-gifs-slow-down-to-under-20-frames-per-second.aspx + + if (frameDuration < 0.02 - FLT_EPSILON) { + frameDuration = 0.1; + } +#endif + return frameDuration; +} + +inline static BOOL CGImageSourceContainsAnimatedGif(CGImageSourceRef imageSource) +{ + return imageSource && UTTypeConformsTo(CGImageSourceGetType(imageSource), kUTTypeGIF) && CGImageSourceGetCount(imageSource) > 1; +} + +inline static BOOL isRetinaFilePath(NSString *path) +{ + NSRange retinaSuffixRange = [[path lastPathComponent] rangeOfString:@"@2x" options:NSCaseInsensitiveSearch]; + return retinaSuffixRange.length && retinaSuffixRange.location != NSNotFound; +} + +@interface YLGIFImage () + +@property (nonatomic, readwrite) NSMutableArray *images; +@property (nonatomic, readwrite) NSTimeInterval *frameDurations; +@property (nonatomic, readwrite) NSTimeInterval totalDuration; +@property (nonatomic, readwrite) NSUInteger loopCount; +@property (nonatomic, readwrite) CGImageSourceRef incrementalSource; +@property(nonatomic,assign)CGImageSourceRef imageSourceRef; +@end + +static NSUInteger _prefetchedNum = 10; + +@implementation YLGIFImage +{ + dispatch_queue_t readFrameQueue; +// CGImageSourceRef _imageSourceRef; + CGFloat _scale; +} + +@synthesize images; + +#pragma mark - Class Methods + ++ (id)imageNamed:(NSString *)name +{ + NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:nil]; + + return ([[NSFileManager defaultManager] fileExistsAtPath:path]) ? [self imageWithContentsOfFile:path] : nil; +} + ++ (id)imageWithContentsOfFile:(NSString *)path +{ + return [self imageWithData:[NSData dataWithContentsOfFile:path] + scale:isRetinaFilePath(path) ? 
2.0f : 1.0f]; +} + ++ (id)imageWithData:(NSData *)data +{ + return [self imageWithData:data scale:1.0f]; +} + ++ (id)imageWithData:(NSData *)data scale:(CGFloat)scale +{ + if (!data) { + return nil; + } + + CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL); + UIImage *image; + + if (CGImageSourceContainsAnimatedGif(imageSource)) { + image = [[self alloc] initWithCGImageSource:imageSource scale:scale]; + } else { + image = [super imageWithData:data scale:scale]; + } + + if (imageSource) { + CFRelease(imageSource); + } + + return image; +} + +#pragma mark - Initialization methods + +- (id)initWithContentsOfFile:(NSString *)path +{ + return [self initWithData:[NSData dataWithContentsOfFile:path] + scale:isRetinaFilePath(path) ? 2.0f : 1.0f]; +} + +- (id)initWithData:(NSData *)data +{ + return [self initWithData:data scale:1.0f]; +} + +- (id)initWithData:(NSData *)data scale:(CGFloat)scale +{ + if (!data) { + return nil; + } + + CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL); + + if (CGImageSourceContainsAnimatedGif(imageSource)) { + self = [self initWithCGImageSource:imageSource scale:scale]; + } else { + if (scale == 1.0f) { + self = [super initWithData:data]; + } else { + self = [super initWithData:data scale:scale]; + } + } + + if (imageSource) { + CFRelease(imageSource); + } + + return self; +} + +- (id)initWithCGImageSource:(CGImageSourceRef)imageSource scale:(CGFloat)scale +{ + self = [super init]; + if (!imageSource || !self) { + return nil; + } + + CFRetain(imageSource); + + NSUInteger numberOfFrames = CGImageSourceGetCount(imageSource); + + NSDictionary *imageProperties = CFBridgingRelease(CGImageSourceCopyProperties(imageSource, NULL)); + NSDictionary *gifProperties = [imageProperties objectForKey:(NSString *)kCGImagePropertyGIFDictionary]; + + self.frameDurations = (NSTimeInterval *)malloc(numberOfFrames * sizeof(NSTimeInterval)); + self.loopCount = 
[gifProperties[(NSString *)kCGImagePropertyGIFLoopCount] unsignedIntegerValue]; + self.images = [NSMutableArray arrayWithCapacity:numberOfFrames]; + + NSNull *aNull = [NSNull null]; + for (NSUInteger i = 0; i < numberOfFrames; ++i) { + [self.images addObject:aNull]; + NSTimeInterval frameDuration = CGImageSourceGetGifFrameDelay(imageSource, i); + self.frameDurations[i] = frameDuration; + self.totalDuration += frameDuration; + } + //CFTimeInterval start = CFAbsoluteTimeGetCurrent(); + // Load first frame + NSUInteger num = MIN(_prefetchedNum, numberOfFrames); + for (NSUInteger i=0; i _prefetchedNum) { + if(idx != 0) { + [self.images replaceObjectAtIndex:idx withObject:[NSNull null]]; + } + __weak typeof(self) weakSelf=self; + NSUInteger nextReadIdx = (idx + _prefetchedNum); + for(NSUInteger i=idx+1; i<=nextReadIdx; i++) { + NSUInteger _idx = i%self.images.count; + CGFloat scale = _scale; + if([self.images[_idx] isKindOfClass:[NSNull class]]) { + dispatch_async(readFrameQueue, ^{ + CGImageRef image = CGImageSourceCreateImageAtIndex(weakSelf.imageSourceRef, _idx, NULL); + @synchronized(weakSelf.images) { + if (image != NULL) { + [weakSelf.images replaceObjectAtIndex:_idx withObject:[UIImage imageWithCGImage:image scale:scale orientation:UIImageOrientationUp]]; + CFRelease(image); + } else { + [weakSelf.images replaceObjectAtIndex:_idx withObject:[NSNull null]]; + } + } + }); + } + } + } + return frame; +} + +#pragma mark - Compatibility methods + +- (CGSize)size +{ + if (self.images.count) { + + return [(UIImage *)[self.images objectAtIndex:0] size]; + } + return [super size]; +} + +- (CGImageRef)CGImage +{ + if (self.images.count) { + return [[self.images objectAtIndex:0] CGImage]; + } else { + return [super CGImage]; + } +} + +- (UIImageOrientation)imageOrientation +{ + if (self.images.count) { + return [[self.images objectAtIndex:0] imageOrientation]; + } else { + return [super imageOrientation]; + } +} + +- (CGFloat)scale +{ + if (self.images.count) { + return 
[(UIImage *)[self.images objectAtIndex:0] scale]; + } else { + return [super scale]; + } +} + +- (NSTimeInterval)duration +{ + return self.images ? self.totalDuration : [super duration]; +} + +- (void)dealloc { + if(_imageSourceRef) { + CFRelease(_imageSourceRef); + } + free(_frameDurations); + if (_incrementalSource) { + CFRelease(_incrementalSource); + } +} + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLImageView.h b/OrderScheduling/Video/YFProgressHUD/YLImageView.h new file mode 100755 index 0000000..71e804e --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLImageView.h @@ -0,0 +1,13 @@ +// +// YLImageView.h +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. +// + +#import + +@interface YLImageView : UIImageView + +@property (nonatomic, copy) NSString *runLoopMode; + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLImageView.m b/OrderScheduling/Video/YFProgressHUD/YLImageView.m new file mode 100755 index 0000000..8f924b3 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLImageView.m @@ -0,0 +1,219 @@ +// +// YLImageView.m +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. 
+// + +#import "YLImageView.h" +#import "YLGIFImage.h" +#import + +@interface YLImageView () + +@property (nonatomic, strong) YLGIFImage *animatedImage; +@property (nonatomic, strong) CADisplayLink *displayLink; +@property (nonatomic) NSTimeInterval accumulator; +@property (nonatomic) NSUInteger currentFrameIndex; +@property (nonatomic, strong) UIImage* currentFrame; +@property (nonatomic) NSUInteger loopCountdown; + +@end + +@implementation YLImageView + +const NSTimeInterval kMaxTimeStep = 1; // note: To avoid spiral-o-death + +@synthesize runLoopMode = _runLoopMode; +@synthesize displayLink = _displayLink; + +- (id)init +{ + self = [super init]; + if (self) { + self.currentFrameIndex = 0; + } + return self; +} + +- (CADisplayLink *)displayLink +{ + if (self.superview) { + if (!_displayLink && self.animatedImage) { + _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(changeKeyframe:)]; + [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:self.runLoopMode]; + } + } else { + [_displayLink invalidate]; + _displayLink = nil; + } + return _displayLink; +} + +- (NSString *)runLoopMode +{ + return _runLoopMode ?: NSRunLoopCommonModes; +} + +- (void)setRunLoopMode:(NSString *)runLoopMode +{ + if (runLoopMode != _runLoopMode) { + [self stopAnimating]; + + NSRunLoop *runloop = [NSRunLoop mainRunLoop]; + [self.displayLink removeFromRunLoop:runloop forMode:_runLoopMode]; + [self.displayLink addToRunLoop:runloop forMode:runLoopMode]; + + _runLoopMode = runLoopMode; + + [self startAnimating]; + } +} + +- (void)setImage:(UIImage *)image +{ + if (image == self.image) { + return; + } + + [self stopAnimating]; + + self.currentFrameIndex = 0; + self.loopCountdown = 0; + self.accumulator = 0; + + if ([image isKindOfClass:[YLGIFImage class]] && image.images) { + if([image.images[0] isKindOfClass:UIImage.class]) + [super setImage:image.images[0]]; + else + [super setImage:nil]; + self.currentFrame = nil; + self.animatedImage = (YLGIFImage *)image; + 
self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax; + [self startAnimating]; + } else { + self.animatedImage = nil; + [super setImage:image]; + } + [self.layer setNeedsDisplay]; +} + +- (void)setAnimatedImage:(YLGIFImage *)animatedImage +{ + _animatedImage = animatedImage; + if (animatedImage == nil) { + self.layer.contents = nil; + } +} + +- (BOOL)isAnimating +{ + return [super isAnimating] || (self.displayLink && !self.displayLink.isPaused); +} + +- (void)stopAnimating +{ + if (!self.animatedImage) { + [super stopAnimating]; + return; + } + + self.loopCountdown = 0; + + self.displayLink.paused = YES; +} + +- (void)startAnimating +{ + if (!self.animatedImage) { + [super startAnimating]; + return; + } + + if (self.isAnimating) { + return; + } + + self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax; + + self.displayLink.paused = NO; +} + +- (void)changeKeyframe:(CADisplayLink *)displayLink +{ + if (self.currentFrameIndex >= [self.animatedImage.images count]) { + return; + } + self.accumulator += fmin(displayLink.duration, kMaxTimeStep); + + while (self.accumulator >= self.animatedImage.frameDurations[self.currentFrameIndex]) { + self.accumulator -= self.animatedImage.frameDurations[self.currentFrameIndex]; + if (++self.currentFrameIndex >= [self.animatedImage.images count]) { + if (--self.loopCountdown == 0) { + [self stopAnimating]; + return; + } + self.currentFrameIndex = 0; + } + self.currentFrameIndex = MIN(self.currentFrameIndex, [self.animatedImage.images count] - 1); + self.currentFrame = [self.animatedImage getFrameWithIndex:self.currentFrameIndex]; + [self.layer setNeedsDisplay]; + } +} + +- (void)displayLayer:(CALayer *)layer +{ + if (!self.animatedImage || [self.animatedImage.images count] == 0) { + return; + } + //NSLog(@"display index: %luu", (unsigned long)self.currentFrameIndex); + if(self.currentFrame && ![self.currentFrame isKindOfClass:[NSNull class]]) + layer.contents = (__bridge id)([self.currentFrame CGImage]); +} 
+
+// Start/stop animation as the view enters or leaves a window, so an
+// animated GIF does not keep the display link running while off screen.
+- (void)didMoveToWindow
+{
+    [super didMoveToWindow];
+    if (self.window) {
+        [self startAnimating];
+    } else {
+        // Defer the stop: during re-parenting the view may be attached to
+        // another window within the same runloop turn.
+        dispatch_async(dispatch_get_main_queue(), ^{
+            if (!self.window) {
+                [self stopAnimating];
+            }
+        });
+    }
+}
+
+- (void)didMoveToSuperview
+{
+    [super didMoveToSuperview];
+    if (self.superview) {
+        // Has a superview: make sure the display link exists.
+        [self displayLink];
+    } else {
+        // No superview: check later whether the display link should be
+        // removed (the getter invalidates it when superview is nil).
+        dispatch_async(dispatch_get_main_queue(), ^{
+            [self displayLink];
+        });
+    }
+}
+
+// Highlighting is suppressed while an animated image is being shown.
+- (void)setHighlighted:(BOOL)highlighted
+{
+    if (!self.animatedImage) {
+        [super setHighlighted:highlighted];
+    }
+}
+
+// Report the animated image when one is set, otherwise the plain image.
+- (UIImage *)image
+{
+    return self.animatedImage ?: [super image];
+}
+
+- (CGSize)sizeThatFits:(CGSize)size
+{
+    return self.image.size;
+}
+
+@end
+
diff --git a/OrderScheduling/Video/YFTimerTool/YFTimer.h b/OrderScheduling/Video/YFTimerTool/YFTimer.h
new file mode 100644
index 0000000..13678c5
--- /dev/null
+++ b/OrderScheduling/Video/YFTimerTool/YFTimer.h
@@ -0,0 +1,32 @@
+//
+//  YFTimer.h
+//  Timer_Demo
+//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@protocol YFTimerDelegate <NSObject>
+@optional
+// Invoked on the main queue each time the timer interval elapses.
+- (void)toDoThingsWhenTimeCome:(NSTimeInterval)interval;
+
+@end
+
+@interface YFTimer : NSObject
+
+// The timer's firing interval, in seconds.
+@property (nonatomic, assign) NSTimeInterval interval;
+
+// Register a delegate (held weakly).
+- (void)timerAddDelegate:(id<YFTimerDelegate>)delegate;
+
+// Unregister a previously added delegate.
+- (void)timerDeleteDelegate:(id<YFTimerDelegate>)delegate;
+
+// Create and start the underlying timer.
+- (void)fireTimeWithInterval:(NSTimeInterval)interval;
+
+// Cancel the timer and drop all delegates.
+- (void)invalidate;
+
+@end
+
diff --git a/OrderScheduling/Video/YFTimerTool/YFTimer.m b/OrderScheduling/Video/YFTimerTool/YFTimer.m
new file mode 100644
index 0000000..ba02f28
--- /dev/null
+++ b/OrderScheduling/Video/YFTimerTool/YFTimer.m
@@ -0,0 +1,87 @@
+//
+//  YFTimer.m
+//  Timer_Demo
+//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
+//
+
+#import "YFTimer.h"
+#import "YFTimerManager.h"
+
+@interface YFTimer ()
+// NSPointerArray stores weak references, so delegates are not retained by
+// the timer (weakObjectsPointerArray zeroes entries on dealloc).
+// All delegates registered with this timer.
+@property (nonatomic, strong) NSPointerArray *delegates;
+// The underlying GCD timer source.
+@property (nonatomic, strong) dispatch_source_t timer;
+
+@end
+
+@implementation YFTimer
+
+// Creates and starts the GCD timer firing every `interval` seconds.
+// Calling it again replaces (and cancels) any previously created source,
+// so repeated calls no longer leak a live timer.
+- (void)fireTimeWithInterval:(NSTimeInterval)interval {
+
+    self.interval = interval;
+
+    if (self.timer) {
+        dispatch_source_cancel(self.timer);
+        self.timer = nil;
+    }
+
+    dispatch_source_t timer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue());
+    dispatch_source_set_timer(timer, dispatch_walltime(NULL, 0), interval * NSEC_PER_SEC, 0); // fire every `interval` seconds, zero leeway
+    // Capture self weakly: the source retains its event handler and self
+    // retains the source, so a strong capture would be a retain cycle that
+    // keeps the timer (and self) alive forever.
+    __weak typeof(self) weakSelf = self;
+    dispatch_source_set_event_handler(timer, ^{
+        [weakSelf isTimeToDoThing];
+    });
+    dispatch_resume(timer);
+
+    self.timer = timer;
+}
+
+// Stops the timer and drops all delegates.
+- (void)invalidate {
+    // A GCD source should be cancelled explicitly before the last strong
+    // reference to it is released.
+    if (self.timer) {
+        dispatch_source_cancel(self.timer);
+    }
+    self.delegates = nil;
+    self.timer = nil;
+}
+
+- (void)dealloc {
+    // Ensure the source stops firing even if the owner forgot to invalidate.
+    [self invalidate];
+}
+
+// Registers a delegate (held weakly) unless it is already registered.
+- (void)timerAddDelegate:(id<YFTimerDelegate>)delegate {
+    if (![self.delegates.allObjects containsObject:delegate]) {
+        // Workaround: adding NULL and compacting forces NSPointerArray to
+        // actually drop pointers whose weak targets were deallocated.
+        [self.delegates addPointer:NULL];
+        [self.delegates compact];
+
+        [self.delegates addPointer:(__bridge void * _Nullable)(delegate)];
+    }
+}
+
+// Unregisters a previously added delegate.
+- (void)timerDeleteDelegate:(id<YFTimerDelegate>)delegate {
+    // Search the pointer array itself: an index obtained from `allObjects`
+    // does not match pointer-array indexes once weakly-held entries have
+    // been zeroed, so the old allObjects-based lookup could remove the
+    // wrong delegate.
+    for (NSUInteger i = 0; i < self.delegates.count; i++) {
+        if ([self.delegates pointerAtIndex:i] == (__bridge void *)(delegate)) {
+            [self.delegates removePointerAtIndex:i];
+            return;
+        }
+    }
+}
+
+// Invoked on every tick; forwards the tick to all live delegates.
+- (void)isTimeToDoThing {
+
+    // No delegates left: ask the shared manager to tear this timer down.
+    if (self.delegates.allObjects.count == 0) {
+        [YFTimerManager invalidateTimerForTimeInterval:self.interval];
+        return;
+    }
+
+    for (id<YFTimerDelegate> delegate in self.delegates.allObjects) {
+        if ([delegate respondsToSelector:@selector(toDoThingsWhenTimeCome:)]) {
+            [delegate toDoThingsWhenTimeCome:self.interval];
+        }
+    }
+}
+
+// Lazily created weak-reference delegate storage.
+- (NSPointerArray *)delegates {
+    if (!_delegates) {
+        _delegates = [NSPointerArray weakObjectsPointerArray];
+    }
+    return _delegates;
+}
+
+@end
diff --git a/OrderScheduling/Video/YFTimerTool/YFTimerManager.h
b/OrderScheduling/Video/YFTimerTool/YFTimerManager.h
new file mode 100644
index 0000000..90cd869
--- /dev/null
+++ b/OrderScheduling/Video/YFTimerTool/YFTimerManager.h
@@ -0,0 +1,58 @@
+//
+//  YFTimerManager.h
+//  Timer_Demo
+//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "YFTimer.h"
+
+// Declares +shareClassName (goes in the .h).
+#define YFSingleTonH(ClassName) +(instancetype)share##ClassName;
+
+// Implements the shared-instance singleton (goes in the .m):
+// alloc and copy always yield the single instance.
+#define YFSingleTonM(ClassName) \
+static id _instance=nil;\
++(instancetype)allocWithZone:(struct _NSZone *)zone{\
+    static dispatch_once_t onceToken;\
+    dispatch_once(&onceToken, ^{\
+        _instance=[super allocWithZone:zone];\
+    });\
+    return _instance;\
+}\
++(instancetype)share##ClassName{\
+    static dispatch_once_t onceToken;\
+    dispatch_once(&onceToken, ^{\
+        _instance=[[self alloc] init];\
+    });\
+    return _instance;\
+}\
+-(instancetype)copyWithZone:(NSZone *)zone{\
+    return _instance;\
+}
+
+@protocol YFTimerDelegate;
+
+@interface YFTimerManager : NSObject
+
+// All timers currently managed — at most one per distinct interval.
+@property (nonatomic, strong) NSMutableArray<YFTimer *> *timers;
+
+YFSingleTonH(YFTimerManager)
+
+// Add a timer firing every `interval` seconds (no-op if one already exists).
++ (void)addTimerWithTimeInterval:(NSTimeInterval)interval;
+
+// Register `delegate` with the timer for `interval`, creating it if needed.
++ (void)addTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval;
+
+// Unregister `delegate` from the timer for `interval`.
++ (void)deleteTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval;
+
+// Cancel and remove the timer for `interval`.
++ (void)invalidateTimerForTimeInterval:(NSTimeInterval)interval;
+
+// Cancel and remove every managed timer.
++ (void)invalidateAllTimer;
+
+@end
+
diff --git a/OrderScheduling/Video/YFTimerTool/YFTimerManager.m b/OrderScheduling/Video/YFTimerTool/YFTimerManager.m
new file mode 100644
index 0000000..e5127fc
--- /dev/null
+++ b/OrderScheduling/Video/YFTimerTool/YFTimerManager.m
@@ -0,0 +1,90 @@
+//
+//  YFTimerManager.m
+//  Timer_Demo
+//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
+//
+
+#import "YFTimerManager.h"
+
+@interface YFTimerManager ()
+
+@end
+
+@implementation YFTimerManager
+
+YFSingleTonM(YFTimerManager)
+
+// Returns the managed timer whose interval matches, or nil.
+// Intervals are compared with == : callers are expected to reuse the exact
+// value they registered with.
++ (YFTimer *)timerForTimeInterval:(NSTimeInterval)interval {
+    for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) {
+        if (timer.interval == interval) {
+            return timer;
+        }
+    }
+    return nil;
+}
+
+// Adds a timer firing every `interval` seconds, unless one already exists.
++ (void)addTimerWithTimeInterval:(NSTimeInterval)interval {
+    if ([self timerForTimeInterval:interval]) {
+        return; // never register duplicate timers for the same interval
+    }
+
+    YFTimer *timer = [[YFTimer alloc] init];
+    [timer fireTimeWithInterval:interval];
+
+    [[YFTimerManager shareYFTimerManager].timers addObject:timer];
+}
+
+// Cancels and removes the timer with the given interval, if any.
++ (void)invalidateTimerForTimeInterval:(NSTimeInterval)interval {
+    YFTimer *timer = [self timerForTimeInterval:interval];
+    if (timer) {
+        [timer invalidate];
+        // Mutate the array outside fast enumeration: the original removed
+        // the object from inside a for-in loop over the same array.
+        [[YFTimerManager shareYFTimerManager].timers removeObject:timer];
+    }
+}
+
+// Cancels and removes every managed timer.
++ (void)invalidateAllTimer {
+
+    for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) {
+        [timer invalidate];
+    }
+    [[YFTimerManager shareYFTimerManager].timers removeAllObjects];
+}
+
+// Registers `delegate` with the timer for `interval`, creating the timer
+// first when needed.
+// Fix: the original version called addTimerWithTimeInterval: from inside a
+// for-in loop over `timers`, mutating the collection while enumerating it
+// (a runtime exception) and creating one extra timer per non-matching
+// element.
++ (void)addTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval {
+
+    YFTimer *timer = [self timerForTimeInterval:interval];
+    if (!timer) {
+        [self addTimerWithTimeInterval:interval];
+        timer = [self timerForTimeInterval:interval];
+    }
+    [timer timerAddDelegate:delegate];
+}
+
+// Unregisters `delegate` from the timer for `interval`, if that timer
+// exists (messaging nil is a harmless no-op otherwise).
++ (void)deleteTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval {
+    [[self timerForTimeInterval:interval] timerDeleteDelegate:delegate];
+}
+
+// Lazily created storage for the managed timers.
+- (NSMutableArray *)timers {
+    if (_timers == nil) {
+        _timers = [[NSMutableArray alloc] init];
+    }
+    return _timers;
+}
+
+@end