From d060351acaf1802d9c50793a812c5d3a4b10ee45 Mon Sep 17 00:00:00 2001 From: ddisfriend Date: Wed, 6 Aug 2025 17:14:43 +0800 Subject: [PATCH] =?UTF-8?q?=E6=8A=A5=E8=AD=A6=E7=9B=B8=E5=85=B3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- OrderScheduling.xcodeproj/project.pbxproj | 136 +- OrderScheduling/.DS_Store | Bin 10244 -> 10244 bytes .../Contents.json | 23 + .../vehicleMonitoring_alarm.png | Bin 0 -> 2159 bytes .../vehicleMonitoring_alarm@2x.png | Bin 0 -> 3865 bytes .../vehicleMonitoring_alarm@3x.png | Bin 0 -> 6311 bytes .../Contents.json | 23 + .../vehicleMonitoring_alarm_level1.png | Bin 0 -> 1994 bytes .../vehicleMonitoring_alarm_level1@2x.png | Bin 0 -> 3698 bytes .../vehicleMonitoring_alarm_level1@3x.png | Bin 0 -> 5930 bytes .../Contents.json | 23 + .../vehicleMonitoring_alarm_level2.png | Bin 0 -> 2058 bytes .../vehicleMonitoring_alarm_level2@2x.png | Bin 0 -> 3730 bytes .../vehicleMonitoring_alarm_level2@3x.png | Bin 0 -> 6172 bytes .../Contents.json | 23 + .../vehicleMonitoring_alarm_level3.png | Bin 0 -> 2102 bytes .../vehicleMonitoring_alarm_level3@2x.png | Bin 0 -> 3874 bytes .../vehicleMonitoring_alarm_level3@3x.png | Bin 0 -> 6372 bytes .../Contents.json | 23 + .../vehicleMonitoring_channel_alarm_icon.png | Bin 0 -> 347 bytes ...ehicleMonitoring_channel_alarm_icon@2x.png | Bin 0 -> 617 bytes ...ehicleMonitoring_channel_alarm_icon@3x.png | Bin 0 -> 1121 bytes .../Contents.json | 23 + .../vehicleMonitoring_history_icon.png | Bin 0 -> 251 bytes .../vehicleMonitoring_history_icon@2x.png | Bin 0 -> 322 bytes .../vehicleMonitoring_history_icon@3x.png | Bin 0 -> 457 bytes .../Contents.json | 23 + .../vehicleMonitoring_offline_icon.png | Bin 0 -> 316 bytes .../vehicleMonitoring_offline_icon@2x.png | Bin 0 -> 484 bytes .../vehicleMonitoring_offline_icon@3x.png | Bin 0 -> 772 bytes .../Contents.json | 23 + .../vehicleMonitoring_video_icon.png | Bin 0 -> 343 bytes .../vehicleMonitoring_video_icon@2x.png | 
Bin 0 -> 514 bytes .../vehicleMonitoring_video_icon@3x.png | Bin 0 -> 789 bytes .../ww_video_paly.imageset/Contents.json | 22 + .../ww_video_paly@2x.png | Bin 0 -> 2847 bytes .../ww_video_paly@3x.png | Bin 0 -> 5365 bytes .../Common/WebView/WebViewController.swift | 13 + .../Common/WebView/WebViewTool.swift | 8 + .../HttpRequestCenter/ApiList.swift | 10 +- .../HttpRequestCenter/ParametersList.swift | 23 + .../HttpRequestCenter/RequestList.swift | 20 +- .../HttpResponseModel/ResponseModel.swift | 27 +- .../Main/OrderScheduling-Bridging-Header.h | 2 + .../Rescue/View/AcceptOrderTool.swift | 12 +- OrderScheduling/VehicleMonitoring/.DS_Store | Bin 6148 -> 6148 bytes .../View/VerticalLoopScrollLabel.swift | 93 + .../VehicleMonitorHistoryController.swift | 467 ++++ .../VehicleMonitorVideoController.swift | 161 +- .../VehicleMonitoringController.swift | 188 +- .../Video/Video/view/VideoPlayView.h | 31 + .../Video/Video/view/VideoPlayView.m | 314 +++ .../Video/VideoTools/AAPLEAGLLayer.h | 20 + .../Video/VideoTools/AAPLEAGLLayer.m | 595 +++++ .../Video/VideoTools/H264DecodeTool.h | 29 + .../Video/VideoTools/H264DecodeTool.m | 298 +++ .../Video/VideoTools/PCMStreamPlayer.h | 18 + .../Video/VideoTools/PCMStreamPlayer.m | 131 ++ .../Video/VideoTools/SRWebSocket.h | 154 ++ .../Video/VideoTools/SRWebSocket.m | 1921 +++++++++++++++++ OrderScheduling/Video/VideoTools/g711.h | 30 + OrderScheduling/Video/VideoTools/g711.m | 306 +++ OrderScheduling/Video/VideoTools/g726.h | 188 ++ OrderScheduling/Video/VideoTools/g726.m | 889 ++++++++ .../Video/YFProgressHUD/YFProgressHUD.h | 85 + .../Video/YFProgressHUD/YFProgressHUD.m | 564 +++++ .../Video/YFProgressHUD/YLGIFImage.h | 34 + .../Video/YFProgressHUD/YLGIFImage.m | 305 +++ .../Video/YFProgressHUD/YLImageView.h | 13 + .../Video/YFProgressHUD/YLImageView.m | 219 ++ OrderScheduling/Video/YFTimerTool/YFTimer.h | 32 + OrderScheduling/Video/YFTimerTool/YFTimer.m | 87 + .../Video/YFTimerTool/YFTimerManager.h | 58 + 
.../Video/YFTimerTool/YFTimerManager.m | 90 + 74 files changed, 7682 insertions(+), 95 deletions(-) create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/Contents.json create mode 100644 
OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_channel_alarm_icon.imageset/vehicleMonitoring_channel_alarm_icon@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@2x.png 
create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@3x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/Contents.json create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@2x.png create mode 100644 OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png create mode 100644 OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift create mode 100644 OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift create mode 100644 OrderScheduling/Video/Video/view/VideoPlayView.h create mode 100644 OrderScheduling/Video/Video/view/VideoPlayView.m create mode 100755 OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h create mode 100755 OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m create mode 100644 OrderScheduling/Video/VideoTools/H264DecodeTool.h create mode 100644 OrderScheduling/Video/VideoTools/H264DecodeTool.m create mode 100644 OrderScheduling/Video/VideoTools/PCMStreamPlayer.h create mode 100644 OrderScheduling/Video/VideoTools/PCMStreamPlayer.m create mode 100644 OrderScheduling/Video/VideoTools/SRWebSocket.h create mode 100644 OrderScheduling/Video/VideoTools/SRWebSocket.m create mode 100644 OrderScheduling/Video/VideoTools/g711.h create mode 100644 
OrderScheduling/Video/VideoTools/g711.m create mode 100644 OrderScheduling/Video/VideoTools/g726.h create mode 100644 OrderScheduling/Video/VideoTools/g726.m create mode 100644 OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h create mode 100644 OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m create mode 100755 OrderScheduling/Video/YFProgressHUD/YLGIFImage.h create mode 100755 OrderScheduling/Video/YFProgressHUD/YLGIFImage.m create mode 100755 OrderScheduling/Video/YFProgressHUD/YLImageView.h create mode 100755 OrderScheduling/Video/YFProgressHUD/YLImageView.m create mode 100644 OrderScheduling/Video/YFTimerTool/YFTimer.h create mode 100644 OrderScheduling/Video/YFTimerTool/YFTimer.m create mode 100644 OrderScheduling/Video/YFTimerTool/YFTimerManager.h create mode 100644 OrderScheduling/Video/YFTimerTool/YFTimerManager.m diff --git a/OrderScheduling.xcodeproj/project.pbxproj b/OrderScheduling.xcodeproj/project.pbxproj index e507b8d..5169ea4 100644 --- a/OrderScheduling.xcodeproj/project.pbxproj +++ b/OrderScheduling.xcodeproj/project.pbxproj @@ -35,6 +35,20 @@ 791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */; }; 792EE0952AA74E0A00A212AB /* PushNotiCommonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */; }; 792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */; }; + 7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */; }; + 7938A8252E4055800017508A /* YFTimer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A81E2E4055800017508A /* YFTimer.m */; }; + 7938A8292E4055800017508A /* VideoPlayView.m in Sources */ = {isa = PBXBuildFile; fileRef = 
7938A8062E4055800017508A /* VideoPlayView.m */; }; + 7938A82B2E4055800017508A /* YFTimerManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8202E4055800017508A /* YFTimerManager.m */; }; + 7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */; }; + 7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8162E4055800017508A /* PCMStreamPlayer.m */; }; + 7938A82F2E4055800017508A /* SRWebSocket.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8182E4055800017508A /* SRWebSocket.m */; }; + 7938A8312E4055800017508A /* g711.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8102E4055800017508A /* g711.m */; }; + 7938A8332E4055800017508A /* g726.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8122E4055800017508A /* g726.m */; }; + 7938A8352E4055800017508A /* H264DecodeTool.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8142E4055800017508A /* H264DecodeTool.m */; }; + 7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8372E4055D50017508A /* YFProgressHUD.m */; }; + 7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8392E4055D50017508A /* YLGIFImage.m */; }; + 7938A83F2E4055D50017508A /* YLImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A83B2E4055D50017508A /* YLImageView.m */; }; + 7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */; }; 7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */; }; 7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */; }; 
7940277E2B43B9B600EC52D4 /* ConditionalSearchTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */; }; @@ -60,7 +74,6 @@ 79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */; }; 79CECC242A8B16D400B95D8B /* VehicleMonitoringListController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */; }; 79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */; }; - 79CECC282A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */; }; 79DD0DAA2A9481BC00768FE7 /* NotificationAuthTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DA92A9481BC00768FE7 /* NotificationAuthTool.swift */; }; 79DD0DB12A94B3DB00768FE7 /* EmptyView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB02A94B3DB00768FE7 /* EmptyView.swift */; }; 79DD0DB42A95F00B00768FE7 /* Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB32A95F00B00768FE7 /* Extension.swift */; }; @@ -68,6 +81,7 @@ 79E434252AA1919400AEB16C /* CommonAlertView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434242AA1919400AEB16C /* CommonAlertView.swift */; }; 79E434282AA1EFA500AEB16C /* SystemCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434272AA1EFA500AEB16C /* SystemCall.swift */; }; 79E4342A2AA5833F00AEB16C /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434292AA5833F00AEB16C /* CustomPicker.swift */; }; + 79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79EA0A902E3753D100320195 /* 
VerticalLoopScrollLabel.swift */; }; 79EAD8142A7B86610036E093 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8132A7B86610036E093 /* Assets.xcassets */; }; 79EAD8172A7B86610036E093 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */; }; 79FB75EC2A988EC000DB00A4 /* MessageCenterTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79FB75EB2A988EC000DB00A4 /* MessageCenterTool.swift */; }; @@ -145,6 +159,32 @@ 791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DispatchOrderController.swift; sourceTree = ""; }; 792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonView.swift; sourceTree = ""; }; 792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonTool.swift; sourceTree = ""; }; + 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorHistoryController.swift; sourceTree = ""; }; + 7938A8052E4055800017508A /* VideoPlayView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VideoPlayView.h; sourceTree = ""; }; + 7938A8062E4055800017508A /* VideoPlayView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VideoPlayView.m; sourceTree = ""; }; + 7938A80D2E4055800017508A /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = ""; }; + 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = ""; }; + 7938A80F2E4055800017508A /* g711.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = 
g711.h; sourceTree = ""; }; + 7938A8102E4055800017508A /* g711.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g711.m; sourceTree = ""; }; + 7938A8112E4055800017508A /* g726.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = g726.h; sourceTree = ""; }; + 7938A8122E4055800017508A /* g726.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g726.m; sourceTree = ""; }; + 7938A8132E4055800017508A /* H264DecodeTool.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = H264DecodeTool.h; sourceTree = ""; }; + 7938A8142E4055800017508A /* H264DecodeTool.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = H264DecodeTool.m; sourceTree = ""; }; + 7938A8152E4055800017508A /* PCMStreamPlayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PCMStreamPlayer.h; sourceTree = ""; }; + 7938A8162E4055800017508A /* PCMStreamPlayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PCMStreamPlayer.m; sourceTree = ""; }; + 7938A8172E4055800017508A /* SRWebSocket.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SRWebSocket.h; sourceTree = ""; }; + 7938A8182E4055800017508A /* SRWebSocket.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SRWebSocket.m; sourceTree = ""; }; + 7938A81D2E4055800017508A /* YFTimer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimer.h; sourceTree = ""; }; + 7938A81E2E4055800017508A /* YFTimer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimer.m; sourceTree = ""; }; + 7938A81F2E4055800017508A /* YFTimerManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimerManager.h; sourceTree = ""; }; + 7938A8202E4055800017508A /* YFTimerManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimerManager.m; sourceTree = 
""; }; + 7938A8362E4055D50017508A /* YFProgressHUD.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFProgressHUD.h; sourceTree = ""; }; + 7938A8372E4055D50017508A /* YFProgressHUD.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFProgressHUD.m; sourceTree = ""; }; + 7938A8382E4055D50017508A /* YLGIFImage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLGIFImage.h; sourceTree = ""; }; + 7938A8392E4055D50017508A /* YLGIFImage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YLGIFImage.m; sourceTree = ""; }; + 7938A83A2E4055D50017508A /* YLImageView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLImageView.h; sourceTree = ""; }; + 7938A83B2E4055D50017508A /* YLImageView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YLImageView.m; sourceTree = ""; }; + 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = ""; }; 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringConfigView.swift; sourceTree = ""; }; 7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchView.swift; sourceTree = ""; }; 7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchTool.swift; sourceTree = ""; }; @@ -170,7 +210,6 @@ 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringController.swift; sourceTree = ""; }; 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringListController.swift; sourceTree = ""; }; 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorVideoController.swift; sourceTree = ""; }; - 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = ""; }; 79CECC9D2A8E03C200B95D8B /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; }; 79CECC9F2A8E03CF00B95D8B /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 79CECCA12A8E03D900B95D8B /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; @@ -196,6 +235,7 @@ 79E434242AA1919400AEB16C /* CommonAlertView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CommonAlertView.swift; sourceTree = ""; }; 79E434272AA1EFA500AEB16C /* SystemCall.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SystemCall.swift; sourceTree = ""; }; 79E434292AA5833F00AEB16C /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; + 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VerticalLoopScrollLabel.swift; sourceTree = ""; }; 79EAD8072A7B86600036E093 /* OrderScheduling.app */ = {isa = PBXFileReference; explicitFileType = 
wrapper.application; includeInIndex = 0; path = OrderScheduling.app; sourceTree = BUILT_PRODUCTS_DIR; }; 79EAD8132A7B86610036E093 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 79EAD8162A7B86610036E093 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; @@ -436,6 +476,77 @@ path = Tool; sourceTree = ""; }; + 7938A80B2E4055800017508A /* view */ = { + isa = PBXGroup; + children = ( + 7938A8052E4055800017508A /* VideoPlayView.h */, + 7938A8062E4055800017508A /* VideoPlayView.m */, + ); + path = view; + sourceTree = ""; + }; + 7938A80C2E4055800017508A /* Video */ = { + isa = PBXGroup; + children = ( + 7938A80B2E4055800017508A /* view */, + ); + path = Video; + sourceTree = ""; + }; + 7938A8192E4055800017508A /* VideoTools */ = { + isa = PBXGroup; + children = ( + 7938A80D2E4055800017508A /* AAPLEAGLLayer.h */, + 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */, + 7938A80F2E4055800017508A /* g711.h */, + 7938A8102E4055800017508A /* g711.m */, + 7938A8112E4055800017508A /* g726.h */, + 7938A8122E4055800017508A /* g726.m */, + 7938A8132E4055800017508A /* H264DecodeTool.h */, + 7938A8142E4055800017508A /* H264DecodeTool.m */, + 7938A8152E4055800017508A /* PCMStreamPlayer.h */, + 7938A8162E4055800017508A /* PCMStreamPlayer.m */, + 7938A8172E4055800017508A /* SRWebSocket.h */, + 7938A8182E4055800017508A /* SRWebSocket.m */, + ); + path = VideoTools; + sourceTree = ""; + }; + 7938A8212E4055800017508A /* YFTimerTool */ = { + isa = PBXGroup; + children = ( + 7938A81D2E4055800017508A /* YFTimer.h */, + 7938A81E2E4055800017508A /* YFTimer.m */, + 7938A81F2E4055800017508A /* YFTimerManager.h */, + 7938A8202E4055800017508A /* YFTimerManager.m */, + ); + path = YFTimerTool; + sourceTree = ""; + }; + 7938A8222E4055800017508A /* Video */ = { + isa = PBXGroup; + children = ( + 7938A80C2E4055800017508A /* 
Video */, + 7938A8192E4055800017508A /* VideoTools */, + 7938A8212E4055800017508A /* YFTimerTool */, + 7938A83C2E4055D50017508A /* YFProgressHUD */, + ); + path = Video; + sourceTree = ""; + }; + 7938A83C2E4055D50017508A /* YFProgressHUD */ = { + isa = PBXGroup; + children = ( + 7938A8362E4055D50017508A /* YFProgressHUD.h */, + 7938A8372E4055D50017508A /* YFProgressHUD.m */, + 7938A8382E4055D50017508A /* YLGIFImage.h */, + 7938A8392E4055D50017508A /* YLGIFImage.m */, + 7938A83A2E4055D50017508A /* YLImageView.h */, + 7938A83B2E4055D50017508A /* YLImageView.m */, + ); + path = YFProgressHUD; + sourceTree = ""; + }; 7949FF122B51093F00B75A21 /* CustomMap */ = { isa = PBXGroup; children = ( @@ -658,6 +769,7 @@ children = ( 79B966372AB0651C00308A8D /* VehicleLogoutView.swift */, 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */, + 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */, ); path = View; sourceTree = ""; @@ -668,7 +780,8 @@ 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */, 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */, 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */, - 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */, + 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */, + 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */, ); path = ViewController; sourceTree = ""; @@ -754,6 +867,7 @@ 791887732A7CD633007EA0C1 /* Rescue */, 7918873F2A7CCCCD007EA0C1 /* Main */, 79DD0DAB2A94A0EE00768FE7 /* Source */, + 7938A8222E4055800017508A /* Video */, 79EAD8132A7B86610036E093 /* Assets.xcassets */, 79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */, 79EAD8182A7B86610036E093 /* Info.plist */, @@ -1012,6 +1126,15 @@ 791887952A80C361007EA0C1 /* WebViewController.swift in Sources */, 79CECC192A89EE6A00B95D8B /* ReviewFailedController.swift in Sources */, 791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources 
*/, + 7938A8252E4055800017508A /* YFTimer.m in Sources */, + 7938A8292E4055800017508A /* VideoPlayView.m in Sources */, + 7938A82B2E4055800017508A /* YFTimerManager.m in Sources */, + 7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */, + 7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */, + 7938A82F2E4055800017508A /* SRWebSocket.m in Sources */, + 7938A8312E4055800017508A /* g711.m in Sources */, + 7938A8332E4055800017508A /* g726.m in Sources */, + 7938A8352E4055800017508A /* H264DecodeTool.m in Sources */, 7918877B2A7CDD1A007EA0C1 /* Initial.swift in Sources */, 7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */, 791887C42A84BFDB007EA0C1 /* Tool.swift in Sources */, @@ -1020,6 +1143,7 @@ 7918878F2A809E37007EA0C1 /* TimerStrings.swift in Sources */, 7918878B2A7CE9E0007EA0C1 /* main.swift in Sources */, 791887A02A80CA10007EA0C1 /* RequestList.swift in Sources */, + 7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */, 79FB761C2A9EEC3700DB00A4 /* GroupData.swift in Sources */, 79FB76172A9DFC9600DB00A4 /* NotificationSetUpController.swift in Sources */, 79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */, @@ -1045,9 +1169,13 @@ 79FB76222A9EEED900DB00A4 /* CommonKeyStrings.swift in Sources */, 79DD0DBB2A971EB300768FE7 /* ZDViewController.swift in Sources */, 791887BF2A839716007EA0C1 /* EntryStrings.swift in Sources */, + 7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */, + 7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */, + 7938A83F2E4055D50017508A /* YLImageView.m in Sources */, 794FBB192A8F4AF000D57BB8 /* MessageCount.swift in Sources */, 791887892A7CE79E007EA0C1 /* LoginController.swift in Sources */, 791887A42A80CA30007EA0C1 /* ResponseModel.swift in Sources */, + 7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */, 794FBB0D2A8F040D00D57BB8 /* HistoryController.swift in Sources */, 7918878D2A8081D4007EA0C1 /* 
ActionStrings.swift in Sources */, 791887972A80C6CD007EA0C1 /* LocalizedStrings.swift in Sources */, @@ -1055,12 +1183,12 @@ 79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */, 791887452A7CD05B007EA0C1 /* MainTabBarController.swift in Sources */, 791887792A7CD64C007EA0C1 /* RescueController.swift in Sources */, + 79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */, 79CECC122A89BD1A00B95D8B /* MessageCenterController.swift in Sources */, 794FBB1F2A92F7C300D57BB8 /* WebViewTool.swift in Sources */, 791887822A7CE71D007EA0C1 /* AppKeyStrings.swift in Sources */, 7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */, 79CB07CC2AA8465A00154B61 /* UserPermission.swift in Sources */, - 79CECC282A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift in Sources */, 792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */, 79FB75F02A98A26C00DB00A4 /* AcceptOrderTool.swift in Sources */, 791887A12A80CA10007EA0C1 /* ApiList.swift in Sources */, diff --git a/OrderScheduling/.DS_Store b/OrderScheduling/.DS_Store index 11fc25cb164d926347ecb548500f04ea91480af6..3c7bad3372f1f487a84e4ab93a26f43dcf8a21c8 100644 GIT binary patch delta 267 zcmZvWzY2nI9K_EdT!MUrXfNNOsVPEDsohlUkNLBo8r0I_0|-5dIQCjKMbH@y25tAf z&vD-?timcy#jdPdPqD9KZBUQ2NB7df5*zpkAd+k~%hOB(n8LvcDKh$d=*$R2$f*Om zIwo+*T&e}{0P^FW+0(Elvm+Nspc})|3Fh9N#AL4dB-!p^P33P{8gW33@PFWk1HW^P a@7$Kl@vZCV$T$*lNLQ&2Po-A-FZB(eGCcDD delta 32 ocmZn(XbG6$&*;A~U^hRb|71G>rOkQ*UwJl`K4IF-uJD%~0Kqy7(f|Me diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json new file mode 100644 index 0000000..01eadcb --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm.png", + "idiom" : "universal", 
+ "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm.png new file mode 100644 index 0000000000000000000000000000000000000000..51e78fd6a1a25474bfd0d7b2210c14ee78d49b3b GIT binary patch literal 2159 zcmZ9Oc{tQv8^@=pOrdO1gt29pEo7Z($Pi;I*^;S{U1Y5k8rze|lC`p=M`THpt$A#3 zwjOUpH7XR!P7G%I{eI`2xA(f9KiHq0KppX z#GQ4xLzj<-Yp1C-N+{G$w!>)`ORoEWz%a~J`1tt#(ZO#J1o@xBX0rdr( zoen|J#>NI$Us_t4pPy&3ShKUUTU%T8_4SpNmASdOGcz+^exs$O#ZBRcxSJ^y%HZJO z)YMc}RTX#r>o?pAW@cvY?(RsS%I0YqZfCl!4BbW=zM{Ngbp-M|%5HxKT~Y$PyzufI z_?5M3C$oWu$~ox&m=%|LLdlkf5g6^xQ>-VW@wxjrC{Yc9AUWSBn!_> zz|&u#v3_=4Ay`cTH23XL8#>8~<)y%MmtVJ(N9L#CiBHf_2lSQ-*6wcK@?+fe0yK~9 z=(EfWEGJ%b!(LjNS<|>#D1fK)Jm&;JACm3^u3LLku+QYYOIl0zQ)G4UzB-DvR zZdyR$C*U~!ZEsy>)Uho$O-6t^$47}yRNOv`fxeEyv>|A?2O0T``Lq!H%4a>j57lOI zD(--9QOxW}Fc}Wzhk)@{%WoZbF z41U~Z-j4x`+@?{i{AdIcC78D zzcn+$3S?R$)hD4|9PBDv5+g2zqENi+1U%Nox^Lp+Wfx~3i9KP_7uzL8 z{KQpAj8$FDMx&o(p5ufkSO=^EKFDQx^&cXxYqc{nF}-`tStU(P#11;>co(}ikq%8c zmuEV&RHw@=-<%mV{-Clh{y0QGzTaT`!y>X{Ol{v|16K2r)EFhuVJ>Px$<$&zuA^pF zc8^!@EJ{X;>{#w2;alDzI4jsu>)NU(F{E2jajtwe1#J_`^F&QtQ#B2vAQxjM7m?li z+QUNfBG$9o!BLp~(?4l0dhJ!PiCke$?x1P3hVK+f*)6qNeRpVH%JG(K_vY81`Z8lC z3d&sSI^OI$asoa5s4Y)f;Kmtg)nHZ0rjVj{4>dd{uOG^g@H=>NVW;r{Ba42M{dwsh zt6r_%epVVh8O7`$@yP35&e*jlTOd3~QbupXVMMPqE_k_Q-S&a6pkyQWQZXKM)nZ&V z_Lq-tLaXfCO%zQ`je1Wb*hwD8W4u3cC^4^RF6uNlM=Glg9{pF>R!cAS%^6 zqn9sCq>{&=rt8_yJHrQco}z8ds`octs<)6x_%j`i5m?^(vJ2PYJ77#}9TC z{>bCKEip%MOdTL<^1uDN}QWdo3pK+E=4FwOT$}tBBA5KCvD`u(O$`# z9pSAe3!=C{oVfb%<&hfF5Il6n)Xwx%Sg5kd*f%tGd?}jcu9@)md~b{$-pOe#=(BMx 
zKzSvMJ|4{v;BU(p?efyWIqt}B7|I$LScGvR-zVP1AEyR=FIOMxId;yhRFQx23>N$A zlasq-gQxau-G$N%@jfed8bphJPs>yex zdwDn`aw?h0Y-ljM!S>w3_P&)Fq&=ppvi0+tj$vP3=<^Z=JO@$pGyG}UD%#pA*=b}$Q;S35hb)B?8&;GkIsKlh zG$hqEESs=giK|K6-9xaEi>O~cS7*p^EsHVgTU^yaTTV+H-7C;d$W>75z00qdWhk>= zBg_9lby?XakzBqk>LfKIcQXkuf&JE!tMJ6o{y;%0PXejG4C`%)3_&PknZf%TAv@jv P`xziu+T-hR7ZU#iszrQO literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..995b7f789e65b3c105f8a271aa9819bbbc845d95 GIT binary patch literal 3865 zcmX|Ec{J4D{~lSI#*#g28M{!HC{lLD62?9u`&gswge*gYk)bGQL3}T!_4W0=y**YAE1YGouCC6@%ge~fsIRXd8yhPsDq?w$j*fo* z{K>LROiU~p6wad7Q5D`89_>WHfMHWrY99TO?~*N=xKDZp$9FoOhsIxxvw04W>TN&*(5 zsD-zW-aG;R-evr;rG0t@kTQTtPdagoKKbqFB?eedJbVz(h}8#ZN6eLZ=G-K6W)vVI z83Td9lsjXvm70H(5vESQA_?s8Fc*pR;U4N!6fhqF%-jU#!l*ex^yh}mD0TX1FP+dp zZ!7~g(t+g|V9X65moOWOfwg4D1J;oc2R;`lu4nfgmB^-0T4Mzz&I=fIq{YGL&n)Pf z`uiT5J9g)p(*s8}4}i`518fRF3_VOi93(hXBH^@Wwlpm4Al!nvwMu_ebXZ;hEZ+s@ z5`i2?Mut65VzKLM&bVU+IJzyf!WM+!0b8FjEAG8B9VLFdEEk3SyLBKl*xgpLS)}O&ida zWK8rQJkO_x8&O+y$d)R=DPG1vD!s;gl;fXS)|wKooYw3CCx+qj*<^2 zTtfok>|b*0`zb-r!Hop?bzz2h&I$bL+iD#Joom;IcI*MQYwB6$No!A|SKfSLHr0si z4JV$6_%-{Lq!)BB0|z$lGK6%dtC{=MWJU0QZ%P9TUn^h_> z??5ewh*%EN+4%%U&>i(r7j<+sOsoe<0^HpTOQ*wH6QG)MO3R#jrs2JB!+P&*_xfmw zpT5!?_G)1E4@T%hB#I|r_7(7Wkf{DouyS{r7XInbCLoj%d`V6tnV>XKGcnut-p@Qd zywR_|TsXJ3VPnbp!IzY5+;D$(UyWA5YtLp;Y_JKJUY@r!)+y_U^~v+u@+di7#V)oC zW0g98df;CIHpJ{6sW5nIFXoP zswpmIZQL33*EzZ{tcunc^q^|^JyZ<-aKR`N6Hbi^K4qSxj{DGENswFWK^hO-hnFZj zv`Y8~pZ@n;mm0zZW?(^$`KN8rOmYI@U~DNWBH@vh+4pZVhOK+!Mj@2lzlMj_ZB9j8 zpS{MbeJWZhPDIfoD_V2q&a9@E{1}cqM{3@ABCdo9>T3SFY~nA;99VzMA!2WF3;eD1 zk#>M^mV~I=s1v)St0%d+X3d8>zTe&LmUT?!5(DewY``PW6Ms-L;=2uB4VrZ-srs?J zxK3%=j9L}J{asMJH|PFK;_94OJWfk7n^ 
zRW_04^U(*5S^_*KuzJ_2&~o3Vsu4${sHo22z#z#79z@>@HQv^3AJ5(+mr7aj`d_rc ze8=5*gRnDuQCN6eyYnAAeW(6dn}0XX+G#7k4bdt4(5_BBa?ub?zMoU?yNMj?el=N~ z)0%E=o|Kf7Wy}rc?8C3NY?>1>eP!S9rSYHDRj3`IQ=Z{s+Tb?ufpW2DLxRG$P>-LD zrtFZ(I>J?#-KIsIGQR4kjsK)d(QSK_?($psitmyGldSkrL=6wFf`EgX%@Fico91xj z(AAUPQ@d8a_5t6ueEFx}{cBLU;ZPGg`ca+C$c6?ey5Pr+9%chHz2`^A1RJ})a^(?Q7#PL7S`EqF&$*f;Iw z6u5@kxM%=19K_i0DYpp=akI+NS*jC z%3-KIn(G7Z2~Wb+B)iIvQ)Hg?NuvM>&y;$J7b;ruXbCKz)}ct;y|K>QqbPLnv9>zh z1(<=1#8$k{aCQWq`r z>54*Pn)d@)L$kxe!+9>lxPr|^tZHp&9v{!uoQ!+}g7p4UHbIW<7Ujutc`qCsrz1_G zpwrl_44_<>&KaellNodC=JBwG?xM1nJ+}2%(iS@JG1wao_ zqY$^oeR^MNpL{@Lya09$h^d(dS7QE*b!Og+{N19H_&(ZD;Lb^D{rJEAnfKlrra0tI zoa3#j>@0hZ@$mNg^@(N_w8H4?>!UrJED-4Af=2#rv+PdZc99_1OvQLKCoTPIUieht zMQSbJdX7@V-d=)2Uwj8wo}atUJNMv+Fm;6H50W@XenS&T^n zZW5c+7F^0x9VCOh7+xM0u=456RkfubTLt>_i+QeBWV495S?AmHliKal?*49(tFtgc zFWNU6g(~v=EnOkDHeL3#_Jt->@xgZtL2lh_qzvV2mj}nQcBs5mfG%UO$ej zcwHg+J_O-yeZuVXpPZ(aIdFTpvB1|cUfn);+AfiBEB$LFq}h~&&$e_#yGy;Y|CkU2 zhjBX)P(1j?MFakxOsig$NkvmKK~%r&-Lryfh`p#%{D3bUEdimZ{tZ=A3*3Wo$y!=~ zt@uxc5i*-^!>axIvQea&{Wh@<&e-?2pmyhD^H||(jm|qSGjWLGN~B3In0esk?|5d> zvb8V!&*9XyBt!3=EXQ~JsWGn_)r70cyvVMD%(AeH?Q{;?E{X&6_$$TwJ-d{^H< z++hE*JV@<^fA#1yfjaMr;t`EU7cjzj6j6ptI+F9moz!cO->MCUFCgn0*MAG5tW^-! 
z+`-m81iW&9G+zvwqqeFKb_!|s#`_^r6Xbr~Bn#~4YJEjV?dqzdwJNxXcl5+;OR>Ni z`V$%NcI?mHDVw2Mr>w`tekwI=wu{GAdl%VNdie$KXWmqXVEeg?&UAQIUVvsmmKNY0 zV-MYJx#8Te12}y~KEE2?3&{+J%JWHkZ6rnMg1P5UNRW;A4f$SfQja|rQ3wrLLtl6u zF0N{57LscgeFeo4Wv~ms>xva>kP9(KMd#!P29Bs@JsxTs>6Di2D#oZUxHPpX*)%xx zA;-PUO?8PziC;fQIoH;dK`p+Nj2%qpVq zTbG+KrL%g;w^UaWF&jRnktJ<}cm4*adU={I&Ie!x{smRKB{=Z$9;UCPe|A(Ikb+yU zLCO=dz25X7k-Y*|vTt>7O&m8cLcnWz%yLzA^3nyHfBhwdc4^8}n4ffg&snCu|7f9| zs}hKNl%a9n%b8Cfyd`TrlX{BHqbWR5g+tBD%XQ4*%SBV6`nkPS zv(`B?)45hNkRB95!Ef%kn5|lnn_-&!U05f6QS>{0XrQwH{ci9V3O1k$!-nIJK5wc*K+9osL9*ZqjGne5>sp1~ zq;I5x4B-lNqQ)w?N)`m!g$m2@8Ow_!P-%jXAD@xC4?CN@>dgdOTAl6r0jwRj*=4Sc Z+l#NxRXPbd!v6cCwKBIedt>60@gKxF!utRK literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/vehicleMonitoring_alarm@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..9c72025c218aa5905688603fc542cb7ae17111cc GIT binary patch literal 6311 zcmV;Y7+B|tP)Q^G6-|S|jp79Qarx|Jq~qP$2R}9sm3K`(P#fWG4UGVEetR z{oYLe^6vfG(Esp{|JYUk_4549!T;}!|KDi;+-3gn>;LeQ|JhsqX(<2hhX34X|M8Xo z?tcE~;Q!%q|KV%@*-`)RiU00`|LuD4J{$k!bN~4D{omLB=Xd_<VW^~d-;N1|I=0delPvi$oZs> z|J`iyJRAP#;{VxQ|JPmr)mr`J-ut+u|MQ&tX)gYCE%sO^{o&gF-O~T$e*e@@_h2ag z*iHSFNc^6B|KNB3HEUwwwB{od4c_^wf;ae$Z~n(p{=`i4TtND6 zKKpVt@j@d177hOl2>jR1`^2^OZAtvyM)zq!{d7F}WHS1*qWG15_>p!0+;RAYW&Mj> z{<>TKgH7^JF!fq4@=7WG!KeAAl=+^8{grC|%xV3RWcqth`RYOQS2^@nEC1+`|J8E! zd1C&GL;Gqr{icNdnsM}EMECST{d_+Dkvsq9+yCQ>_=#)&xoZE;V)b=f{e@NZZdCrF zO!Q_;`gKSA$wL28Ed8vE|J7&pe`oxuSN*a__;5x0&_wn8JO5oX|1%r^$#DIiI`?Ea z|4AkO(}VxXdG?KY`{hddur~ibApay3{_L3ju4Dbgc>hQuxlKfe0000wbW%=J00jsP z5EC00A0Z{|`1=nm@YLX)6Who%l85=IS|+fBcJt@Du#Rj?K}8{;SVc1RWIo~KQX+W2 zS9jgJr9c1x6&y)KK~#9!?Ad2bQ&AKLa1s0j5~Pg8&{78pd@|8EVx?LUR9v_R8pJ_C zQ4EUKMgxgL)DUM|wL(>>HfR)4V~7SVjzmxc;+EhR5f>WwKKGt?dBsQj9^&}I`DFa! 
zFXw;mx%bs}ZPiw7)mClQR?f?EEfZ zLZ%Fp%l-WR0}V~yfkDBF0ADYapuBto6v07(-v5+_rm)}uU!RUDr`6HNHy|X;`(L?e zbaJIXip0uaDfj#9EyG3yDiuB|$>pO^hW=F;s^l`9Ch6tXOD6w&Fr4y`rl#>Trmo&* zeR8e*UVgDoSDb&Z{QMK^lGRgZjBhHF;r#y{RhX=+%Apo-w>~K^EmRAtu(bTDb^BtK zLv@vfVaNXb;S1@d;wx(1&I`rHh=>S|GIy@J$)9vLORuhra)gD@;s4Y5g?8deiN@OHqMYv$0toZ!7)hbp!WP#}V zL#*~ezKwfo#-2wxqawvw45lmHGox|G`UbT}*dJM+!ChEzY%AX}>n^JGJH{LI+lz+&A(7mHC>RiJYT)16}@=7|H=sB zD-cOC-^$%dU4qF5`~4lN?8pVUclkh$#kXsy(ILi)iY6?Rso5aeuILCZsNY=-sEXQr zauBF^7IkIRe78h2p2mt;2bS)^oxEGkHthVoD0Jaf^jk}m&DE^Cg9Z%1k+@ijRTbd+#+GW;>(>_cFF(lE#&5jk zbnw?j{pOw^s!(o=uD?D6R0G2Y;Bc`Rv0V@&Wr2$ya$Q}vHs+c0ZBfc?)?T5+)v{js zDX>?w9u65oR5V0%v8gv!8w_8ouw9OKV?lPd_KvG%Q929zrSNNsi>TTMcgFkXE~tk@%0c;kk7^`WF@4gxgiLv^G^h} z-a$hMEBseheM*YC%$^ZFCel)!2rS4jfzQx>M+>Y;`^h?e^Z4V98!umeYW~_*(F*ZK zmUx8tJ-F>OiIAyy*I08(N>WNnWlnU4Id(RR0?Yon4pcFd=Bz2`Z_536(J4z^ zcpIlaRJ>g{@Az)NJQ&wIsvKPQBvQMKMq6BLY$6n72r6KK>PziR!rCxr&VEzD6w10- z;ADH@`(B3a5>dI!yV6s*;w&_4Any03tgV*%ypoa!#U}}8_!y&ejK(pR%!GvtDJzaGt-LQ94UAR{ zvep#n)~+?VTK224UG6kSk!ImaSaDX-|T*zWe)sjS5aSp7q&l-*93b5iiR_+@SRzAL=yVG{LY!-Lz z>>WIK2y0gJtP(hlEW`xDs%+3eJ(vk0GS&;7hzpm5r>p65qs{6=GhBTgCyS|su8bto zEo8+KR^?}+DzXEM>(=z#+*@M1einU6^ug(J-Yk4(MIAp2&2qv*SN$T=Ef_8?#L8L? 
zsn2)#*_ys!3Zy9VO-m1>qBpk7>14@0@!A~cOAKS> zN@3}&_{$PYQJA53cl!T0Cm=&)4G`^?{@r`1*RN|Ehu#ts-y)X!$#{BQ_hu;yGVC2d zN9*ori7cG0*|Bj6nGKC9hrK0@RZhP(0(!F)g)BPpv#wJXL@14w2&~s%I8~LFu_Sk^ zl>Xx7D`zahXyFTR=~=>pW=Uhkl5Tw~LROU)x4iU`XP+8gfz1pTZI(xS-!c{X7JV-VIrJPp=q(`$3$A4=(cIQ;aI@ zZeU3ck+IImFX#0+t)HI^Y}dfMiw@$AWvYHY?2uYpy6OUqz~C2V}YZ zI`JLw7w^-OSlMZ5S(%yX>F}wjPpExWi7GN#Wa2k&B=4GB!cvQ}it(!qSnkcT@O%1@ zB$jq}YFZY7>EZWDP^G3)6jN>DS#!l$I{da4V@X;NF}uN)mSqN4x_K9<=3}cML896O zsvWsgj;)1X##_l^Axc`OG0DlhQ&ZE-=Ja&4*$h9o;Z#u+sD3tcelcxTaU9o}m4A|M z4_hdGax9PlLzukiacZ1i8Fk{;Inu^9u9-1GUuqk)fsz1CSc+hTV2jSMs4N5pNdU2c zOktM!2NJ^6Xh?K%i!n|g+=KDW-#Nc?dhYe?a=S3hzRRHWHt@;!_q)II=lav*F9R2;-XJ0vW>cq(x7#E|W`ymnvsSxY?tLT?yIW@&9AyjKFi)r8)_=~4cKYQxL z)4=uVSw=;(iXus?s2J9tKOmOX;A4vA#G%>(>)@LoGJWvDyPz+r-lh6L=tHinSFip- z!T!OX1dE~CX^O=sHbK|sON^t3b*fK95{X1T2)-S8{0tMexlof{1wHT($Q+ALY#gRo zm4s>#u#^RvmDCFy*pnbH>zQN4aT;ifRmCUGE_L~8E#sBcK6QjF_~^?^fAn0GG0+m~?pT5gJ!Q-q8C3D;)ZZgaDrj+XRpdx4HRrL;Yb!yMq>rRqTVue}+S&qGZQ|`|sCJ0enIsTBp?-QnDNOl+|nhLGUHXO9p!ujJSZ!wfonBMxaJWn0$Oc92Us)o_=sO~PsUNr@O#`z zH}Q{u>(jm%P_S4C#Rhn}n(-ikmE#~(O^Yr*Tw7tq!{KnUF9fEv)8kQ%_`8^9uJ;oy zHT?CjM4S9k^PR_H&$C`)eS*Kxk%L%Br6yMuD-pgmv`gWNoST{o5R#ml>Oib0L1|HT zygQ^3cPQl6JS~SCNvNTLr59wD?29u{9lFR${LcM`)KZhHyac;NCW2Ta7?)Yf1J-gc zsHx98+lg+@_mZ~J(b*7SwUJmuLu!LzwSD`E(xpnV2Jli;NtKdXh$W+lvxu=^P-mAp z)@mnHdqmh7CT;Wc{eYDSknr{4F!twaz9#?Ayf4lWLsmeF9kv~&SQy3q*lm#RLA!KW zuVVSzby`KNNgtCZ93xu)cr%=qP2UbRL3O3+g;+9#hxlbq8AHTUlgsAh7lkpq6fVT- z99;q|cR(sOhjpu^gJ~fe^Ms~`hdutG^&^ju54|6Y4v02Fi6t%y+mzH&qF94#>sDG) zAH`-C1*^9yFaub6Y0JhT`})iRB>w1?6ix_JSFZ5>dQ2}g3#ZG)HR3!}v7}uUIGFrG zxpX~csJhh|FQb^IZqf73L*?dJZCt`IAoHjLvY7}ckdxi_S z^o>$8>8MYmM`IK#o(>Uj$Mp1cmxqKF=>}X_2qV;bJy%Oi3%PX#0>0LYJ!c}dcWG+`bWJwtPYKYy6Vz|1lk4Gd|yUQ8F|Nb6MNJEZ+gvc@%B- zU6}Q2Ju!n6T8Ztg?Khj z1li(@aiL#EEEv1pbZH5BFSOt{D9SXoF0i=xSy<>anq^@ZPU?xfyC>*kTr!SEtOvVj zEGAba)=Z00Uo)k6>pzkmKmMzm4?9M)EbOwC-c7^|hjL*Y5iF|Dt(HD!>r`h3%X8zR zbp-S{l7Vo^O76bjW?_~rsf;VP2%CqKmm$3jacoaYtj@HDj*J?7Zh1sv`TGrOi~&{y 
za-n|a&BH~xI@3&2p<_{@86ZLSF6rdT?=fUBCf@%x!ySXU4F*#vn6QXjCQQ|`8wIzWlcd$q}t z3|4R0prjA5co#H$xfbR9%%wo3m}S9b1FoIO6&73&$N8zL#^ug~AT}m$V7-DeySyNC zYknl7&g-3>*WwHd)ByjlC6Q_;P+5i$p$uWP~(7Gkacpu<87edtwTksT9D znq_SlrkCU*f$j+r$R}@t3-6TRSVs>ZO>_r{ppuEDV5u^OF*m;mMn@cn7Qrygd<4U# znkgF|p#Ol3qQRH;MmLWjrBXwfJ(t~)xwUPL8+MF3hGYk&6eJXU1h^PkWMNX=+U8KH ztg$e?gkMl6P=WLX%B6ytiO zkMI?L`;m+yEn&j7w>0m+RG^#Zs@<2l$+Y#C9;g)n205lD6C8pv!}KJ43O+gA>LaM) zl3EL@thwwW@nMD4x&mM{$CzapP3f9&36Ut!u}6r?b6cR2^ zdJ*RM0LI7=4TK~p_z{tgQCC;GJ%p>U;Qstvg-U{1S1%)1UYS$g?j1X~wg`?P7^0z& z282jdi)*vG+r6D-c}6ORmAzChd$sQ7cuzPz*3&~RLo}ol67M|q)Jht6Y*4$a?OUKy zZy^5V>5I+o{MFg{rqrtGeyqbLl#pI?`T(GYuFHdikbp)-? zN6Cd?`DKohg6yGomN;tj5lT{7dSx9)45R_fUgp@XWd*g{QD!$lVF0&81u`0z0Ryzk zid_ZwZ1#AadkS_{ROSN{hmuw)5!@1&1Y>0R`DLX=`}Q2T(Zh%i?AcdTTE>|qlp%n3 zZUKsO3B!zFKxVJ1DXVrkOZMzKKp)+7VAq}!r=z;8rpgXXMkry`Eqr{2Gz`Iz4A2;y zB78p=nGi}^#i?#Jm_nvVVcHHT$%SB&4AHnajgZaAgiwNNJHR9vLz6-lp&TQVghFi( z7?L5HWRpaIgiI>btpuaaOE5hd!bu^JWD-zFwH*><5)I*`B#=bfW+rqD!H`U%sk#f9 zB-HjluS}9j8E_P)ZH7|OB%J~1|3;Ev5>1svGC%olJ(MI<@NS1EE#7h4J^K+|W4euq dEVN~?e*rJkw<}p_0b~FG002ovPDHLkV1jg;m^1(Y literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json new file mode 100644 index 0000000..5da3b52 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_alarm_level1.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_alarm_level1@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_alarm_level1@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1.png new file mode 100644 index 0000000000000000000000000000000000000000..52348b125dd6123b3e909b87582d7d4794230cc6 GIT binary patch literal 1994 zcmZ8ic{CJy8=kA7p{Xkxq3`sCxU$^rs!1-!Rz{3{$k_LN3sJ5$g!sy}7NSTTh zpD0C$ijFOMUM64*&rEX3$@N5dNQGv)SAxFE5Y9VsXjM%}ox6!(=kK zB{mjhF_+KCriCj0Xl8d#qHLjQ|TU}k{>OX(}%yrw@*=cHO>h0|< zFE1}GE#;Ey>+3l=Ik~yH+=jtmXlQ82%F0ryRG?}FZ34LKa8xXywLeGMoPE-iee&Xt z;(1`m_+N@AUqANUgtQ519Klh_5?95K-URqU*c%l0~abd zZ3kMd;oK6OoPiIYqO~j#mW)=DfO`y@_5&m*plJ)2R^Zz~*wqK>TR~1aTFV5%iHMN^ zeBxkQB$^FJ6Yl5_8X!_xL`#;k37npX9X+7E8$P9@^*j)pgO)SULL6F%MxP^KSSZX6 zhV?-(FbI9}hnD`(+ZWpTpkXTTrm~4P@Y^O_+knFpaA1^EQ4dm!KzuInPiBWE?Sv$N zOOa?M2zmvfW^a(}4a+=OWLNZ>1|n!6&|$aQ4h>lWnkDlL1rCv!>SPo~LceMu8wF%2 zhX{D^ybCsW!GdZylnvWcU|k}75X*6nf_;(bYXoeL0Mu}JB?t}&K*s>s=?CjwfrSgm zrvV#Bc7X%9?tt#u?_RU#G}-PxvjI1(fQpkEB3|>WEsOUT zc-l_UuDw{$3MuKircu<-*`C|@p{*rMNW))ld8lFpvqR(TbOb94YA6RMoX$v)sK?x- z&7IgN=$n+xdc}vS*Av&-xFZyF_z;m2eyWy~Fq3>j|y3{6gM7n^`|rRM*!r%3EEI`i&ChsY!6V!>jx7i2SUgbFQ3Sg>0e7zi83k zcJH2yNoxk4yi`?A=54$cDrtQAagDyNv*JFhR;6oS=`HpbZoS2Bln{T?ZTydDUys~v zo_xlIHN||QrmxPY4X}N=FJejiCdCz262yI1hXqe>yM|GFWwDjtS}?DaB!ZnNgQRcAijUE@AK*gnO32D& zrcU9rerQb=N!znzmy~pOi~S=u>Yc3tHa65uW~*;cp(+4l5yOxLBmq@zzcOhi)wn$EWO+4226@9QNJtG*HJ5jyO5v!xAkWsF7FlWSCtq6=>o2qR&ak@5-j> zcrb82^Rn~)dBuf+)hCn+%<%!Z=407ZUd7JC&v=A?@5{^{vXp$a@sS*qCUNy$pePjqxE)70d9)85-y zFJv}2oY*erojN8aMi*Bo-ShTbt-j|kQfx5-o-J3Z$78VXsd(9MHIfJXd3gdjSKrWk?n|pO`mdCJ zqt1eiV0w9HG;?vHi1o1ld1%&TXSrlOrkEgAmLNzceef7E(ii2KIyk^5QSRn~7fw|& zQP3ON?741mur%`Qub8$HhIfCn?hWxRhW1X#wy>0Wv$f0FTl0T@TVs7wQmw92{Qm&_ Ci7hw) literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png 
b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level1.imageset/vehicleMonitoring_alarm_level1@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..74761e9f03c9b0f1134c3974289d05236270fd5e GIT binary patch literal 3698 zcmZ9Pc{J2t_{WWi?8*L34NM94GpV~HqwMTJ5dLg z@eI{69dvlo(9r%f)<@r`F8`lkFc>W@E!6@5@b~XuCnqNm1gSn%ySux`$H#Bpyvfhc zKRP<1Mov#pPfkwAWb)3=&i3|pbaZq=LV~fe@yyIjV`C!~pzfs#f}q30Ln=)9KT&xS zi9~g&3D?%vsHvu>rw0ZG4h{~crlxv&dL}0)aX8%k{QUU%cw1YWqM{--zP`Rrg`1n3 zsZ~))Q&Uqw!3?zUq9#9;LxJX=5FOo#i~&W~2t+>u4PB3~>mEz%fhHb+k_Bk&22DQ% z?mGbLc7Tc%1OU*C6QKG4(8C@{YC_vdpqU4t;{@D$_)h`}&ALESw!mE*K+y~&6a&f@ z0MZfKjt8_Ip(!h9(hQtmhR7k%lrglE0+FMkjYw$0?bOr_xNQZF%|QQ-pshgYPXK_h zgZBS|y+h!ea%h(TK2L#2A<&X1pouw_(uR&tz~Myk zrw9G^g?>K)36{_>8u+CTF0BH^H9$fR7?lC|#{)P#hz*A3vB#RW$Es+Ev<3blfzu1% zTY z9sI#C-(wROAOi!%n}WmvFzW;05pwJj2(9{or8qFv2}pOKM5dm2g+jwYCwAUIxGUJ= z47fW}noWUUdQhg;$(rWTE&}pZhsZn7elE0^1udt8`2k?UW6(d{(~oEfuW?9GgWOdi89r#^6*!#=5`s^io>44u zpsyG7(*v5I*6$AWIzc`5lq?%C$QW|C3F(VNysXfEKG^vZ9EkS8(G-TIP$pL=4Y_1)V* zw3^)YEAYd=JKIBj#Hvr_W$*I{A%0K1Tr8E9q$RILxni9i9Yo8;m2kQV#(y@FMhpj@xp*%_y6`@4cU3*Zl*`I;Kk6-ARPiU}*% z4>+II-~QTIUw`Xs5Mxv1JdsuZf^v=0id3f3qvRPh`?yztz8@tt{M_Ef?ptL~@-+IP z^Wh=;mKEby1&w;utW3(rcrtSO4OH?zr?0r@*~1ll5x=Ku`U}(oMl8BJJ9R`x8d9}C zXAklo2Gbk;t@axhbkIo#Oi{QC6q8L1Duu+xH$okM3GZm&4IyoQG` zVQjt(p(ajrWn+M4n&Tuhrb#ruaD+!XGo0~vzDmQ@F0CGxq+gU8%MG7&saPwn=3t?# z1(H3H#bqv4r^o#d6Jn*tbNd_i4H$Xp#}z_w#j+HoO3Moj6|!CzEZ+~P=w*}(X$xpB z3XBhdFS4#WD=;gzCGx*tG&~$lsVw>~t0T|mTnpnsU#eYDDF5i`{>6B&=Z??i%=R6f zr1pr)WHI$W-5nxYC4!zuP90)3B3prM>oLzg`LPQUsrVl(&n~TwBO~$ZoSVEiHs4oN z2qhtp3vnVIO1&7IWo?I-lSSa3K`Rr@%p(f`7SjC;KTMLFk@2*7+gNu$+(>TjImZIRW9lvRbgBeVuEY zvX0BmdwZVFvOdd_<+b5HZR-@e+i1RTGUPdz91Si`(br8WMgoMuG~Ks@eSK@Kb|`}C zZ11<=E>tp2!dFGr2!;Bn6fw7fmm9t&YJ8-!O_sIv^wM{~m|!f93ml6LL3I;wl$VmZ zZv-Y7ZU5<2Dm+Z2{f~UBYsGA7gY&VuSF3^*lIzw|?5dfDveHKjaa|)LT%4TlO%f^1 zJMC=pCNH~F^`r`pWjbw4k^QoDBK;3JBjlQ9zF!TaNwKk5pI7NS-CV=iIf;RgDO>uk 
zCGMLnUrRko7ls9R7@5X?9k-Q4OR=I;apuU3%jx&H+-QYmpg8*rPAh3~@pnCoIpZjk zFBzCv&$VA3N!3P5ab-Tg9~i;k>&jgxnw6YuJ-z9(%W#{K!UyKZx^sz#Ew)8hEk!PlrqGzM`!W|2%1zb362d;`|4et+ zlz5LCT%Jpf*g+~+=*#3xImgJN)id*-uD-b{QC`s#NWR7-| zzkDG-pli^+FM_X}|kQu83r0_kKi4&_i+(N70P}?nH)?3;PDzD`Up)I!Q!tJJmc<`9P^ltuc|x z>c2~0y1vW%Vt`PI>$YaW!<<-Dn`hapQFd&ao)YlmixE1Tf~k%)4pJOVigsawo65a1 zveguL+F|mmXW5@Ma~?DBPHK(Agw6UqI&F*6dwg%1LJxA-lS)4(z zRM`51hFnDN3LZw^@4fVSwBfA&tf#?L;G{C)^on<;~v(JL5hhreT?6`x?V~q zv9DFH2G~!`_$RH>`A=ko2qNyP{@DMvtuk{Oyhy%ce(6q=+AWC(on0NTu3esQI{bWT zQd&^>EUupa)0S)^3c9%0`?1$ex4Ovxl#%&K61rEs| z8#}S1LQ9sr{DOZi_GQLid|jNEm#4 zWBB~*yu76DW$I(N9Ix|%1daH1bemW z?Ym(er#H*bdrb;xHU`!(Gj?I_{mWN0C4y2;JD0+iN6QhB(S-w6|Kb&19D~30ahG}4 zi+(e<-=KJ1ioapqXz!}ZiI0)R_;|ORdXHq6I9^h)g&~zH|NIR^Setn9m@z*6?l$x0 zE5dbe_EY4-Ki-}*BE<^Y2MI&C?X&HxYO7;R9zsreU+??;(5fexrqO%>u^&5Ht=6;X zut8DkI&uGOz409|blwueHzsjnBKb{8Awk>TLD=ixZ-i9b%iNp(;VBN`@1uSW3$W}= zS91${i;1dM44j%^#QL+cIlBjoJaVnzvZ{}tm^0V@`66<2gPW!tnJ9uQnY6KqGa_@F zI*Gp*b<;ab)6Wv5&=WI)zsbyB$dO^NMeWN(Jz!@o!!V=AkwAwD9;? 
zN!Y`Hn3gp8GsrV|wsNtE2{)mKHyS$1l@~o1&pWq?*tdzN3s+YV&(PK>=}tw_nDxmb z`|2HH2@R>N5rqzQ%|=8F{S0yyMGB4gA!-U zKs$AdF&KotYgS3&`XLSQH}bQym*5w@*O-4w8W{epB3SnR`L8$^ceGR$Asn>wNL~R= zl+e}CDSL&nX3>%$j84!S>q??)^mQHXk8&0P-uts|7-cMzs3JGFUu6N4oJVGJuPA#G zK`M&R(FK}Y{%FyCCt}&XI$KFkSfvxiY`B*8l7B@^{OR+kPy>AH2fxk#5;D`PKi%MZ zq40T~cPt01m7(CWcDJjJr%l`|IZ7a=*2J8?_t9~Hk|%8;V?2b-FE78YLYGo1pX*RG z6e3>wsuNzNy818W9*0iDP-laWgH-0Zh8s|3MTER>c$r%{-VFX zLVXd>Vq|D2D5wq%^P;{^mKJ~)!EtkB zH+R5A7x3>U(DDPEbtkL9fr=);%#}Q=OO8$lmx4$$hQQnsFggYJCjeGhaOE#x?geyN zf(IYLy>!4WoaFk3j0pmNV@~z$!Ers1m4TdFNe)XP2PK_329T{@o+~22r9VJX4G{m4 z{NfE+8g?wD4NT9G%Id%$&fqg)a&-%kRd#0W1-Q8b7%UlXL)Jr|D476VKY{i>psaz6 z&j)*afk>}2Ez5tWDgbeY^q~m+6G67~JJYs4ePnvF{RHS61&IORm?!Yg0i2Bl0=&si z7_iZbT!JQb>jUp{z_p|^m@_!$NDgxV2kgjbyAyQ_Am4yIr$_pw1)^m@MP87H85kTR z6%fFMWS}Y>sB|YkF$Gg~!Emj!Weu`65_FaWA)LU>0x%H?WV@asozIu_ft3&7U=n}} z0Q{Uuv-+Sb6ljeHzJ~%ip};p^@U?0$3W2$}CH(~JrkSjm)#|VXw@~@)~MC)Qd&l*bNR-LA=lM-iYPTMU8XWmx^~SdvOAug z2gj#>{rEAB2bAXlh0*nET4P1(30*BmonBuL&;d!avlbdb3(7bA*Car%)`K8y+TeQW z|FvjB&C#xVN<;7YVRe;)LS5ZoqSiLV>tMICW^4az?_kewoo#EUau?H8{8dj;-+N8s zVDlzV84sjPEHxBkgZ*p0@=pR2Ul!F*t6_`bKd8hslHv$A1yX*A3Z~VdR!@#)?36QY zG6XpFGpa}DbQue0YRa56PY8!{_v*Ip>x&mpyeNrFjyrZyY7h$2K8dfDHoe#MRjAs_ zYpV~etfw~9id9H0nuhm8F7lgQZpSsRZ;;ZuK2_B${ufBh5*IOopnHv)o2LO$H@p3Y zn%2N{oY(=UVIuWmUVVRu5>5pD0vx`H+|Jbv78kjl>%7 z%ceh`v2)^fl1rP%V7$DHahQZp;kRTpf4O_o5p@m{GbAHDQa^Y(NO z_q~?BB_1(s%Wsj!{-a?zk#Y1|afrN*xpQ(*@^gJCheWxyt@s<<@}}_!f#*v3BMm9n znP+URzZ@J`D+}}zZ=?Eo6LI|+zf3k>5=87hJVKn68?$8P zRfoB^^5p+OK1y!z)%NpZFbiqoQyat@nb~2W)WjWdX zNaU9D$jyQS%evza$`i0=)5NH&@EeQ}hZc!O$%?s*m#*a_3IXe8nWFO+&~HtI33+*; zT_pS&lrM@=-qe#8 z|23EGmcFDr_`SJwPdTHM+wpBNn?j3L{O+LuBp7v+Fn=aL#k*7tp4HS~d1W)Q3oO*a ztkiiq{%x;Gd(@4JH39{73PsAt37z%sjZaY#LtUP)rO``p#qQWhI72DN$N3Pwf)C>( zV?jUL>ieWhykStbC`tyjw>BgQh1@OAa-nN%uIwf7mA<9Vp(0B6>#A}cBo9hgOaC~3 z&JU+_=CXm-)b4pKaE`-X*CtqEj^J?k5yt!;u9VWdGCP9bu+ukxw$fB%5wGUHnBeH9A`u?ifzU<+1&ehY-QG9bLOM3xuyM#_s&f{U@RvFIIKUkRkC_6KibgZ~IT}VN`P?q(ta_n4?xNzjT6m;g 
zALm^tbhg;RU>0g~v<)r)n41&|@fL&+I;SASi^q=TxMy9;UJS+PaauxEGArXksU3)uF5$L5tR^Xl#U#a=}-W7`iYF?5Fgvb~Lsnx$7WiES!0Pb8~eyL5^CD9n1Mn)ZO56|P%291X*xL3(G^^Da#IG0=5ecqogP1yDZG~(=! z`1$!g6TLYSHXx^yIiZl|LS`=r+eo_#_~Z%lgPy(MA{P6mlZ7Lg?N=CtORT3l!t{vY z*`ZrrzVR(S-VHsZo)UssexhrU>#S^^{FGSo=#e#o4BptQ>gvXe>&Pu0~s%_FT-RoKa(4{`H**4_73mHgsFiT2G`XIl@8J_k)^nIYqBPd5J zrNqiDEGnD`JxzS!|13y`r5;xh**xj<>WuKUF+%UTX)y87y0I24nLNXs!xT* z;e?PmT{_tDv3y&G=@){vljbZlEJl0Tm!Z&ItKH*TfHfuUZxNh17-|9pxN6#zzd(O39n15`iNt+bYBR}ze_3OP&xM{$m zgU{4w;*c+#{nkD8#fkW@<<(~fI4g~ay-HwGb%Ll?p^2cg_}3Gzs@9>`hixIxnqF?w zOsX1wTo$3oiLFwl-dJ!p(8o8Q2%BA18%4q?>zv0**|pvtWGeY?b1YK^D*fC9rFra9YE4(a4lFHHHD+Dudn_0yk@552-M&0s!cZsg1{T)%SdepI%8V^Uu$C*7 z1})2enof^)59eRe3k?;lvyYe`+9KH6OMO{-??=Qm57g@ z+MX-+Ho7JXPS;~^I%u0FvZ-Y!{~`vi(A#?Q&27%Li38!zK}D@CLiwnXeCwEj;2ijX zxnjy=RYp_T{4ON$NAMv9L>2e* zz?`-4O7(xOAKyK$me}4$G?Kp0>1&Z!u8=5xcILz$8S=~Kw_TjDU)(&7?%B=CU8B_Z z^%>E?NQ*~ydDAre9YB&SD|F??R3@gpu(;#+l$+&MIy%zd<&}CMD-lL$CMJO<{v^0(6Uwh4%h@TNM zSLsKu>4+ih#6tQ}rPe*)a8_-!#2+_jFtrKCSB5JlXk4 z#nY6JC9N#J&86S>aJ=8x?bgCX<|#>t{vN2>=*6|?X9sN`5w3uK=Izk(aXFg*ZhIf; zPm=m$4M(Ik6`0?UAO#?qPpG-QnaUHS6lH!S;aXEEu2-^#ci|nn3A;EiF)`wF{L&5L z-A3u-1!Bv`tp(MDKrAidUdZ7q3Ez9H$8trVQ2u2c5JR7_{it>h`_HJAK)2uTYEWX( zT)nV&=i2kE=ml@K?q@Ia>Wugu6kIz!du$_4f*X0@>z+84jCoNAXJfpXoR4Jt@52nV zfn$4RYEdF9RuX%^Dw$mrrRB(jm#QpX(YTcBv&U?AhT3_dRHS$A`P7G(Zy4)x--Baf zZs_NLrQ0j9hv&i=jrxod6v*w$Gb?>*qf^3WGkJ6twMdxEhF(_gnGD&AZ#sdJJf z47O+PlrM6z99zRmbmZfOQC9)k_r8>kj zx{x7^2yau|+>;#d721>PAAgc*REAe;A3-EZH+QWTC)r6%@`T<#EC2S?IhuvFc;WmG zU=z-xN-B=4OhuMHNLz)N21{Lg#U$0-g%Wp5t$j2-bmRE37A^j&#O7wm5e_uS{y68(!*NaXMX3HlhofcYyYq%h16m~bqVviQDB{`3M3Jt$QTy^ zDd`-PfCjwAP1$^vnVlnE-UWIgjDJjh9CAL^zkR>7$#4|6P z@KSiWMV6vC$?vroA23v7L>w|a5iw;C?Ny|U!_0IMs63FZEk0nQbuSTg^J+CNM zN2|Jh{RC)K3gi)Ta_po0ITu{U6|x!-G2Yls%_YrpFe|CJrBS;<6d!u|#f1@-ci26ckH{!T|{ z?cS!IsIu$HVs1v%MsWOR%g2B=(Q)u3KJ&Ku=J|s-(j3?^|Fqg-HtyF90_`8aiZx&} z9Y8&xfp;6H>DGG0dMVn(1cv>z3;5P0TNnGvQmcre9r@=cEB^1GKOq7MS@JX@Hf!uh 
zWE>buK+@aun=L$-qnT$dLVeRBqwGvO8RR1L^2PCu32S%10XK7#ibFq}m0G_O)ry?E zN=sJ^6NA+mH9qHzRb5ASASH{^v(Z1b!>lYjZA_8oZX3B_MeQ6J9s^#zY2B)s4%CcF zJvJ;6F_Azb(>?jj>;~4vUejNhLa^@kp4+H=`G#=rW;qUX57a=|DwA?xDW6Qt=0m#w=(l#5}#$&serT}>`M)2ZJk&i|7s@KD*8XkS3S&D1-aYn#H6=riUC*ey>gpbdsh3;8>JH;7;!UH zK+fdWKciGN%m@vXGcT}wV3YHxazHHVQFL+43*4zDJr}Is%=)I%i{~M1BArIXv>6Wh z3dXR~{_V{C1UfnAq?UNNNJ~if*@~vxHx|vP*Hzena)}C=-w2d3z0aaBgneTDn4Qt) zj;K|=<96na(duBS+=e&~wH+2rE=-6aQZf}MCR&%Kc#DQ1)?unZzh*$h(s14#Ki!vx z%b_z8(!ms1vqnn$3<($^i|78TarHtTK6O;PUL4HHs1zgpp3yLp&JbDb@g~1#C=zOv zL1(l?*^wNgH^dTk=Z+K`2ffkKe0wq{!la?RA|5tDbj}&=Eg& z9i}JT`|rZ?9!08|7i{e$8%Fl*#@F?vN1on$3LaD}E3IT?wNkwWu>bFiTU$e4yV6X|ANG)Q!dnl{LG$X(s!UV!9NXqRk-JXrt3a29Yf>mSk)##K@8? zms_?hiA)r-Gm(A7EZ@)PH~sEEzw>*}d7t+@&wJkIulJlc>bQgDW~rT0C=_b5wH5J% zsOyMIS5iX6qqPM%6iU4HxSjJck^Da(2=c!Ugb)BAazxDM^Laepzf+N1TU!(H%*+gj z!x8zjv$Ms;#U&*rNl8g#|AnTeW=cxRzX?lAOI$8@etupc5D10Bm6a7y+R)IDXtBJ! zytTD8H#e8bWQv@Hg@yk9ei5_T?BU^IJRWakWaQ@NmYSLhqg22n&0m3$DoBzhpkqPe zLE&v>5P=2JIFO)$+{1#TgAlV0A_AD>EV!X8r0s+6)8W^1u+I(loj^jdaG(~>$HJK} za5e;fy9nac!Gryfw*uF=a4r(*AOfZy5_b^J4Z$CSpd%YDB*3|FI7)#m3nbMH#HxX4 zHTZK58SjM?A3zTav}OtGGXymma3%~^)8GUZj!=;bDrETzp7_8*FZjh1$#(`%?0Ml< zf*4DXgNIp0Fv$=w43H#Ukf{S0I{aX59`zufibKNo34)dRR3)Cj50fO1%7fym?uE)aZup^vK>H~4R~PzGc1H{1dxu0PxO&^ePNgu6s*C; z2{=EBObtQ~8;&%AZ}rHRI;6J-_E!r(R)fBBq_Y_5%t1adk%lM2+6>r~1Q+69T@;+8 z!Rb5T%`I3S1WW&d-vVGO8GJYkn>^w3GqBGU=DQ-#NJ54q=&=X2w&0;PKhhc&5n!1) z48{vHzdO03Fof9%*575g3ku)7LHl9P|cdBv9!B z%3a`1OTq9l7)k_r1Yx83>f8|+iC-HxgJEXS-xOXm;Wrz@3wp>iU15<9blDHhm7s=f z1n0|J(PVLqU?!y6J6Zqwa<06Rh6$F zrzM2CI@(!V9W~O^P*d8qMe0bGI}3$cS8h!-ceZLCWKVc*4N%;8^>y{2)c3@)>i*ek znzp{alh=J3Mk}6lAYMk!TI_OsMfES3z>RnM2Lt+9d)8Vyx87i_&IphY|5djLXmXy; zEuFQYem_brpEdAm_oR_;7uI7*x9N!3jTLc}dynu~mu)gpGwt_037UsTo!DddaG5m5 z_Ushf?>#>x*E4;lJfpTaWI0fxQYWTVSLIJ8*rZn!{ByaXGh9_H;i-!DLxN0#V5oya zc&bcW5TeiCG_KDt&WcS^s#Blk*-$8Td^s>nuRe~TM8RZ~L`{js``uw=7X(3@f%XmgNzE;TWe z^~rM52dfeHneLcQr-ztrH=Lu4HRtsVRuCMMqZFjbFU>9P#$>l!72SCvXXsZW7N4&s 
zN7tbb?n?Y9vn4enY9u(ngO^+7w>Kr#*d)uK+PCGBM$5-+oYx^c>EFIBd*m5k-1|WK zxmZEhfi!8lp*`M=s24cO9logkXnL}P9x(Paaz{?sKYHZrp}FFD4fh7>7B{CF+?~Wq z8J9d)Qat@9=e$K|nBxoQd7NuJ?`#{{sbPaoh}YzJ;&ivP?JG&jrO>S`J0Eku5QeSn z9Xe6s?x%*GOLi^cCW3dio_n1#Z=DOtLA#27mLo6AXuJLxjd>&Z*^$1uU#mQOx?QfM zPHm*bI+WSS;LAJaChyKXMapz@a&ifp1%4T$WepJZE z+uz`{aC5w}wQ79FZ95cGoAalyhq{M5-@(Zqt7jHgRaK?ojMT;{DHs>Lk@->&Q5N#PWYLM_4;L%^5QC&kC+tE znkn8!l9pjVY6PpZWWt}Px9Xemf38*#_FcU|G~5U=$UNxR*1++#U|cNg2t#8^S>BAE?tZ?C9F2-p9W_wDmCu KV$spFq5lETo?XfS literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..011e0caca1ee26f5b38188a9a4adbbc369afa68e GIT binary patch literal 3730 zcmX|DcTm&K7Nrx48kB0Ghav)@C`OUqqJV%%M?|`GR6vTf1VWGWE=`nT0R@6Ise&k? zARkpag3mHR%Oic9j^hf0XU}R)uU|;|MKoCSCktqMeQF(;T&CQLCje~=O z;o;%<`1p>F4l0#;)L&Uy+1=fpn3y;cgoTC8%*@!^+pDXqkByC0RaNcn?H$R8MB@L< zD1ZL^SzTT2>+3t(;z(0mTzq7O!C;Q8$p6KWa(jFGXklw>Yjt&XVPT<;j!tG~CWuEs znezW0FI^78ii6`!v`8^1O%}q-Kq*onP8=jkLMbvJL4x{3j20~hrrjWnIGA*X@UoO3 zQP37kOFR#fB*D1-p}z>&Oac>k!MHUTyG;!frG|(ecnX89bV`Wm;R6w{5ew2(D1oBj za35@>fb|eSJp_vbU<(h{;=o1(Xbc4l4=9gB4t<2d#sY2D5K331;iSOo6R;czW<0>S z2~?;@OOb_YJ;9h8lyePi#)HjhsNn%v@PVrDfeA-2YE8pR0`d;{{R50Of$x>zZw#o{ z0^7^b!cRcV1f7Xs&J!Z2)0(b<8DB8%1xC!FqRZg#I9MHpCi}ruE9lJtL)K9FE%4J2 zG@uWIl%RY?C|ePbHo!^Bqmw=rd_?r%vlfk$zE&C=2)~3a2Qj@MwTT#GO2`W*f#v*~C zB)B3#{Ys#<#Dees)S`P(nF;7MI=~oGx=~OSitH~7^hAL0G0!q2+JqQ$R)zND+6DJyF< zX&L;&5#yMyX>dWq1j*~<5y>EmalXZ!smpB6OV4Eo-yfO!$L{mg#cVsX$^9kUZ#!*v zy}A2?Ntl`K)dJ(`!Q7bx@NECihC!-oLEMg?;4GoJ1^MzB>7$#sSJ655Y2P6Zxs4P2 zerLT^@P0}|KJOP|PR6EeNFqY?CVY2?esQFA1g3Ju?Bq<}Ma#!?O^e)Hxbe9z+j;rr zPgL%Wx`FHSPR$ERE6ig4XQkn~7Z2`dUVm;PV>magI94m17>+|ebBU#)ajID@ix$NU@>ES6p< 
zXfwlZ2TSBDWXYu!ai#rUy~M7<6FHX*n~Z%SRaRtTpt94V^Mi3AR}yASJciT?|NimA z%O4YFh)!pVQw5SmPp29c-U~&=PjZ{!vnqBgG(VKt8rtuK zbbZnm_+ykATP3$t7peT-!b9421@+D~f%w`z<+jSu)~)l>qRlNO={%QYYdJjFs(w~_ ziF}Pt7MMXzf101XvoNzoC!V3_k<&aEz?d9x+5erSu_v?eMEa?l`<=eeREo;<==s%f z8V)bLei7!BY^-RP_Tq(DSk_1EzaXkqKS>wNiT1qq31yD#fLBQ!_a;|UFdZY7Oht?T zJc~(2+iNJe6Kv-P{to?ipph&#cnxz2Gz z8JXoKj>xoR67+?)JHWmq!gb%w;aqt1-(q2(8x~&OE`DhIi%`o!UnYut*}!h`ZkF1# zGjaVQ?=mc*txj)#=eiZHxcv9>a?!4jQBJe}!s1)&)m8Z7*!aidVst&}m{7sE|3<-} z%@*p?Nsdz|6s;0mbcgDc`-rQsXys;3s%(k8}x`if^LMoDCDTc(^gZ< zzZfUAF1+UI@wM<|)<^u(Qu{zq`!~FNcb5D>uauLAV;KA@=gFE=n8LT^Q_@0@M2y-8 zmKs0cPb?=EA(QUKnUaOyEHr;JZQwfh{Li@rLzeP|6g#;YIpG_+PX)*oyH|APVpOsF zA%+CLv$E5ugTY-naheRukV7~f{`k!u^9~9mjq4O6W7I!3Vuzy*_=VrStPK^EDCv7q z+IK%}IZ;?5g|PS66xzc4Ugp=mE7`bs*+HZ;ZPtv8!d3^BhZDWm(J3Dr2nTOv`k7IM z>VccT!uQMRbxgbWv6?I6SKu+;tJ){qvOHa@qr}JfisVn(FPc(){6xp#+xyq_T=AOX zLhaV7?6^Q%Ct~3h3HG97!4Q1cv#!~BA}7aH>sje5_ruNzafEv`l9ZvLc{?_6t|ns_M*#;?#sA3lHl(GcGktssvEU+JBJ)#=K$ zneNa?qRECNMf6Gin1bWo(I?@aad{P4T%)z&QGL^SSg!6I)cp$ zp$;X^zCzmpB`aH@GkO8D3oL%PV|L!daW#E?Rz+$u3Kxi4EKH8Q3Aw%z#3f0N-7qDy zu217Qu|r;m{A||GCEBa!(CEiwJf|ZsTuj+|w2JB^r8E3SARawD`^VtCyxZ+4Q){-q z178gVbH4WA@A;+&!?wQ0>@`xpSHcLDEzw*uuA>$f7IOB7{ki8kh?bpnm?Rc>XYJX6Rx2YvRdB3(BY%$^jLXYc)d2kDssK39ngjWlWy$QRaaN5P! zt08EQ({Uuyy*@Kll5iq*tdkTY-9I%jZNBdKnb$11zxgRFoze9DL{(H`1I}IjkI_0Y zQz}4oKku~uM&acwM9~N)*5XCl7?~U#ywj?~WrV*FhNx5d`DDb2+fw@1&RDj;cz#CJ zPJXK-UUcmGu}W9Z@4~jkr9qx(y3*-vo^=_U{pzmfiJ0ub%(OK;cF{l}exGz(B@JPN zGc2)5ZWQ&r!P?PgCbsED`irJCBg)U^`R?z@iiQ5XU*$e=y{93#yZik&&)sV6>xzv$ zKJ#qGPgb6289#QS*;_L$z@9t`ewA7|D8r?v(6w_hPER2~8?rtvq@R%anS>ef!)ayK z!@gw+*v9)fIQfzOnw5@Qdd0kpS2iZyvN%;YWQ(Af+61Esn+gU>ufvmuvWy*EY0agW zGcsPD7y9GQw4b}NvnMBlnBH&YjIN0upnWZtwa4f^W-hC7!3vmFRHUW7($5PoQK}mX z3cY|rsM-&akH@t9i1SP&^^_`)YC6XLW2Wsl>MP=p!a5J%F`FV?wkW30M#BEd@*B=? 
znimQUJCl3TDOlDdWU6ysfP#J+zz~nXqSiD~#wBJhSndU(P(mJC1z*VxRu23uh~9eS^T*K}$}y!=A1cekufdq)<~qT>Zm1?O@@Coa2_&<7wdbuE*T#MNW!2d*Zz+M$#g;1z zap&Oi?<%dbVkBTf%$cTUs-N1Q7c)%q>CU>!$oh!;ANc*XtbXDp{)3ibKw-W4<{|fK zWn7!ovN4mZ{~4({0!{h(Ao+3dS&yPWwbg|g9cp`VQrdIh?%&Y5`@?7WcXB$44`V6# z;tqSB+f2Jwv8fU-UN>Vw9HS z(zCKGO#B#|MJ;?>r$&29IUOUCpaFE~)dKj2A zn7|fpR%jW1cEEH5xx`7VFTS^dOhPV8A^9a43PM>ixR&N?q zIoJ=;i%wxnNl1X5h%?!0#7WVmy%91%T!?+g?8`sHVScH+J>%{8gDssi8p2V`>6`%m zVE&_Er`|*a%)Do_ixBPDE8Ha!NNlVBYy76h)3e(4amaLDc%lE7l*2t@V3* zagW2*mX(878yQkD^wo+mZo3L@H)XcwGf%Vl@>^n%8MS`(jq3v(;oj@^vXMjGv&R-3 znOM%Qth=~L%KU9(^zvfat#1ejcp_lKpean?zXoGWn%(MR`j@hcA!ldZ!`0XsI(k;3 zv;>xgyQTJsFk}>0D>;Hg`FMJcE}(w{V-O4JFOI<@Iq9OrLVgREUz8{q3S~vSisCrR exO{ymVn-f6vx-}bANu#JO^4Ps)_JXUH~K%ZGM^6s literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level2.imageset/vehicleMonitoring_alarm_level2@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..b98b30ea6ea8d433de122037c08e51a1b12df1ca GIT binary patch literal 6172 zcmX|FbyU+){{|F6LAsB}=l7g*pBv}-e4hKq{oHfzjWsmTx_yiN76}Q-Z5?fOQS*jE;^1u>w~+C8moL)5M4|{KP1JVzLM^S&W!?AE3Dozy*OgVIl%_ z`Ie9Hj_<<%9wCB{5XN`;hW9d%_uPjEn6?6@%n6BLVAko}lLtr;0;cVN(*s}^0n9iM ze(Murg)d&+0|*zu!5Xle0?fMrJCQ(+1|bdvEOY`x#lS`cu=yU?4g(fm0P!Ng4jfnu zAw&xUyJ^7Ad*C+=m@)>Ie1Pd^#C&~VCmxuEow@M<$GgDc7BFE2lqdq{$H3}qU^|3Z z>p?`x0Gsgu-iMeXL(Gx{7Tti=Kw#dBSmgptI|5@yXRbWJ&N9%K4J>;AqvpgSV_0B&kvV+I(j1seT;66obSA>elaFl-O>Jv|Ro0`@Y2FA>19 zKTvK5Oqc?LCd5nyBJ#@mH?Z;(z()Wm`Lh6N;O{6fg(Y+q66SP>IWp(S=RmJDaZK;x zgC4O;R8xH_XfzSH8z~T==PdTuk z1N6HQO59F&lrO`?FP5|c8wuj?pTwCTg#N0_CM2+ta*^u_R6RY9&^rI8e)(Mq$a_r8 zlLAt~z|0rmAP<-h13K(3b1lyk%`c{(5E>OPh)#<~}=G)LoyW z9`6xi+!i+5s;&C;+9Ff3RimaL!b3`k;bW|mhMd6OKYSko9v^0}EW4i)DlEsz7SJ%y z-b6ye6sx1I0tuYioO+|5;KN4Y{c#49P!Atpb^mdyZOp(hTM3rDY=5DdVa5RUuKufyOLQ_1OqB;vxp|UzXmWn+T4}E*Mn|}#f 
z_H*kISXox%&M_?hqBxb>atS zj#4254ys7*JqeqoL+rph5hThkB@quA?ubmx=^jn}sn3Vr-IX)7>!f@+-IpaP=b3#G zpk;o|dAtZa6X`Wp@;zfTF#Q8RirL8>zT#-s2K|4U^dORU9N(0*~StUFB&?+ttCSU2m!`Dy~;eO<(wb+U1&XZb>)+gBB8bvr;Roc0PEM>JA&dWWqC%1}{Kv%En%_^3~MV zkUjYZuf<-s7#@^Q0tsTWAewp`Vmf%FYEn(Tl0Cu^3uEOb^EE;nul1MgZXAZu^jl?;Qz*1 zRJKWkXm?21CpYvYsIfXIo>?>Y+r)zHOWO`ic!(eC}>&BlDeyFWeU3z!4;K7E1^2E2`(!VrOT?t))!RM z%Vj7rR`@vQ&iHt)VL-`}4Ly$8)E=W0vIe&c-VjdaNh;HD z*q5yzn%>rjT!0Xzo zGO;4Noh-?J7IxNrk7-)ki{mJTYfRrx_VtOb4 z3AC0p)z;tin_h>vHkJh?o7E!DYvZ6vLIJ3H^_pODGpQfDG}GWF&qFH z54f|u0sCx^Pvwp@*Ge<`G&_z>qpIB zv!7f=g0WT5aKXPmTZqoj-@&zL(RM%Im_)i0n?Q)#ppVZhNzp|7X~ASzrq(FCV(?++ z83n#dU9dfJ##d}Db-b!AF~(HMch?)DQv-T2lu2d9RAhQ&TRqv~2-bEZMzBg@)GuQ^ zM-#EOau{2f0*%_9WSUcg^kDol@l}5S>G3kI0K!y`4L~KLi#W9tCrI zZiOEdq?raA18wUG_vXpXlnB9|3s2$25wMXSJ8s#ukEb%hJ6FoKWBop2be#UVnK z#TGJiCg%BBLvo<<8OM3GL>Y^7bZ`A?$=DZO6TjpaPao(T^{Y-I`wso)XK!09aUGW= zAQXn!@A5XOU)IGYM2G<4KX3h93+%=a`woYj+y@$kU9iOob|EQsT}(BrzXBeFNSlW^ zq4)1^_KUo5b|3336M%y3N5U4p6civnmS5p;@(EV6Dqrc3CXsHKOVyb(TZ}cTW-j8j zlK~=vLp%o=UCYwNPV$rJB90f+9z?V1XmdQl&>WmH6y-G?{q2S7! 
zX?`RYg>_ez!BB(|G=QEeFT7{2oe=VP$o>NN0b!S!!Vq@gLT;)8-ONAN>)|xS^-m1HUd~dKN1b>J_I zW-MiYWO(4^?LIK2!l&1b*eJTMKLqh|%b*Hu6?9bSn6A^x-SS^E+bz&^qnqb(^_AfH zaQ!Jy(zja)4?7!lRa5`mectfU{uP}{@=F?2LEvyCJa`zytpIAz@_JN-OOy;{N(lQ$ zJY8t=TEHE4MN^%;Bd^NSSb-k2)qLzd=?Md@rx96*2eb4qK=8q9TKrX@7**EVO67q3uY67(^OA zpWZq&M0C2W%nx3ppxpekCmj)2HpKZOiCoxl8hY|K*j{!E66-9k^wqc(o5~$@usTT4 z8XCUX-9F!2 z{xLT<%PPKb?^qWM9R(!E<7mJS%*TsZuuw|Q8qb&Ea+%-Jj^TN2P1SZMdOs(B(gb$xS?$QMOr{nqieL1UfSq*#-k6tjcHvCHr3k4GsIPUj_kRR2GZ%`x;?%Ol%;IaOY8+&iDygOO4N^di`mIIZ;gVJ3G%Q9s&~To_ z*VU;u@B!urENR6*`;wo_7G-<`f5g`!3~Ia$f;s3<4VkoBI^R_&YiFH^ej~klYj|b| znZ4y*D$L?Cd=1YMa14IS#BvoLzL zc$YClpT&*VsHUXfIklKwT_bA(w$FYYeBlz98n1Dldlrs=9HjQ|Vw)aIJph^QvvBc` z8DMYLP&Q@v&k)d{(1m@@RfXc_k~>z;CGt7CD5W?!O=zb`R?@MH z8PJ&(8VbufzYd@fsfwld&XKpwGyTq_QV8T5*x5JM^=K!A#y#C=mLZm`V^lor`{^YW z-H)4L!dz?nAhFH|e>Z$QC-d6g`r>i=k3S_Vj@j3TOh0R>{fbA`UF$F&8}=&_i5B@| zm7wQPN7Ys#E>>YX*e&xgy$D+{q%EL0eV>UtC;owT;csV00h7#(x1<3LE8a%Tl2mo( z%S+Ci%0~$j#S1)`L^eOa+=&fz&pwN-X759hM8e@a<#&6Xj8vwFl@s0zA;|tIgpHwm zR|Dc^`{PD7^F+T`?9 zUBQN=D#o#YBY#~FPqvi$z{NIHy;adUUyuGscXRvkSLH$LH=96a>BKTcQKa#A)BEdm z7JmDq)MXQ3{(2T|m-0>CD(r^g}{(e9|~J$)BukwXzT%>={yxDKO4wS+{5 z@g{}#lNPMHKvwYzgFktORAJV}_wmZ*5u>W83~Hr+Kjg4cY`KH5RPK*wkF{IFKIQ#_ zwrJ~fuCJut7i`!wmkyv#fpY#J4XKdU&vP(_M?9mk&eNmgT=G|x*nTW2>UEP{Kcn$a zV@%X6d*wY(`5~3WKY}WB*YB-ZGpN?^XM2|6V@i-0gx{0EG9s+yWaa)L7flOXDI`o8 zP&o}FHA5EP0=FM+KMvyn%BrbFa7)Zt+_~r%b{+ZFX@|dl(w;i@Au5)d5EN^XWF`p< zJ$K`q&)ThO4Os&O7Gb~5r_R#f4@~q02l_N56W{v1{_seJ^;gG`!7b|UmI%f zgq8B&UkWc<$DWH(Gel3zTTLtHCy_;$qiIW9=n<>=smJrSHWi1y>UIGuDDvgV>o_x> zS$jcM)9SP6>|Qiv;=5RzLI`h$WUq&>L-(dow|=wuIPsN5zG3t`=a$Rl5ze@_79%!7 z%y<4RQ{;^H9m#C8?ZW*4_cYSkKg>e-HTS4Af zCd}3_+}SZ6`et2(d+dxyKHY}VMPQjn*R#TM&`ILSN9xc=6E^IHgGam>A}$MWJe2(n z**ZT{Ka}Msb9Hl*Pn}*uk22FcC)ki%LDPOV@u|1NM;B98yu|3H-$6( z_rttSUOP#$(?KrXYrdsegG7~#6?VqiaBKpp^a*WbMJw^y_WicULvhy3Mgq*x<8=lb z8Ex^~46@u4`6PUIM(iEsq7>-pW3%?Li?u@(|K9HSstHtH6jcSS8msE$mX5ug5V3aI zyiclq0CWbSp=*>1+zME_Y5m8Pl5+y|;jQ-fH8Qp9hn-uWteeiX)o9CJkJcK;svG&(8Nm2F&gk>yjjLC3` 
zq|kR$)#0VSsZ1+vyoEVLCK}D3uEu&x+M$6+pFdf@dpB9=pQ9U1nnik%@1G?Omo6TY z=NpBr+?oxXmWcZjS;Fn6LlDXUhZJ61Qh96?!=&nn^KWBsAIZ-vR*5kUpHsBUaQr9< zWt|7PT_lmz4H?U;f(0e!%+^x+X%;#BOc0aTq%QqW)bwvl31z-ZftbX^N=?z#@zC8Y z$tw6mhQm-xq3-cg@@L{u?;>E?cOre*r<0Oz1bx$y*65SyeG*9%PQjn~E<0T(Pv9l5 z!S(R(Po+&e$=P1k=-i{%uGj378n#cp5ab#vqqB4kG|t3UrKpZ e4eH8Z@nEMqW-Y}uEry-|)OsqA5JFvpT5%_K%C zl`JjDnlfd~n1wNnWyY*A6MpZ!oj>32bv^g&bC-{`X$FqjDe+Xs^8; z3@+JCCF``zZ<0FK{X`E0+R1QrLfA--|F5mBElCsjrGKVku~=fZe`#Z5gU{!0Zf^2; zJV}3LWrfLPvRJIPwzh_bhW7UM($Z2oolg2CNy^H~s;jGuKp;r45~PG7F_J123MFv~ zjY_3Tj7TKn{E`G{Zf*_+gT1}Iv$M0YSZrlw<@VO*^h0;jUq}goSdJvr!AYf_#8OYv z0|c?siM4+z%OqBLN+Lf6EK;u9!m9#+7&u)kxd7i1{ooNlu6=&C z_@)?GD z%%;1*LI$v$&YMgFSgF8LGC;jCR~yIdjs<8j^s*>wUF2MOPXGiYCOgAetA54psfRD+~tAXfX_$yhiw)pc#uW#-OhVGd-y#7dZB!H?7D z27y_YGCX`Qf23=h`}f9NA;3=Cpd_<~;+Okk#66MgU&469L9@6Z;aDL1L%?jaAF0}x z|H((x=FNWXNpJL^x4X=BUY;LtVvaffq}lK}P*D?fy&B3}fC!5qf(%Q3vMDb@mxEMi z+bEVKcO8~IM2qWiXLrzU+5LMJRaggqiqwu0H3aN$J+EFpYeeTOz?f?CS96chi*qN3?0+BqFmtp>Vy*!qi-NhBgG&G(T4NYxV9EeQV?=vr^q zdzItIkDJKdI-NCmyDs&C7}nrfx17{6F*W7oB#p^-I@l43=;X{ni5-nRoUwm_?I{X8 z5+X*>Qoij#`k5QE8(YqU!O?&-OmFl+-r{-9FRRya+^gSW`tPqKU|mNjEVY}a2IgSnOk}el1P1F=Mc*73lTJ5S#U@UT!daY$# zb+i;l2LhgIIvU!%uW0U+}w%^J~$t7RkMrioDqW?$Qm9kntEHJZC| ztbuxacchBJ`XEN0VhI*py?C+jMbz;vvAoi0Tj|RYnXe(935_3N_Y~fw3#Co}f?!cj zas7`qE0m*MM@H?#nR|3>@fs_AvO&dJK~h~9Rzzl%ue$Aiy<51si7Z9!vNMmabgTO2 z!9cZoR~%J>Y(ry6s%K^$1-Mz!b1cd&U`SUsRL7~k;Y0n|Yq^v&TEXSi8SIXRC(8Lc z(TMy%fF}P>`7Bb{ytZ*Cc6lod#l6%|Fl)~|zfk0_t5>dO0rxDgs6e{8am9O~@&Pu= zt=iFzvAW9iW>BhPxKps!xdfBQRIg7)_3A2m`)%07op&g87htl2qSpya&=|Qq2j(%< zKBrLARu%MVYoidqE9Y&lUZ2Q#l$JbA_os$dWDiLDBV z4nzLoq;?v0(U;Sx%T;!t3qwKQzpE_Jgs|6k5GTv#%_qf2o*x~YG8%V1g)BPOnmeHc zEsT11hfot48mKtkzid}e#P<^APabu> zqSFD5sw&BICZ5o{q|4DraT`lEG6rU(n(#BLl~t3Sv9~>wI4Ji3(L6al7891yY@!{7B4FVzU0n-C@9%8{QDNBWw2QdtyU5M z3H+KsOM#;&c6lT|zumN$i%&p|w^YO6HKjUg(&jOb57}2lo&pR~Ymfi;?;)oaQv-Z6q{nl5%T5wZ)F#pKw6zF#d2$ytN>m44O zw)>D)RMKK-KzZmm`@7bjTl_cDIa$8wj>HbF?UP80mr-9m!1cEHpb3qaQZC?*mofQQ 
zuNfZTog#$q(qBH76))3I&w(+ed_TWq?h|T5;$K}cM~^U+rcks8xp5!A%?p7V|5oPI Uw1p6C`M>|&-o_c$3JJ*kF9C>qCjbBd literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm_level3.imageset/vehicleMonitoring_alarm_level3@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..4731d71641e4b9f5266ba6014fd592bc885096b3 GIT binary patch literal 3874 zcmV+-58d#IP) z`+|akSXfxj&d#8qp#A;*>FMe5@$vNZ^q-%f-{0RqKR^2V`uh6%{r&y^{{Hs%_Wb<( z_4W1f@$vBR@aX91`1ttX;NaTY+Vu4F&CSj9^z_-;+3D%&$jHd`_4U};*zWG`i;Ih~ zv9Z+D)c*ed{{H^#?Cib0z4G$%prD}3%gc?8joX)4*`ZVa|Nq*SSlg3U*`ia~pHbSG zSK6Ca*`HMGECAb;SJ|Fa+mBP+ic#5}QQd+~-G58;-^<;HQ2*Ox+mKY;jZ)g4Roa|X z|J-KUmsH$~Qs%dc_2SY0*V%Q~2xN_U6>*zL5X#i~sL}-JfOf*1G@g zdjI2d-gQR){QLd*_5baQ|LlhU=Xv+ra{uFP|KMu>+F{$8SO4gK|J`ifsAm7xT>Rlm z`tRre>wy2}bljv}|J7I9lT!WOOZo2M>d2k{;(6nXW89}y`tj-i^PKWzwP2YP>^V`Pk&Z*$Abl|6N;H_@nqGsf? 
zV&SS@+oD+8pI86WQ`(kK`0qpc=H2(|+xOkl?$5IA%&Y9LmFB#Q=%Rn=yl3#cSMW|0 z`t$1Sx0(6nkK(F#ZuZe;;ge+Gt5)H%RQaVw^@BR`$hGg%wCcj0>cW}o zy_o2md+)zz?!939&|mY&UGKbI_nT4b#!T+lO7;3b^L8-&?wI=Ff#b1x^VE3y*Kg&N zZr_t%@V!v=i$(aFLGx}Z@nR(O(ZlGehWOlh@XmDSyKMc{Xy>_J-jQ4KuvqqwJn>u} z??w;pI|=X5f!vi?-HuoM!BgzbNJf_)k4} zWFKKvVcwpeK0Y3f6XA^~z&}v*N(E|t7^8ZGmh@d29iFn{M8PT|909>1LcCI-wVbhf zsskx)Z`DwmM)b*5t2{kD$e0Ll!L#b zlircC1x+Z@pti4_P1zRQaH^;R^+uil#Jh6J9`3D>7jIn-j`I8>7|R9eaM{!n4BzMsYobz_v8U75RR_)2%u zXyiY9aGVMF=h0`J(zgFiww283Gt}fi=(gHd+n>Q-rd)5@RiCl6O1gf1f0HA zPZJ6l9UsB9yT{9k8_gAWclW)!0%8^~kBd!>%-Vkt48#3GN?8=1Fdac@)#xyaLG5&g zp(_Xs1rRLv%3YcB17q@+$HqoRX2s8@Zpm3v7}Tf^LXny^4x3e?$AOSROy$0YDnP!O zp9#p~{J7W^V8lsvV7sfg$>$e|{%mxvH244z}w*f8VG>ZihH>8GJV2-UuzprCzKi$enD z&kuo#K#*^@AD%ts{a0T*m%gmFw)TRNhN=1~4U>c5Hr%LJR#sCIb#`S#&6PLbf+&H6 z#BbM~osP)Nd57j568nU|I!p}1Ey<(E#!DDv8wC*tpMAqZfn@!p`}sYN%oOWNygIDK zC@%>`n#e{!XR&+*lZr?HA(21nbe9$6jd?~-^WQU4`Jb`=N#W@s^7KY|@P>_Pe-PS>!#l?llvUFonUcpZnPY6UT zt&VuY=lQ|dr6B5k?*I}ANd9FYU?iA?kVDdKAC*!(^E=W4GYm2g$9}-q%YKqtLf#Eh z5HfVP5#dSZNd(q<9Q`|`8) z$%h1drW18oBr)74ykE4w6h$Bp;CF*PVk9#1mVqJKC3OqdR{?&rctApW;%90X8VQw* zT=@j*wql5x80}XLej4woAfj7O!qv^Lv`f^jBnb!zT+FBPxfr~>PvDP(1KxqArM`9NL0>+O^nvm6m$YY_SxQ~R` z;Ni#@mkH!L87&cc@^nbrv_#AXz||XgsEjv-{GihvSs@{EIpB62U`B`tVumX?4U-s0 z-r>k+U`U9u?=1UsPkBP}Y0ykgG0mcU(%rwz1cg9$^RuQ9v-@jzZirBdP{J7M}$R)ECW97=3 z>j;tei9n3COK%jZdVltNk&&r!ajB_U@kG3e#z*Vct-g5BXcD{^R(rv493}@p01;!y%J?5XTDkHyP~x+|fEXwM0R|zL(&cZg4zz?H zk`UaCr(Au#Al-MU6p<_ithT#F}FK-mN#Ak3vC{*NAWilR+!z6@``nl5bGMNRw zO^_nrekapWFCk;`rd3RnSGOTbc?BsVioy4k$?ka58Y>~@xQ1`gP~<4GG9$QNTfd;O zs$0(eDz0F28F>&tNX(TE`{T0=>o_R|EV@FQ!x}3|jD>eA7cM?15JfRFdmKL!Ey8{o zjD8CV{ewh6vUmNGXndmaS$yXs{TMD)vs`o(Yarl<0D4)b1`%ogObaIHsDw5p6|&-V!i znOurn(cBn<{YqTKM{imwVTmUqSYmb^OVoPgGPmk#L_`pu4D6PSbTl*XKM-&86vf^& zGK*oY)f{82OiTY!CKL$b4Oa&CH#h4j5QFu#2BSDN-z$Z0H?Klk3_E5;F*l874r8TF z$V3rjleD%3@**@un5+?B9w*Aaw;?$H^!2+H8o|b=p;fEqX5VLqKr_#It`{M zbb#5LM0N+Oyy0j`#-?dKB3J~Tl#R{u!EbYeJBe-?M(K-Dw!&Va#~uo^+q-$~#!D4D 
zSj6rH-r3MImRyq%%+}NKq8KMINZ-aF64D8$5?urhGrhv<_!G(k7F7|=PPj2QI>WHQ zD2&F5bTW$IrZ*ppk3GJChVikq#=z={N_IPWjD=}ZSTGAtfIAB)j%aKs!N(qB&2hUE zB1aj{sA0J8O=NkyzC2s4&dy=Enx!y+T)cl#fq~6p!{s0ff>A66;iIUuxvsu2{MNH% z?>;^!c@9&RFRc%T6_!8>8RhsWYLEp6X+-2GJ?NntjKz;DTV}K@I3Z^QGaN3Pvou%5 zHbt|X0uL-7pACf>*sLBRh=h!mQ@Fbuy=*JFp=fL&OU~B$z&Fx_3-iBunJJdhFFLo zGNLGtky1R@-%syFEi*ou!Y%6Z3~##OXGc_c0`VIek12*gq}hYg|51h=E1K)?0?Jgu z$d#=o#1m?j(@pFZd$fNiF?AVH8fI=`OOMeL(-!$-%P>r<3|{BNv^^R07~R&woHvUQ z6AiPaEKyq*&2@Fr<76<6T5|Bh_(_xO?SXJU8AUm4OUz3n2OwX)-kD0T z_XWW9Kwcu_F@{@`A%<>R;d8;3**>RYN}RkKusi)-U4f#)f7_F{hVfE8XN#@Ec}Rvy zg~Ie5EPj^Ve01xnw6vn);-a(z+jbt^EPj?9+?Qg=hUvNRmO&WHzP$%BlQpn+UrUHV ztr9bDyFy_sZAaR){!_WlNLx#a(RM>928?Bok-d%m=-+!}4@(0=7;RS+1{{Q*AJ*Gu zK<8fFy7lTjz^3=G=K(-LA%xLpMT9|y9Jxq9Lc|b8+ruafIamhoPl7^-7Bq#(o?uii&mqgJeE(oF>3_1vjA^?PT5`rOzAW;Mefrxf8v~3b8~Y@NJt|iBlh<8{r&y)^z_Kc$ocvC^78WL=H|}M&hGB+($dmvYis-a`|+93g)7ha@+m~4X|Nq&bQreeS+MZO~lveC4 z0NR>X+m=_^o>AMFSlXOc|Nj2ll2zK7SKN?P+=oxxj8fc+QUBXx-Gxs7-f92bX8+h$ z+>BHF_3!-l^6%5O|Jh#Lj#J%%P2GP>|L=p{eMh^YxwHj|LTAL;e%M;dQJB9LHOX-_2ts_+{^LCvH9vk`Rn26 zzn0^ZXyvzF`q5k5kyY|)DCehx_u72$&wKIAZQZF--G@^1&b;rurstV<lIqDtO!MfLqW?X{ZhuaxMniSox;;EGx3zF6UZRO!r3 z@ZL=NuT1rUHum0)_0~`N=1cdNK=ahW{K;+hlS}o9KD^3Rk^lezKXg(~Qve4G4iOs@ zA{7@N`Rw)K@ZH7|N((9{gy(c6*s+vsTQ(HYyI4p>G$o;*u7_kGrJRpCIkl^beN{)2 zf-A6h7zk6)000$NNklBp{KtH$su1?*%JGu6E z^Yiua@$~H3Q=t2YLZn_Jz1;oWoV&|K>hA33=kC?77jv%T#hHBE-JD&*-N!4Rs~AdkT=a+~Ecc<7*B9Xd-g51FSgrb${nX}%*n zE@YVU5iOY(tK;&oY~5L2RZ&(N6jWMPQB}Tk>!SRL>cuU|MvUwb$8dZ;1AEIYq~1|< zx~fdOV9f8*R#crXTBMd+sJE{Vg0=hjdJb{6JZTrNo+;hCz|1ttLKLmE;_AgPORyo> zW$g^*)1M=$^WzV%U%vok=JAJb5d4Op0$o}BS~W-O;{#yro2q`kJ>-Jo;zkUXY zLivk@fP}2HJU(A8sE6+$G_1Xv%FEgOM<1NIFmygr1))z|A~2A)Y~Mk%&CXu!RARI~ z-36<{CT^W|K!6GgVo=t>P-R;uh6!iV9TV0LRt@qKT4B$!c?agtpC61+Ab%+^=2=CN zTHqQ?2H6f?dAZ7&@^;M*M=HQVVbY~7({9O=GvOd*e_K{gLQTz_J$^o+3Z4->cI@w1 zI=x&7BpIsp{?>)jU(A{Uga5~O;Lz^E0B=kV2dcnvNQGEh`V+YfhGa`D zovt-7?LM_|Abb6hUotEPqzcP>6-lT<0vQ(N(l>s3;%|YavA}wJ;zV7mhLz`qA=i*U 
z^b5Qaaus(oc5-<5XhJm(xR_s!`}Y^-k}*;lk}a_6YE{uETJMUgd^Sf9%)R|)`5;x; z+Ix|ZtI?wa30ANr*8Wfa6btOC)@X{2!UUT*mdLM)wdj}2Fd313SfF7}NOg!%g;Oey z1u_|cs}by?Sf+*slRuNGM5T(Zvl@K1SU7p%BZ!^d&RTTsz1Ucw3V{*g5CyQkjfF5q zJ*FNkGMeLFQmazE=9w4ymB)H@5aF`3u>8=gxO-7%s?g9-=5SN1S-fCP>HaDOoqY+BVy_FbtY3?9X5SaZSC7O8|-)%?&SlhVDV3?Qp}M-bxS*h*rl}4Xv$7BV<}Y#!3yAZ3GD?kJ~7NmPSIp%r{&~iW~Q$zsnbbc-uqbW`%x|%sl0u| z@WzRyBMd>a1T1bBh5A%jn3PPY(ts*+-MV$k6H;K%_eJ;#zC2!%$#NKuBiwNfM;M4y zgbPHl_!vJG?k}8^B$x$m0hY41QYU3s=^`BCFdi>Sv--1OMetymu>!}f(Cf9@v6L&< zKuThc*{t+*;+6`^6w{@<_2B*a_hLpKuRh)^Uec#dR%0C5l%brhYi(l1Z86JrMWyn=x?d z@DXyqn&uMa7>(5_h_xA$MX;Ves#hY`^y{~>5bOMd*5g&RM@}>l$0%ji$lmmf+<8Ni z#i=;f3R6?fwt^?dhK!VC;1bNrp;U@zbcU0rPnhsp_j=Qp_wU}`YWpigU&GST1-^z4M-MJiR}u^olCKJsb12OCVf=Tg49<)$R|eE!^64%eK(FdZS%d z%o>hc&^5rqDh;rLl}oX(=95x@ieXj$hLt7tlBo;%MOpn16OkOr&jY z;a$BsIpZk{*H=}l?QWsQ81v{B(T{~oj5Qn|S`%5vGb(AUl#FM)*^QXW_Zo&JZP#Nt z(J;gkSA18zr|%G~$+lu86Rd1()St?AfOW+7$YakJ;ceYjRP>1Di2L%3M6$-)ft8d} zU9X_~a%w8Wnr4q%r{zSxj=zUX#<1{B+s(uniWO+PTg7yXPfrId%$8kPTjWH85$pF0 z5wP$^ybEsGiB+>3s7e6qkFs@KPUHqy4kEV*7QPX=pSV2A9;~FzDc@l&Ey?6q_PJFd zC+d$_;)CdjJK%QP+-jz`54!b3QJ<4z#)`JntuovM5lbdIh>rNi`)uO!+tOI+&CShe zY1!GDWOx%;|FDEp(Sx`e9(-&)iKX~d<|sOfn0QBg*>=t*w^Gy7n{#piOrz0c!vrZ6 z9z!V8)vG7ijunK@){gurwm690x(is6bsCbZ1g=cv$}TRTZvj(}98n2Wt2b^0tdpCv zr2jiBV0A+*(LwCShDcjJT%xyBFtPN^%$(xtYDrfHUb=J% zGxMcOUqaWeUHd3qlIt&8jiy+%RX3WtwU-7PV9_Cl8Q8m~I;-^l+4o65K4XE^LW7N| zTa`2h26*F=Rm@$+%R##Ez3! 
znR#&Kk7Z()375FZYd{=gG#K5In-3k3xo`CyrEulrR>oz$Jg_ERbU^lq$)5OdJsrN@ z>C+D~^0kY&f77f6)@6D4s&CawaxRo~+{2Fm<16|IU@s?D5{)`z`e}qBB<%OAJASAII zBJ=KWpzXvBleO5@lU*G?2^I`7?Nrctd6cE9g)?NMI(~*uBsnCOVp2=7UY>Rf^vVNk zBUdNOuHm4>nmNZjlTHIx|8pi-^0ZsMT8M_FXfI=2D;vGMihM2RjkRxru?a+L@>Iyr z{L81f2ADPuh72y0m19jXt{Qtv+sYhI3#rd*Vb$m}xjJ3h)h)4R8S~6JXATB zr#>-^7ko7uuSKDT8sn-gRKuxpDc0|#6X0XLZSqo7CYP_vMEk_2=LVTS+U)Hfj?X7X zQB96D4*&X%SfGMg9W(>h7pjG*q%TD~?bd^L`;N-%82R-sUyGkj%r{3z*mT*ND{Tdv5Y{9+_eB%RL|AffOzm06<8&-$ zRzsP2RMEDZ$xctbi!ODha0TNnS=33NoR6-}PH1d(4p?Fqjy840$H#f8Ceud=L6wjf zg>8iu{ zWLCZ1(BhR{s*07c3yLfv^)x-_+*LdMjO6MXf#V5eJ(3vli#<`7fC81Xn{ky?rqmEJ zf>B4c+sc^Gl>mA@xKPGiyO-C^Nbb21YSU;W@alQKU83RD*+T{LKw8q6}atFoBU^gPxp z;wAmE=JG{5X1jcR!kInC{F4~$LC5@`r_A*)zaXfBbkJH{XEe*)u11Y+mJ0ALf$o)d zhoH4*v9-<>jdgp072-h~(XpD^Uw)yz9aKZK3bn>mvrLMJYl_k?xdr*xHv*FD#T@Gx zjKwDgU7j8fLgj1riW~Q}VPzMn+J`P&i2CKW1G7{Ot?aUuZZd3g8hY7Ry{{0ohMNl<;|mAA}SRND-jwE1FuNZ93&H>MAQOZdh6 zMCYK}R2{&IF2`d*K02ejN5C>V5#(njF1t16nsG|1d3Z{@jxg&XwG1uo0#{uT`L!as zc>nTs&2^f|%NvC3svVxtJe(bl2I*wU%(8#n=^Wxv7w0E5ihDVyGF^to>z$&E;3^@m zNO+@<_pgK8Fj|q2#d@53))!yy#3(^`L3=xb??7LDbYOfedU}Y!k5DCIPj7|0_oTb@2L<{%Bh*r?TfxMPw@W@C$uH zFXW;yjDsD9W$9!(9=(=G5Tba5LmfCAM*(GL*pV7{)f{q;7q|+_A7xUJ+lv9JBpf3! 
zBEttHnMji9hgXig`s%?02M+%fVNy?&7l29yE3XG?Qn(zuk`I|q-QEc3jtR?v1~{WY zB$x>5&`&p$51Cdt6cv>)S7mZlQ}D%zg6Id(^m)d3$POPNG$C0cQY$KJGCf~^m?m2ymRkzqS3Ytr9%VQK2O>F{0dS}*9h z+QhF5SEqlQT1tQ8h251qVAcInDZf-90p_UL&Qdk%8dzPra%cM1)$qdN;zIc9t?4^c zORG2`$Wpa^w*&Q41yoe|l8J?@xTIkP1r583@_kc}qNl3x9|=&{@enU zGAsiOWYyJGRduzE73!^_ mGg+52Q?hpdv$o>xJO2fA{brg&`mKkk3Wl>ZG)e+$$8W`_K) zsr+A>|F5R%Us?X&;*9^z?SCsv|K=zC11iXl__t#V$eh;hzZ2X4)z$yWjr;fT;lJCr z|6RNK@50%CCrxN#0002+P)t-s00030 zW?cV=kpG5}{A*bMnVJ8Il>U8s{&sNwZ(#jvSp8^H|B00UiIe|&b^U2o|B00UfrI^W zW&LPW|A&wMdv*M5TmO4@|AK`6fPwyWYW;I&{b^PFXjK1$i2sC&{cvFaij(|lRR90~ z{djTyb!-2LlKy^u{(O4=f`$Egbp3X2{c~vkfP(#QUj1@r{cvOdhmZYhS^kEN{)LPF zdwBkOcK?5W{cm9XZCm_mR{w;E{)319qMZF{RR8>1Tc7i| zHz#}ZY&%U~r2OJ@)~>oR9@8AV)Imr(BaiOo!|xQ;3BmnY*!2}(@lpgarM#@A@Y5E`Pg_b~ zAO)vOU{z#D$b7^=uuSD4@=zqu4qRER;*-T8$iPZz@4ajNlJiQ=IXUM$M|C9WppO^X z3&SvM6ybxszX)PNUCyVrXp26$e}s1Q=AT} zss&ssC`d=SX-K01c|XEJHmr~#LJA7AK^_xi)0~bY|0oa&P*n}!ekkW>A=McqDTREt zs|g`PQYqr5A*BwT;-ai%DCDE6(@>#>;u2V?g(4gX3qip#kV8kxdWiFov=Wv}kV1{h zNE}P^k#yq@fZ4;3OlQ=g6TuvS>(r3K1%l zegGi^_#9V$Q7*uFP*Mhp3!tnDmWiQQ2!#S5;KB+yEGM9-7@W!kyyHkw4GAd}(i*(XrDGFJWuru0U>AQLJfq>}nB#XUT%wgl-KZSP5 z%jecK0-nb*MF{nqhRX5Dukx>+AKWS!rMMS>h z2WD#G5*qVYmL!}ly?yj6bo;LMEyO$h!|8YqpW+ZXDrTOe=c@& z-K8>15#6Tn_w_&8w0oh8LJBE<(taOjrI3DEZHSvI3EZb1JlNUUw%2{y@34NXb^A1! 
zx*WxeThAF9HXR#XMJRn(kH-6Y6GL(Fr5{FHso|H+wp&K85xwu1dSBg)sO`+X7Zmfc zq4KD^{*jTNc?$JOt)vk0^7WthOs#(wrZVJtgu`b)owpjLvKqfSAsDBJZ7{V6*sZK5 zF>BWC3Sz`p52Qw(BTWQcobkLQVlA1sC*O(rTl^w@kG?hP4cRb0ZykSnsQRWt^v3$Q fY3O;%Z!d!d_vbsg*GM;=ABQExCBcv; literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json new file mode 100644 index 0000000..29af728 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_history_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_history_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_history_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..6a05253da8ec423c970975bb49a3773dbd3ebf68 GIT binary patch literal 251 zcmeAS@N?(olHy`uVBq!ia0vp@K+MCz3?%z9kG}>|t^qzFuK$q$Pz;Gj0#naMbS+S` zcuA07@YBM{hZy`TB5s^Gcd9n_{+){(C#Ab731mgNHvyISdAc};a9mIA_2z3(;9->y z+M@8|`fvHrOWR*GMVib_xwMFNIiq3olN-l{9G$qb&ZelF{r5}E+U$%SwL literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@2x.png new file mode 100644 index 
0000000000000000000000000000000000000000..fc6878cf07d8636e96f47ac2eb60bc3d01d116e2 GIT binary patch literal 322 zcmeAS@N?(olHy`uVBq!ia0vp^Qa~)h!3-oX$H;yMQr-bRA+G=b{|7RGB;{asKvD$I zEXk4}zu>0~jtd`fZ9Ls@cjrXDV>d4x$~Et`pEac@a_{nzV4$L2PZ!4!jq_6{yyk0G z;AxFMDtgMb@ZG0>{#MS;MmMs){B}Mx{jwQ*0IQ8gN5H~4yj%8s3c4qqv|dfdf7O?O zs~y>IL@n4|AEqq)ccSZNlrmTLgs?*F`UsEg8BV^)62WZ`^`n0<%;IC&A~)sO1fathJYD@<);T3K F0RY)Tt4sg@ literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_history_icon.imageset/vehicleMonitoring_history_icon@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..b19972b31982f0aea27dedf169099fb6c91d2123 GIT binary patch literal 457 zcmeAS@N?(olHy`uVBq!ia0vp^>Oic*!3-pAUtV4bq>2K3LR|m<{|{sWNxFfC%}>37 zR;rZ*`2{~^cp$N*=!V_tTHTzAyEAvhoR3%CzkI%oP*3CK!|VG4JUFlJsSG1(&@|pZQ%X0p`>b~3|dNj5%;@SN25b24p+3$PZVJf_FL7Lr7 zlljfL)<4fYG+EDYJX{nca55yy|2tp#^_x*^4;y{6%X{JTNdC^b?U~2JPsxNST^I3s z)YtdmgTe~DWM4f D$)^pP literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json new file mode 100644 index 0000000..fbfe1a3 --- /dev/null +++ b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "vehicleMonitoring_offline_icon.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "vehicleMonitoring_offline_icon@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "vehicleMonitoring_offline_icon@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_offline_icon.imageset/vehicleMonitoring_offline_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..7235678a2a3be529e1cf883e1da4e8ba71e0942b GIT binary patch literal 316 zcmV-C0mJ@@P)}>z*Zu^Z{|Lbu71O)%;Z~xF&O{q zYyQ}4{mf;5k?hUH264&MWQMwFcO;xA96hMMkL z5!u>i;`%)e3fq@a$Gus^iyVpEk9tHr$d;&E3pFMgB3+A}0kWZ560IEo@Ov1_GbiA- zr$)}l0f}>z*aR2OX{{#g4l3M>=E&G^T|LJZ1+id?53jX76|KVx=(rEq2XZ^Wj|IA?gr(XMz zS^sM@{^f4}-)jHmY5vz~{?2Iq!e#uUUi_e4{G43<%x3(vV*IgU{I6pFzFGgVQvZB9 z{}>JYv}6CfTK~9L|C&nwN+bU(7XK?1mq2Oh0003SNklcmxXsm#wZey{hb9oMBk zw&Z_p=hcCmmh<;ttMlTsAon)MI(2%Egm_mU>%grfSFMB)E8kFBI$R8F!Ks}~ZE?yr z&S6-LIyzR4lwr=*$`Hpiqq-?e(Zxb(s}cagFx0NKS#7hK)#gAcNaIE4YJ(7TSlq;| z7`T=P!}egp1ths(&8=z3jn6U;t>Ay0z@v;%7j7f4H2w3;yO8r&r a<$eL-y$(v$eLaT&0000}>z*aR2OX{{#g4m|OdqTl*HZ~xF@{^f4_j9C8KYyG}t|Ke=_-e~-& zU;B_*|LSl5>umkSX8fOB|LSf2)@uIVZU5zJ|JrB%)Mov*Vf>+9`;A%u3Gu3-F}T>q9v|0NUs&T0M1XaCn_{kdZMm0JHsBL6ZN{}KxS%U=AYUjN2i|G``T zvsC|mJO6Yz|4b$SHXHxWU;n;X|F~8Eib4NuGyhU3xfLwA00061Nkl z5JgQ2#O4A5f`TB9?a)p=4|n|kzq$~Kbf9R+Q*VdG1AA5Fq>@fOGfmTG9eK>#-PjoK zTzRu^(2Vg~R$S4X(3BI`W6J5vEpq5ljy;kwM>;JSN(4Nx=qHS+M6MqF%&vW}HWhK( z#&hSDOiPoEhoi6Bl?*AvbM~>$iyqsUhg^Jl-=5zbzB3Zj6MrZvZ@k}WiyfgCo|TLn zoA*0t5>@_c?_&OF9nuhR(`(w95uj%CnbAVrY#!f42a1`DaPUP*3vw`1zx^tnD~L(M z+K}7;3J_CIYQ&=js99j(Ma_QDWYVRH#$yu#K*}ZvMIX_4NJ`bD0(S!>9-rh0<^%*& zcdNa|gb3;rGq({?0jsjoWWl6Dc221{|HB8Pc6^Tx5dr_{@9E!WA(+_s4t-KqA=*0u zaPumAVn~k!0|ZhFDj}&c>Xlqty;#h?PY{0gULp{vgtU$jFM6FT%K6zRRoNKj;%q_? 
zHgmS04}dIpnfJRwd;L%{=9{t!#TnNef^n}fUeTCscH9m=YMw4NdjwNqX|j?izgf|< zgPU1M3Bg%xw)Z#hdvZlsaFDKaLh6WrJqX7@Fb|!MO`1VEPQ;oxSi?p-yi-u|-^73AWhc15kia1T0000PnU3@&V;cnWxng5TM z_RRd7-uZCh#_1A!zVAQzVu`AMuV0YRA#)!${k0XDUnX^JKbAT(=<~$~yE<1&nk+h; kb>rf*?!bGKZcP5m?0!PF@Lcm-383W+p00i_>zopr0PsVG*#H0l literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_video_icon.imageset/vehicleMonitoring_video_icon@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..408138bac325b5c136a3d29fdef185a1b9ae91e9 GIT binary patch literal 514 zcmV+d0{#7oP)B@aCLpH`C8-lVx<50tE{FHA*Kr-qzWRa589%*5B&ZWWyF@>`L_oVl zKed?GqWr)u_-UGCM~ZbDXbhPs~IM$6+F2;B&ZTA ztsf$%4sHB{0DJB#O z`sCx(zmkW1b#z5D5K5&~0002pNklB={gTP#B!6<54dS9jJeZ-(=Rm(O^jV6ANpc2>HBV@S(^$?eC`{DOdv!)#=FzaJ zF7t2_7i-LqRCbv6^qfl*u&pwpEI&)}|1WdZo+}fusnT<8lDSIHjY;MzJ+~%cN2TY^ zBy*LXdlRsw((z!Dxk|^QN#-gYRaJeoKj1*Mo+9is*ws1G5g4I-uw zBB&2PyhILL#UXJ-t65uK*;e6DF$`A*K&1tsX$ULqNMkKfFUf zyh9|b6dt4sK)ggKtQsz`B_^sCDy<$cu_!sWI5M&;Bd8HSyFxy?Ks&iSIJPx5wKFxf zF*LL$#J}s{zEUq6Srw$;d3?!)&AEgQ{t|C3UKU##dDF6TfICN4@QveEd zbei{ONA;ST7xne*<(it8W%c#*x3{g8h-PMDSv4Z-MK^G zn3t6!4LGJ&Jm^`~l~}O1{=SYFJT@Px89j+N-Zf_YcxR=lfnE#6HMhtFD7O(wQIU)y)r`vz!_a7kh%hu%m}VrGCbt+QU59d~Q8F16ng)^lAcnz^ zF_Muew?c&pg_P1`*5|zA?fuT#d#!!;Iq&&C@0$7M`|+;5_FC&XYwxr7S$l0_MU(OX zC#B6w8LR-A*h8=aWCh5?JTNhJRuQrSq>cw9?F3*~0QUxPujc>10sIQUukAm{o9gs7 zo|MT)h@@Qr91Y-Q0JiAin%@KX6oBJM{-cMSL7Pqz5J~TBKp4_^1LaL5KQv?~QQ4CL zMAC5p-WD~W68~QT96<70mE;wRSvnw+&I9ngVhidwJf7sS^~L%mE)5V#Uk30rpPEbI zI+LW$TcgtGfJizNz{f|mvR`KhkvzAb2!HF*0FiVj05=1;vtPYd&L0B!qWzJ)BqdAI zmQ55s3xNIIx3KS9tu4vF6}ERb8wC(a2Lt$|TdO-VUIgH+Brh&(zpk02R|7a1z&1gd z_R9SBV4D#8;DAW7McU3Gt$^PF*pKAp!FFMGNpA%3jzGifNp2h953vpkh@{&9*ihxA z3rHT|Wv0tma17O3Phqfhoy@xTvGYBA(?g8M&fFkFUJTSmN7^|f1@!tsb>5}I0O0-^1r8_qks3-?14PnY0N4}np>3wT!MA9Dq!a8-FJqX!gnZRwe2S+gAaW>k z70KuumqYCEEX~|Oo&x2^Bp+SU 
zUtd6S^G2~^CB}@|rrDdbg>M%fO!AzlPQ`#YwnT0uX={>AOOot+{$IKbEE&3r0Fm?{ z09Pkp-+GLr8wo*4hXXjhL+49L?iI186p-9dwHwJRBZ_QfUP)H~u)VN0+L5FsShy$* zh$MSk`e;J>c_g3j>U|S2rQWa4+TCybBmc3<9w$VdK+Q+&ixdD-MKLCGBNZXm@Rk;o$zh_p&`;z>6MBW&@l70c; zp)GygL^8L%9c{hO9b_lwSq&G1jWP1>hnd}dfw9l_gr7qob53e^uQp43?nSF(T%z!wQWN9u~ zZ!i-XH@m0On|cG1L%fe=Jpq|@opeAS{ZefcDYKs?*_m9P1w_(g0d!^!E(s9Z8`>e@ z%%x2HCBv2nkTaUoOKtX_LGtAR$Anl@21(iufC1xL0XH3wI`Iz$h~-S(wZoJ4x|v!m zh5%S(_825-UjUZ>8==!l9ua9!!Yj!V!}o4!yA#RQ&ljEu>73Aq9Ing=44CWoMH- zB=7`=S);~ubs)*|#w>pMLhMmvyuu|})LZnssL$JE)P@`&wqt2Kmu++i$+KM+6*5JQ ziFDI^-E&R+Qv&1-08Z&J^k|Z&Mw}4k9TE`JMUrKzKO(2slmM|!u&%^{_mO-}&X_*G z4+ThWe{MnNb4c1L;v$OLkQ=gW*>B7LHX7tB;e1C?V>}-7Mnxm*B$By~OMHskUIOr_;H1CzG_-<0`ZQ zWX_Gtii(*El6C{|^A==(L(&Sft)opFA;GBVJy&!@X4hM4=XWFzAb$pMpN{ygesI3; zq`vH^SkV7k01s&K-E&EPt~5ey9b!ng(S;H|4V|Al5-D`(xRF1f$|alE>s8gc`p~vigx$M{<#pNBz8jSVnhO zF_vRV9$&+VMH!hShh#R)f~;!m3a_1shd7zN?*-&l09wBRuZ?>HohM8fWp%W&dr*z@ zMvIEM;i|LDNX}O-Kyt&DWu??u)p{eu`IL?x6?1^tnHD?SvW{v3V%rE^IlNbpe7Mt5 zZnhqoi=#)yyip0gvwB4$8-C#JPvQ5^~qGB#O?4#Y0CDE}q{6{>L zd1Aj$@^KM)Io@D|WS_oVe>Hwj=zIgow)2N~=r#(-2bu$zUAp(Ggi|<&EIlgbqQmkJ zx)MLKhjqdc;uuD5CV96SPnx4IW0^!$G`dglsj{H#|wQ zn-Tt$G2}5Me^6p@uxfP2TF&f=UeT(?-%B0Dh8@(d(E-6+jwLtk#I9JkypW zQ`J2C_-wGYl3oJfW0?+qNzyL6ET(FJG@zuuEY2%wSJYilto6tZohe0CUSO&M(tzUK zg1?yL<45*ws5|F;u_m*05?@rPIv_z%jQu$zKOgec2+Wec3&8WhgtkU*LQ8lgy&iz~+)I^VYF^L50BJz^8Gy*AU7PIp9OE*uMX)4U9mh8Z zKIw&9wD5+y{Dcw&aX}hR{QF9aZSDI_UBmJC8(r&P>PJ;u|fRe0{?y$=0 zUL8}T$fl%cHz#pzO;JLRBAHu@l*nCcGYTLx?IeA{Ze=dk_J0j+rEIr-+G$!lxOvU2 z5N1<^()N(-&+4vQfBRLi^-7X*#>Y zMaz=`Os?+kg16p7(muwGMoR-^4w$y;)uTC}qobu<-97polJc@7yUF=`1#dhx!8?Lv z*Ighr`{|Um^ayFwOp=wouuCU%7dqB(lHKk8(%M;il{{c)vTcL92fP4S|002ovPDHLkV1nE^QTqS@ literal 0 HcmV?d00001 diff --git a/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png b/OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png new file mode 100644 index 
0000000000000000000000000000000000000000..06bb804643929afbfd622f232aceddd54e951c85 GIT binary patch literal 5365 zcmaJ_`8U*$_kN8DS;9LZSt5h7h6qU{F?O;H!elGP7DLvMB_Yd8GnVX;Ffx`Q`_Q7H z#9-`8cE(PYA$@)Rf$tCZ-d~=3?z#7zd(U&9S0+X}=a?@s0|0Oiu6xhyG-Cf}XBket z8szB=09-_tY?v|qDhsr9$8I8pC2&=nRnfeZcA<-c^c)cKN&yI!VStlS&D?(+^D zvEJuQY8kVb8O*T=`7dT$(h~V#i@An0SBim_q=Mm@%hnFFN7g)S=^?+eP?dx|mBq>1 zoyxP5a(lr&f7b{LSt#|mS7L1FY2a3^ilWL>fDY{RaAwbIy89L42UZol+r#X1ogU*n>esf z=feU_F(HR`pRN5v0inD^E}(VhAKYF=b2*lOIDiQ5J^}XOZB70RUz#8fS*O*&jn(lX zGsQOP2+`$I1!Uvj7n4Lrpn2#H$RqQF^__o{OQh(sAi&1B$pY*Nyna`eGzpnr1MGFS z6RIT2E!?{y`s4}D{9FQE<;)n61Z;AHQ;5i|*q5KSPm4i;T()UBz>7Qg(3ku8F%W)s z`UYqvy~yG8&c+Q{ytaNB7>6lMV6M$SSj>SxcXPSl?3FLcBn7|2Fr5hq)|Mr*Cmy*2 zj{)T%u0dW&s1&^Z%qBme13gG0CXDE5pD+^jfoxrJgl(`56B53yToK(oLH4WN04iaJ zEn!Vh`e|gSK3Nkmg7|DGJzyhT0rHG51^gojtxfR&2e8G#e%keaf4pUZXMj(?3DrNQ z2fFDD{xcskm>Sh*k%CihAHEq#Xl0%bM16%kyh}#9>w)WMr^U$|Ja3hrdpf-YLmgfQ zl)rtbhb%^=KY)yv1;^g>AeR5!IiI0^Z#$f?zRk@KTvvrwzf@e=|0nfKbOcq~aPM0y z`gA%rKtn%OBZ4LF7XZ*c=;Tc9~*Zfa2JMnD+6k`75t$l{A; z2W$U)yX*FwsJCZ~4Ra?+_TSdEHyi7{Le(D?w==z6;K+Xj9;e6C1}%6Pq~J{ud<$3~ zc}X>~NSeNmzgU|5?4?%V+=GqJ@KN)}Xd4**M3BgC^<&ug%SKA*b8S9-2MaJJWQv!j zB~|R5+8pFo%stOTCk0p9Cs=Qo1oK3Qr#6Q^ftH&alLdk2cdUKhu>HLVVjXmfO(QYu zN38K$3_aVNfFOMPG9$&|&>fTkOv-HV$3e=ZHvR;N6O{TFSij}tTgkBb#iAivK)KU{ zfF3^d1eBbm`XXO9N++Ge6%2_DrHqSq|M4`}AVYy{TjlwX)LM_RocmC0(PJR6FM{`Q zcr|URZsJ5~HnWHfO<{-Z^UcyV-NM8EazwN-TzCH3pa{2pU{uS*2(!*~mn)dNwE7wL z^27CxjYq7=Fr~~l(To`8(e~+rVoaUWp>w4r+F!ex+O#oxkisO6JaC`ga{L@lOz;mS-WZOrOXHu0ElDR z^HU82dQxsoI8a2!#!#s9!}1}4KLwqAP6T@V=^0CSq!m>9^lcyHcV_`M*hoiY;knLwxFoo0>z4P8za_UwM40V8H<88*c*|Uj z#N;P+HgJIGUl$lV#E0%!GcZ!BE~Xk}-{HdN?Z@n$!0ZAzEKV;~0nj76bhrl-Kscen z-5MZtxzY4&Q6*$J%DL3>*>2E4Kt{GgGiqG4pHdHJ+w;cJQ}`g!vm8=-NMjlyxj19( z?beP1M2(M_mnJ2zSsFItG-O;QXP-u`UkDqdquSEjH(5AnBr)ye>eK9iHZG6ZD`3*6 zW|vYT{J>JnsenQ!S?0LO?31oN!Kk;*oj&**rv<*N&(_@jP!V?>{vY(m4qTP>f!sL; zY!MU^3LAbmn>obLd^W&;){(w*^)+pC?|@t(77h=V2N?g$Y#N3Rznojt%XEO^(jkF* 
zc&&1tE9nL83tjORblo{?DQ&v6PpzW5pdbB+>3oPhXD~O(Q9MJFsK;>3R`4k#qWiM& zvQIM0I|iZ+-_c*VD%~_z2rg^b{$laxA}4HLHfPfWX^lF0+upMWJG|UpZ5OY2hBA5} z6>jZc-5W$eVqtuUg*RiBLc9|>0j#L2cz%Rp31dTDRL`W)e+vNswu#{XEy2p}1l+v= zw59)^<O_4*Xx zCuUx~EqTvus+2D6jp?4CHx6sP?sVkIMa)OoCwU4Mh`?PHn^R-myY5}I>OX00EP(`` zNRS*fPsWR!Ed;9_wK0VY`vSVb<%p~FyogkKSb@LDSjdW7(VfE8@vDtLWFp3Q7d4cr1OQj z#Fdo}fh;=-b7tHb8LC&PO|UN9!LMu5Kk=%p);Eb|B_Mm*UrrpG)a?~}!AVoYgIPNWqg_EXStUH^l-O=ho4>*#a6s#L_g0t?gC0Y-6gj4HW@ z4GP~|olV75!4t}(*9)uz)jQH6kl4>ey-DS%xT2rZgXH_l_7C3koMyRd413F#Dd*nI ziOF2j-IkffIq!UTa|zAQ)E6aIPZ%vJg*QRE`Eyg-p5N}=o!;p)M8j0RJTodiG69XE z2U!s>A%Uomns=s>h*|Rqu(vz3{C+*dDjrgJ@@DJ%M40X2LWdx~Hu*3t3nEEgPU@+< zg(ow+9(HqQNrO>G@k{hZA>{rjT{GvcKn4=7{5i?;iLjz`)-4EiG+8C{->A^`^6RNI)(NLVX1o zkrb67DD{`%u|=8|{_VEPhwflbQeOA(d}m3uQP)om4C2Un6&T6N37Z-F-9g)u;jr(a z!(H+-=s~4Hjvrk8>W~)`eiiPKJR|kz;m4XBZVst;~LOlK<9WIY? zllEO#9O?O^_`+S%&>dm+lGirNBwP5M%x(0qke{+5ZdFfYFhI9nTT1VU-*y9&=WZx( z-Cg*eSeBb+w{>pasNKi7r^{7; zSV_{7M(2t3*qP&B8`xP99y+8gI(f$^NMdTb*hCrQk1%qUWdsO=-=iU!Bc}b0&N7NKZxN+2Vp82r@>Yly3y(#~NA7ymw<*9G$5D-r+J$MK zKKH6;^F1tIbOI!E4o+%)sWqf7rc|Dx_?rslm^7977bDMO=k~Q}GImuSC}-m-{+#>1 zZriiF43_UWr#f{{sr<10FH!A6hqm-0i}R-|N)ZIE1ZcK$58qyi!V0*!L<$o1#wND{ z?pK%#qaOrf@rK|U-q(~TStK*Tpyu7!6{M(&BuQRLTW@e2S?QZpWij?7 z(;>=3XSyMppjAQkwI3re_Fn|TUg%jo>zgIQy)*-}KGKiFD?DS(R&2 zb6?VD^I+Xy>gqCd9?32h*cR0EU(V+92v)y3xdYqsi8Z+qkK{naR7~c7 z5)i@KB#l1t1t-rCgZTZ-ajPF8Sf_g~Yrf>T%lc%VT9$$yqRk5Dlq+jGS_epHFxN>3 z$18Wg4gypP(0XCDm($0lAqJMQdIpD4-uel_3OKC_rM=@x3HAEeOIwVJ#MFI`n|WIC zD<*8@btD05qLpDs^9{zbZ#$d35KC_2M)WxG1T;$Bwyt&bNO_gIj$KKTm7g|rF8_e( z>VxH|<1^eNxdI#~B4_AJ1usER#YUk!DJiR+S6urO;1h4_v>q@2V0fj=+4AGs58dZA z2b>>1m@wfmYFj^3EBNKUr$+gjV?H;zf7K&yFLy*Rxy?Y-lILDESQA3+A-EV)jPzm~ z#qzpApUqZ{uAm&6e~_p*9iL%`d)@ccAg5T^c}M~6Sn~%q!X(5TYGt>~Ih8%QGP@9# zQex4XXc&PZTKL}Ox0qyR@N8i-DStjQb`QBPESYuV_&Vs#PUrL4Q99D4fn;^YbQDvx zQ-c8yI7?IMkhBkxrx=AfCuS2f=Tt=RdaSl+4uDk9p2DhZc~;wC z#@7Tjwn3ZcH=&lJY{~ur<%Op8q-ylsS9<@l`1kzZmAIZ7Wv8d1UE`$nK^fzqiz0r< 
zmq^@IE7S``RGqBVj+{o3xq*xohx_Cnkkfx!^Nw)1&!HVm*9{y#DG=Id810piI{`{5ki?w4 zM0?>N%FCtBc%#j~On3B_XtYG;fC7AUv5Mt21osxA%uY;Z%g|JABt_KK-{XR}n%4zC zt^}F>q?EfJZLr}orTe;fO)4Er9@LorzNf&SGS{0_f7Q9G1fu-@TR1mGQbPR_UQGfX z0NG(yiry2>)bErln!uEcxGfePppA0R)AYdd#>+|H7_gtGo9lX&++ROUikr!=9I34m z&9*00yHRh2AaVVDWJoeIF4ME`{SPHKLA|52-JH2PVR%!?Dk@n}dAS6{{SGp%XMtFb zawo7thkkb11ixJ#J{o39uDb!R{+DO$vo|j#x^u_`GnTDDJb0c;tWJ-M@)_uT49AYc zDZ7M}Mlc(HTo~UYt=`;Nc#>bn0;T+@I{<)1wo8BiU_wPUIHpBoV=;Dn$|dW%TLxFICp=CVi!H<_-4<)QK+MWP6n{jC z2#)Z7u@~3E)Z9Ms%E_(pSW{ln@UHm&NG;65=~;aYS)Q)@5^V^wDM11Gw-yY*9;=$` zdYLmhr4*}&(ud*7aBBT2Ycs-)M8nn?rY$0eXSHa}@)LHWKS^%rrUzNcEz#MAE!{Grc#4=b$$HSZ%Eit5e$ifAhCH%GCFUHyZi^@qmHIxcPw@45PVW{!5{A)o^ol@2`$yMQUH=^G(V`%o^(wbsJ$u7U zs4X3WTBrMq#4{F-p$oqe)Stsw<+6&>RWI?o^G>mCK0>iZdy+qb)9DUtM4Et{MHovsNZs-wy4F2OpT)TI)LJun1a3; zf`c7B?R<;hYo>giD(@`;h9CXT-u*WZ^Sn&(d}0eloql_cmoGZE8db+mwC}BPInCbs z=S^Qx85p{_)W-H~8Fl&JUw#t12jA|G%>N01^#88_cznm-ND6r3H5hOzX993-qkA9k HB47L;kUw5x literal 0 HcmV?d00001 diff --git a/OrderScheduling/Common/WebView/WebViewController.swift b/OrderScheduling/Common/WebView/WebViewController.swift index 1e5a7c0..f75d222 100644 --- a/OrderScheduling/Common/WebView/WebViewController.swift +++ b/OrderScheduling/Common/WebView/WebViewController.swift @@ -115,6 +115,14 @@ class WebViewController : ZDViewController { } webView.configuration.userContentController.removeScriptMessageHandler(forName: "nativeObject") } + + override func dd_backActionPop(_ isAnimated: Bool) { + if webView.canGoBack == true { + webView.goBack() + }else{ + super.dd_backActionPop(isAnimated) + } + } } extension WebViewController : WKScriptMessageHandler { @@ -141,6 +149,11 @@ extension WebViewController : WKScriptMessageHandler { let vc = AdditionalPhotoController(userOrderId: Int(userOrderId) ?? 0, orderCode: orderCode, taskOrderId: Int(taskOrderId) ?? 
0,canModify: canModify) navigationController?.pushViewController(vc, animated: true) } + }else if action == "goMonitoring" { + let params = dict?["params"] as? [String:Any] + let code = params?["code"] as? String + let vc = VehicleMonitorHistoryController(code: code) + navigationController?.pushViewController(vc, animated: true) } } } diff --git a/OrderScheduling/Common/WebView/WebViewTool.swift b/OrderScheduling/Common/WebView/WebViewTool.swift index 9dba6d2..f83930b 100644 --- a/OrderScheduling/Common/WebView/WebViewTool.swift +++ b/OrderScheduling/Common/WebView/WebViewTool.swift @@ -40,6 +40,8 @@ open class WebViewTool : NSObject { case invoiceListInfo = "开票信息" case indexList = "二手车信息" case reportIndex = "报备" + case vehicleAlarmDetail = "报警详情" + case vehicleAlarmList = "车辆报警" } public override init() { @@ -144,6 +146,12 @@ open class WebViewTool : NSObject { case .reportIndex: vc = WebViewController(showNavBar:true, title: WebViewNameEnum.reportIndex.rawValue, url: "\((h5Models?.reportIndex)!)?token=\((USER.token)!)"+(appending ?? "")) break + case .vehicleAlarmList: + vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmList.rawValue, url: "\((h5Models?.vehicleAlarmList)!)?token=\((USER.token)!)"+(appending ?? "")) + break + case .vehicleAlarmDetail: + vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmDetail.rawValue, url: "\((h5Models?.vehicleAlarmDetail)!)?token=\((USER.token)!)"+(appending ?? 
"")) + break } if let vc { diff --git a/OrderScheduling/HttpRequestCenter/ApiList.swift b/OrderScheduling/HttpRequestCenter/ApiList.swift index d3ea816..58d3cc2 100644 --- a/OrderScheduling/HttpRequestCenter/ApiList.swift +++ b/OrderScheduling/HttpRequestCenter/ApiList.swift @@ -34,7 +34,11 @@ open class ApiList { public let vehicleMonitorList = "/supplierAppV2/dispatchApp/order/vehicleMonitorList" - public let getRtspChannel = "/gps/thirdparty-vehicle-position/getRtspUrl" + public let getRealtimeUrl = "/gps/xq-video-monitor/getRealtimeUrl" + + public let getReplayUrl = "/gps/xq-video-monitor/getReplayUrl" + + public let closeHistoryControl = "/gps/xq-video-monitor/closeHistoryControl" public let orderPhotoList = "/supplierAppV2/dispatchApp/order/orderPhotoList" @@ -69,4 +73,8 @@ open class ApiList { public let getConfigByCode = "/base/baseConfig/getConfigByCode" public let thisWeekNumber = "/toc-user/car-admin/thisWeekNumber" + + public let alarmList = "/supplierAppV2/dispatchApp/alarm/alarmList" + + public let getAlarmByCode = "/supplierAppV2/dispatchApp/alarm/getAlarmByCode" } diff --git a/OrderScheduling/HttpRequestCenter/ParametersList.swift b/OrderScheduling/HttpRequestCenter/ParametersList.swift index 715a580..8641e38 100644 --- a/OrderScheduling/HttpRequestCenter/ParametersList.swift +++ b/OrderScheduling/HttpRequestCenter/ParametersList.swift @@ -181,6 +181,14 @@ public struct RtspChannelParameters : Encodable { var external : Int = 1 } +public struct GetVideoUrlParameters : Encodable { + var vehicleId : Int? + var simNumber : String? + var channel : Int? + var startDate : String? + var endDate : String? 
+} + public struct OrderPhotoListParameters : Encodable { var userOrderId : Int var orderCode : String @@ -234,3 +242,18 @@ public struct GiveUpUserOrderParameters : Encodable { public struct ConfigByCodeParameters : Encodable { var code : String } + +public struct AlarmListParameters : Encodable { + var pageNum : Int + var pageSize : Int = 50 + var orderBy : String = "create_time" + var supplierId : Int? + var handStatus : Int + public enum HandStatusEnum : Int { + case pending = 0,dealWithByTechnical,dealWithByOperations + } +} + +public struct GetAlarmByCodeParameters : Encodable { + var code : String? +} diff --git a/OrderScheduling/HttpRequestCenter/RequestList.swift b/OrderScheduling/HttpRequestCenter/RequestList.swift index 554aac8..c38f8f4 100644 --- a/OrderScheduling/HttpRequestCenter/RequestList.swift +++ b/OrderScheduling/HttpRequestCenter/RequestList.swift @@ -75,8 +75,16 @@ open class RequestList { return DDAF.post(urlString: HOST+API.vehicleMonitorList,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) } - func getRtspChannel(prameters:P) -> Single?> { - return DDAF.post(urlString: HOST+API.getRtspChannel,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<[String]>.self) + func getRealtimeUrl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getRealtimeUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) + } + + func getReplayUrl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getReplayUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) + } + + func closeHistoryControl(prameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.closeHistoryControl,parameters: 
prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel.self) } func orderPhotoList(prameters:P) -> Single?> { @@ -144,4 +152,12 @@ open class RequestList { func thisWeekNumber()-> Single?> { return DDAF.get(urlString: HOST+API.thisWeekNumber,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel.self) } + + func alarmList(parameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.alarmList,parameters: parameters,encoding: JSONParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel<[AlarmListDataModel]>.self) + } + + func getAlarmByCode(parameters:P) -> Single?> { + return DDAF.post(urlString: HOST+API.getAlarmByCode,parameters: parameters,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel.self) + } } diff --git a/OrderScheduling/HttpResponseModel/ResponseModel.swift b/OrderScheduling/HttpResponseModel/ResponseModel.swift index f963123..8a3567d 100644 --- a/OrderScheduling/HttpResponseModel/ResponseModel.swift +++ b/OrderScheduling/HttpResponseModel/ResponseModel.swift @@ -17,6 +17,10 @@ class ResponseModel : Decodable { var total : Int? } +struct CommonError : Error { + +} + class LoginDataModel : Decodable { var accessToken : LoginDataAccessTokenModel var refreshToken : LoginDataRefreshTokenModel @@ -193,7 +197,7 @@ public class VehicleMonitorListDataModel : Decodable { var taskList : [TaskModel]? var isSelected : Bool? = false var zIndex : Int? = 0 - + var number : String? 
public enum TerminalTypeEnum : String,Decodable { case APP = "APP" case GPS = "GPS" @@ -267,6 +271,8 @@ public class DispatchAppH5UrlDataModel : Decodable { var invoiceListInfo : String var indexList : String var reportIndex : String + var vehicleAlarmList : String + var vehicleAlarmDetail : String } public class VersionCheckDataModel : Decodable { @@ -370,3 +376,22 @@ public class JumpPageDataModel : Decodable { var url : String? var content : String? } + +public class AlarmListDataModel : Decodable { + var vehicleName : String? + var alarmTypeString : String? + var code : String? +} + +public class GetAlarmByCodeDataModel : Decodable { + var imei : String? + var channel : Int? + var startTime : String? + var endTime : String? + var vehicleId : Int? +} + +public class GetVideoUrlDataModel : Decodable { + var channelList : [String]? + var realtimeList : [String]? +} diff --git a/OrderScheduling/Main/OrderScheduling-Bridging-Header.h b/OrderScheduling/Main/OrderScheduling-Bridging-Header.h index 7e3f1db..fa69a6a 100644 --- a/OrderScheduling/Main/OrderScheduling-Bridging-Header.h +++ b/OrderScheduling/Main/OrderScheduling-Bridging-Header.h @@ -9,3 +9,5 @@ # ifdef NSFoundationVersionNumber_iOS_9_x_Max # import # endif + +# import "VideoPlayView.h" diff --git a/OrderScheduling/Rescue/View/AcceptOrderTool.swift b/OrderScheduling/Rescue/View/AcceptOrderTool.swift index b965d9c..c95284f 100644 --- a/OrderScheduling/Rescue/View/AcceptOrderTool.swift +++ b/OrderScheduling/Rescue/View/AcceptOrderTool.swift @@ -30,11 +30,13 @@ open class AcceptOrderTool : NSObject { // 来到首页的救援中-待接单 let tabBarVc = UIApplication.shared.dd_keyWindow.rootViewController as? MainTabBarController let currentNav = tabBarVc?.selectedViewController as? UINavigationController - currentNav?.popToRootViewController(animated: false) - tabBarVc?.selectedIndex = 0 - let nav = tabBarVc?.children.first as? UINavigationController - let vc = nav?.children.first as? 
RescueController - vc?.categoryView.selectItem(at: 0) + currentNav?.popToRootViewController(animated: true) + DispatchQueue.main.asyncAfter(deadline: .now()+0.25, execute: { + tabBarVc?.selectedIndex = 0 + let nav = tabBarVc?.children.first as? UINavigationController + let vc = nav?.children.first as? RescueController + vc?.categoryView.selectItem(at: 0) + }) } } }) diff --git a/OrderScheduling/VehicleMonitoring/.DS_Store b/OrderScheduling/VehicleMonitoring/.DS_Store index d48252ab8ffe1be174fa078993b2fe9e5213f687..60042e7e036cdb291e36900534e1ef99d4b29b31 100644 GIT binary patch delta 45 zcmZoMXfc@J&nUPtU^g?P;AS3{V5Z6XY_dFM!9{sF`FZIK3=E7L3+ov-vvd6A2LLMj B4E_KB delta 28 kcmZoMXfc@J&nU1lU^g?Pz-AtnV5W^_D;PJkbNuB80Dw*i-v9sr diff --git a/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift b/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift new file mode 100644 index 0000000..6427661 --- /dev/null +++ b/OrderScheduling/VehicleMonitoring/View/VerticalLoopScrollLabel.swift @@ -0,0 +1,93 @@ +// +// VerticalLoopScrollLabel.swift +// OrderScheduling +// +// Created by 中道 on 2025/7/28. +// + +import UIKit + +/// 可点击的纵向轮播标签控件,支持点击获取当前 index +class VerticalLoopScrollLabel: UIView { + private let scrollView = UIScrollView() + private let label1 = UILabel() + private let label2 = UILabel() + private var timer: Timer? + + /// 点击回调,返回当前索引 + var onTap: ((Int) -> Void)? + + var items: [String] = [] { + didSet { + guard !items.isEmpty else { return } + currentIdx = 0 + label1.text = items.first + label2.text = items.count > 1 ? 
items[1] : items.first + setNeedsLayout() + startLoop() + } + } + + var interval: TimeInterval = 2.5 + private var currentIdx = 0 + private var isLabel1OnTop = true + + override init(frame: CGRect) { + super.init(frame: frame) + clipsToBounds = true + scrollView.isScrollEnabled = false + addSubview(scrollView) + + let tap = UITapGestureRecognizer(target: self, action: #selector(handleTap)) + addGestureRecognizer(tap) + + for label in [label1, label2] { + label.font = .systemFont(ofSize: 16, weight: .semibold) + label.textColor = .white + label.textAlignment = .left + label.numberOfLines = 1 + label.lineBreakMode = .byTruncatingTail + scrollView.addSubview(label) + } + } + required init?(coder: NSCoder) { fatalError() } + + override func layoutSubviews() { + super.layoutSubviews() + scrollView.frame = bounds + label1.frame = CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height) + label2.frame = CGRect(x: 0, y: bounds.height, width: bounds.width, height: bounds.height) + scrollView.contentSize = CGSize(width: bounds.width, height: bounds.height * 2) + } + + private func startLoop() { + timer?.invalidate() + guard items.count > 1 else { return } + timer = Timer.scheduledTimer(withTimeInterval: interval, repeats: true, block: { [weak self] _ in + self?.scrollNext() + }) + } + + private func scrollNext() { + let fromLabel = isLabel1OnTop ? label1 : label2 + let toLabel = isLabel1OnTop ? 
label2 : label1 + let nextIdx = (currentIdx + 1) % items.count + toLabel.text = items[nextIdx] + // 动画向上滚动 + UIView.animate(withDuration: 0.35, animations: { + self.scrollView.contentOffset = CGPoint(x: 0, y: self.bounds.height) + }, completion: { _ in + // 滚动完后交换内容和位置 + self.scrollView.contentOffset = .zero + fromLabel.text = toLabel.text + self.currentIdx = nextIdx + self.isLabel1OnTop.toggle() + }) + } + + @objc private func handleTap() { + onTap?(currentIdx) + } + + deinit { timer?.invalidate() } +} diff --git a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift new file mode 100644 index 0000000..32478d6 --- /dev/null +++ b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitorHistoryController.swift @@ -0,0 +1,467 @@ +// +// VehicleMonitorHistoryController.swift +// OrderScheduling +// +// Created by 中道 on 2025/7/31. +// + +import UIKit +import DDAutoUIKit_Private +import SnapKit +import RxSwift +import RxCocoa +import BRPickerView + +class VehicleMonitorHistoryController : ZDViewController { + var code : String? + let timeline = TimelineView() + let dateView = DateSwitcherView() + let realtimeButton = UIButton() + let disposeBag = DisposeBag() + var fromDateString : String? + var toDateString : String? + var alarmResponse : ResponseModel? + var refreshSub = ReplaySubject.create(bufferSize: 1) + var replaySub = ReplaySubject?>.create(bufferSize: 1) + var videoView = VideoPlayView() + var closeSub = ReplaySubject?>.create(bufferSize: 1) + init(code: String?) 
{ + self.code = code + super.init(nibName: nil, bundle: nil) + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + override func viewDidLoad() { + super.viewDidLoad() + dd_navigationItemTitle = "监控回放" + dd_navigationBarBackgroundColor = .hex("354683") + dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] + dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] + + // 回调当前选中时间 + timeline.onTimeSelected = {[weak self] hour, minute in + self?.fromDateString = self?.dateView.getDateString()?.appending(" \(hour):\(minute):00") + if let fromDateString = self?.fromDateString,let dateFormatter = self?.timeline.dateFormatter { + let toDate = Date(timeIntervalSince1970: ((NSDate.br_date(from: fromDateString, dateFormat: dateFormatter)?.timeIntervalSince1970 ?? 0) + 600)) + self?.toDateString = NSDate.br_string(from: toDate, dateFormat: dateFormatter) + + }else{ + self?.toDateString = self?.fromDateString + } + + if let alarmResponse = self?.alarmResponse { + self?.replaySub.onNext(alarmResponse) + } + } + + realtimeButton.setTitleColor(.white, for: .normal) + realtimeButton.titleLabel?.font = .dd_systemFont(ofSize: 16, weight: .semibold) + realtimeButton.backgroundColor = .dd_hex(light: "0273EE", dark: "0273EE") + realtimeButton.setImage(UIImage(named: "vehicleMonitoring_history_icon"), for: .normal) + realtimeButton.layer.cornerRadius = 6 + realtimeButton.setTitle("实时监控", for: .normal) + realtimeButton.dd_customize(with: .ImageLeftPaddingTitleRightWithWholeCenter, padding: 10) + realtimeButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(self?.alarmResponse) + self?.navigationController?.pushViewController(VehicleMonitorVideoController(vehicleId: 
self?.alarmResponse?.data?.vehicleId,simNumber: self?.alarmResponse?.data?.imei), animated: true) + }) + .disposed(by: disposeBag) + + refreshSub + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_showHUD() + }) + .flatMapLatest { code in + return RQ.getAlarmByCode(parameters: GetAlarmByCodeParameters(code: code)) + .flatMap { response in + return Single.create { single in + if response?.success == true { + single(.success(response)) + }else{ + single(.failure(CommonError())) + } + return Disposables.create() + } + } + } + .retry(when: { (rxError: Observable) -> Observable in + return rxError.flatMap({ error in + return Observable.timer(RxTimeInterval.seconds(5), scheduler: MainScheduler.asyncInstance) + }) + }) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] response in + self?.initUI(response: response) + }) + .disposed(by: disposeBag) + + replaySub + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_showHUD() + }) + .flatMap({[weak self] response in + return RQ.getReplayUrl(prameters: GetVideoUrlParameters(vehicleId: response?.data?.vehicleId, simNumber: response?.data?.imei, channel: response?.data?.channel, startDate: self?.fromDateString, endDate: self?.toDateString)) + }) + .observe(on: ConcurrentMainScheduler.instance) + .do(onNext: {[weak self] response in + self?.view.dd_hideHUD() + }) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] response in + if response?.success == true { + if let first = response?.data?.realtimeList?.first { + self?.videoView.wsUrl = first + if self?.videoView.isPlaying == true { + self?.videoView.beginShow() + }else{ + } + } + }else{ + self?.view.dd_makeToast(response?.msg) + } + }).disposed(by: disposeBag) + + closeSub + .flatMapLatest { response in + return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: 
response?.data?.imei,channel:response?.data?.channel)) + } + .subscribe(onNext: { response in + }) + .disposed(by: disposeBag) + + + dateView.prevButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + if let date = self?.getPreDate(),let dateFormat = self?.dateView.dateFormatter { + self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat)) + } + }) + .disposed(by: disposeBag) + + dateView.nextButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: {[weak self] _ in + if let date = self?.getNextDate(),let dateFormat = self?.dateView.dateFormatter { + self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat)) + } + }) + .disposed(by: disposeBag) + + dateView.dateButton.rx.tap + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { _ in + let picker = BRDatePickerView(pickerMode: .YMD) + picker.show() + }) + .disposed(by: disposeBag) + + NotificationCenter.default.rx + .notification(UIApplication.didEnterBackgroundNotification) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { [weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(self?.alarmResponse) + }) + .disposed(by: disposeBag) + } + + override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + if timeline.superview == nil { + view.addSubview(videoView) + videoView.snp.makeConstraints { make in + make.top.equalTo(view.snp.top).offset(view.safeAreaInsets.top) + make.width.equalTo(auto(375)) + make.height.equalTo(auto(300)) + make.centerX.equalToSuperview() + } + + view.addSubview(dateView) + dateView.snp.makeConstraints { make in + make.centerX.equalToSuperview() + make.top.equalTo(videoView.snp.bottom).offset(20) + make.height.equalTo(64) + make.width.equalTo(200) + } + + view.addSubview(timeline) + timeline.snp.makeConstraints { make in + make.left.right.equalToSuperview() + make.height.equalTo(100) + 
make.top.equalTo(dateView.snp.bottom).offset(20) + } + + view.addSubview(realtimeButton) + realtimeButton.snp.makeConstraints { make in + make.left.right.equalToSuperview().inset(30) + make.height.equalTo(48) + make.top.equalTo(timeline.snp.bottom).offset(20) + } + + refreshSub.onNext(code) + } + } + + func initUI(response: ResponseModel?) { + alarmResponse = response + if let startTime = response?.data?.startTime { + dateView.setDate(dateString: startTime.components(separatedBy: " ").first) + let currentDate = (NSDate.br_date(from: startTime, dateFormat: timeline.dateFormatter) as? NSDate) ?? NSDate() + timeline.scrollToHour(hour: currentDate.br_hour,minute: currentDate.br_minute) + } + } + + func getPreDate() -> Date? { + if let dateString = dateView.getDateString() { + let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? NSDate + return date?.br_getNewDate(toDays: -1) + } + return Date() + } + + func getNextDate() -> Date? { + if let dateString = dateView.getDateString(){ + let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? 
NSDate + return date?.br_getNewDate(toDays: 1) + } + return Date() + } + + override func dd_backActionPop(_ isAnimated: Bool) { + super.dd_backActionPop(isAnimated) + closeSub.onNext(alarmResponse) + } +} + +class DateSwitcherView: UIView { + let dateFormatter = "yyyy-MM-dd" + + // MARK: - UI + let prevButton: UIButton = { + let btn = UIButton(type: .system) + btn.setImage(UIImage(systemName: "chevron.left"), for: .normal) + btn.tintColor = .darkGray + return btn + }() + + let nextButton: UIButton = { + let btn = UIButton(type: .system) + btn.setImage(UIImage(systemName: "chevron.right"), for: .normal) + btn.tintColor = .darkGray + return btn + }() + + let dateButton: UIButton = { + let btn = UIButton(type: .system) + btn.titleLabel?.font = .boldSystemFont(ofSize: 18) + btn.setTitleColor(.black, for: .normal) + btn.titleLabel?.textAlignment = .center + btn.backgroundColor = .clear + return btn + }() + + // MARK: - Init + override init(frame: CGRect) { + super.init(frame: frame) + setupUI() + } + required init?(coder: NSCoder) { + super.init(coder: coder) + setupUI() + } + + private func setupUI() { + + addSubview(prevButton) + addSubview(dateButton) + addSubview(nextButton) + + prevButton.snp.makeConstraints { make in + make.left.equalToSuperview().offset(18) + make.centerY.equalToSuperview() + make.width.height.equalTo(28) + } + + nextButton.snp.makeConstraints { make in + make.right.equalToSuperview().inset(18) + make.centerY.equalToSuperview() + make.width.height.equalTo(28) + } + + dateButton.snp.makeConstraints { make in + make.center.equalToSuperview() + } + } + + func setDate(dateString: String?) { + dateButton.setTitle(dateString, for: .normal) + } + + func getDateString() -> String? { + return dateButton.titleLabel?.text + } + +} + +class TimelineView: UIView, UIScrollViewDelegate { + var dateFormatter = "yyyy-MM-dd HH:mm:ss" + var onTimeSelected: ((String, String) -> Void)? 
+ var selectHour : Int = 0 + var selectMinute : Int = 0 + private let scrollView = UIScrollView() + private let contentView = UIView() + private let indicatorView = UIView() + private let quarterWidth: CGFloat = 36 // 每15分钟宽度(1小时=4*36=144) + private let longTickHeight: CGFloat = 10 // 整点、半点高度 + private let shortTickHeight: CGFloat = 5 // 15/45分高度 + private let labelHeight: CGFloat = 18 + private let totalHours = 24 + + override init(frame: CGRect) { + super.init(frame: frame) + setupUI() + } + required init?(coder: NSCoder) { + super.init(coder: coder) + setupUI() + } + + private func setupUI() { + addSubview(scrollView) + scrollView.showsHorizontalScrollIndicator = false + scrollView.delegate = self + scrollView.snp.makeConstraints { $0.edges.equalToSuperview() } + scrollView.addSubview(contentView) + + indicatorView.backgroundColor = .gray + addSubview(indicatorView) + indicatorView.snp.makeConstraints { + $0.centerX.equalToSuperview() + $0.width.equalTo(2) + $0.top.bottom.equalToSuperview() + } + } + + override func layoutSubviews() { + super.layoutSubviews() + let contentWidth = CGFloat(totalHours) * 4 * quarterWidth + contentView.frame = CGRect(x: 0, y: 0, width: contentWidth, height: bounds.height) + scrollView.contentSize = contentView.bounds.size + let inset = bounds.width / 2 + scrollView.contentInset = UIEdgeInsets(top: 0, left: inset, bottom: 0, right: inset) + layoutTicks() + } + + private func layoutTicks() { + contentView.subviews.forEach { $0.removeFromSuperview() } + for hour in 0...create(bufferSize: 1) + private let disposeBag = DisposeBag() + private var videos : [String] = [] + var channels : [Int] = [] + var channel : Int? + var closeSub = ReplaySubject.create(bufferSize: 1) + var videoView = VideoPlayView() + public init(vehicleId:Int?,simNumber: String?) 
{ + self.vehicleId = vehicleId + self.simNumber = simNumber + self.vehicleMonitorVideoView = VehicleMonitorVideoView() + super.init(nibName: nil, bundle: nil) + } + + public required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + open override func viewDidLoad() { + super.viewDidLoad() + dd_navigationItemTitle = "实时监控" + dd_navigationBarBackgroundColor = .hex("354683") + dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] + dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] + + vehicleMonitorVideoView.categoryView.delegate = self + reloadRelay .filter({[weak self] _ in - return self?.vehicleId != nil + return self?.vehicleId != nil && self?.simNumber != nil }) .observe(on: MainScheduler.instance) .do(onNext: {[weak self] _ in self?.view.dd_showHUD() }) .flatMapLatest {[weak self] _ in - return RQ.getRtspChannel(prameters: RtspChannelParameters(vehicleId: (self?.vehicleId)!)) + return RQ.getRealtimeUrl(prameters: GetVideoUrlParameters(vehicleId: (self?.vehicleId)!,simNumber: (self?.simNumber)!)) } .observe(on: MainScheduler.instance) .do(onNext: {[weak self] _ in @@ -37,12 +84,15 @@ extension VehicleMonitorVideoController { .subscribe(onNext: {[weak self] response in if response?.success == true { var channels : [String] = [] - for index in 0..<(response?.data?.count ?? 0) { - channels.append("通道"+"\(index + 1)") + self?.channels.removeAll() + for index in 0..<(response?.data?.channelList?.count ?? 0) { + let channelN = response?.data?.channelList?[index] ?? "" + channels.append("通道"+"\(channelN)") + self?.channels.append(Int(channelN) ?? 1) } if let data = response?.data { self?.videos.removeAll() - self?.videos.append(contentsOf: data) + self?.videos.append(contentsOf: data.realtimeList ?? 
[]) } self?.vehicleMonitorVideoView.categoryView.titles = channels self?.vehicleMonitorVideoView.categoryView.reloadData() @@ -54,69 +104,52 @@ extension VehicleMonitorVideoController { }) .disposed(by: disposeBag) - reloadRelay.accept(nil) - } -} - -extension VehicleMonitorVideoController : JXCategoryViewDelegate { - public func categoryView(_ categoryView: JXCategoryBaseView!, didSelectedItemAt index: Int) { + closeSub + .flatMapLatest {[weak self] response in + return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: self?.simNumber,channel:self?.channel)) + } + .subscribe(onNext: { response in + }) + .disposed(by: disposeBag) - let vc = children.first as? VehicleMonitoringVideoDetailController - vc?.playAssetURL(assetURL: URL(string: videos[index])!) + NotificationCenter.default.rx + .notification(UIApplication.didEnterBackgroundNotification) + .observe(on: ConcurrentMainScheduler.instance) + .subscribe(onNext: { [weak self] _ in + self?.videoView.endShow() + self?.closeSub.onNext(nil) + }) + .disposed(by: disposeBag) } -} -open class VehicleMonitorVideoController : ZDViewController { - private let vehicleId : Int? - private let vehicleMonitorVideoView : VehicleMonitorVideoView - private let reloadRelay = ReplayRelay.create(bufferSize: 1) - private let disposeBag = DisposeBag() - private var videos : [String] = [] - - public init(vehicleId:Int?) 
{ - self.vehicleId = vehicleId - self.vehicleMonitorVideoView = VehicleMonitorVideoView() - super.init(nibName: nil, bundle: nil) - } - - public required init?(coder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - open override func viewDidLoad() { - super.viewDidLoad() - dd_navigationItemTitle = "视频监控" - dd_navigationBarBackgroundColor = .hex("354683") - dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)] - dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white] - dd_backBarButtonItem?.tintColor = .hex("000000") - - vehicleMonitorVideoView.categoryView.delegate = self - view.addSubview(vehicleMonitorVideoView) - vehicleMonitorVideoView.snp.makeConstraints { make in - make.top.equalToSuperview().offset(CGRectGetHeight(UIApplication.shared.dd_statusBarFrame)+CGRectGetHeight(navigationController?.navigationBar.frame ?? .zero)) - make.left.right.bottom.equalToSuperview() + open override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + if vehicleMonitorVideoView.superview == nil { + view.addSubview(vehicleMonitorVideoView) + vehicleMonitorVideoView.snp.makeConstraints { make in + make.top.equalToSuperview().offset(view.safeAreaInsets.top) + make.left.right.bottom.equalToSuperview() + } + + vehicleMonitorVideoView.addSubview(videoView) + videoView.snp.makeConstraints { make in + make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10)) + make.width.equalTo(auto(375)) + make.height.equalTo(auto(300)) + make.centerX.equalToSuperview() + } + reloadRelay.accept(nil) } - - let videoDetailVc = VehicleMonitoringVideoDetailController(assetURL: nil) - videoDetailVc.dd_navigationBarBackgroundColor = .white - videoDetailVc.dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.hex("000000"),.font:UIFont.mediumFont(17)] - videoDetailVc.dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.hex("000000")] - - 
addChild(videoDetailVc) - vehicleMonitorVideoView.addSubview(videoDetailVc.view) - videoDetailVc.view.snp.makeConstraints { make in - make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10)) - make.width.equalTo(auto(375)) - make.height.equalTo(auto(300)) - make.centerX.equalToSuperview() - } - - addActions() } - + + open override func dd_backActionPop(_ isAnimated: Bool) { + super.dd_backActionPop(isAnimated) + videoView.endShow() + closeSub.onNext(nil) + } + open override var preferredStatusBarStyle: UIStatusBarStyle { - return .default + return .lightContent } } diff --git a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift index 78d2a6d..011f60b 100644 --- a/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift +++ b/OrderScheduling/VehicleMonitoring/ViewController/VehicleMonitoringController.swift @@ -51,15 +51,22 @@ extension VehicleMonitoringController { .do(onNext: {[weak self] _ in self?.view.dd_showHUD() }) - .flatMapLatest { _ in - return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo()) + .flatMapLatest {[weak self] _ in + guard let self = self else { + return Single.just(( + nil as ResponseModel?, + nil as ResponseModel?, + [] as [AlarmListDataModel] + )) + } + return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo(),self.getAllAlarmList(pageNum: 1, alarmList: [])) } .observe(on: MainScheduler.instance) - .do(onNext: {[weak self] _,_ in + .do(onNext: {[weak self] _,_,_ in self?.view.dd_hideHUD() }) .observe(on: MainScheduler.instance) - .subscribe(onNext: {[weak self] response,generalInfo in + .subscribe(onNext: {[weak self] response,generalInfo,alarmList in if generalInfo?.success == true { /// 如果list列表数量为0的话就显示当前位置 if let lat = generalInfo?.data?.addressLat,let lon = generalInfo?.data?.addressLon { @@ -109,6 +116,22 @@ extension VehicleMonitoringController { 
self?.vehicleMonitoringView.vehicleMonitoringPannelView.categoryView.reloadData() } + if alarmList.count > 0 { + self?.alarmList = alarmList + + var items : [String] = [] + for i in 0.. Single<[AlarmListDataModel]> { + func recursive(pageNum: Int,alarmList: [AlarmListDataModel]) -> Single<[AlarmListDataModel]> { + return RQ.alarmList(parameters: AlarmListParameters(pageNum: pageNum, supplierId: USER.supplierId,handStatus: AlarmListParameters.HandStatusEnum.pending.rawValue)) + .flatMap { response in + if (response?.data?.count ?? 0) == 0 { + return Single.create { single in + single(.success(alarmList)) + return Disposables.create() + } + } + let addAlarmList = alarmList + (response?.data ?? []) + return recursive(pageNum: pageNum + 1, alarmList: addAlarmList) + } + } + + return recursive(pageNum: 1, alarmList: []) + } } extension VehicleMonitoringController : DDMAMapViewDelegate { @@ -721,6 +764,13 @@ extension VehicleMonitoringController { vehicleMonitoringListDetailView.updateData(taskModels: vehicleModel.taskList ?? []) + /// 视频按钮显示规则 + if USER.supplierType == 1 && vehicleModel.terminalType == VehicleMonitorListDataModel.ItemModel.TerminalTypeEnum.GPS.rawValue { + vehicleMonitoringListDetailView.videoButton.isHidden = false + }else{ + vehicleMonitoringListDetailView.videoButton.isHidden = true + } + /// 当为max时收回pannelView if pannelPanGes.panGesValue.expandLevel == .max { previousStateOfPannelView = .max @@ -833,6 +883,7 @@ open class VehicleMonitoringController : ZDViewController { private var vehicleLogoutModel : VehicleMonitorListDataModel.ItemModel? 
private var vehicleLogoutRelay = ReplayRelay.create(bufferSize: 1) + private var alarmList : [AlarmListDataModel] = [] private let disposeBag = DisposeBag() open override func viewDidLoad() { @@ -990,16 +1041,29 @@ open class VehicleMonitoringView : DDView { public let coverView : DDView public let tapGes : UITapGestureRecognizer public let panGes : UIPanGestureRecognizer + public let offlineView : VehicleDeviceOffLineView + public let alarmView : VehicleAlarmView public init(titles: [String]) { vehicleMonitoringPannelView = VehicleMonitoringPannelView(titles:titles) maMapView = DDMAMapView() coverView = DDView() tapGes = UITapGestureRecognizer() panGes = UIPanGestureRecognizer() + offlineView = VehicleDeviceOffLineView() + alarmView = VehicleAlarmView() super.init(frame: .zero) maMapView.maMapView.isRotateCameraEnabled = false addSubview(maMapView) + + offlineView.offlineIconImageView.image = UIImage(named: "vehicleMonitoring_offline_icon") + offlineView.backgroundColor = .dd_hex(light: "FB8958", dark: "FB8958") + offlineView.layer.cornerRadius = 6 + offlineView.isHidden = true + addSubview(offlineView) + alarmView.imageView.image = UIImage(named: "vehicleMonitoring_alarm") + alarmView.isHidden = true + addSubview(alarmView) coverView.addGestureRecognizer(tapGes) coverView.addGestureRecognizer(panGes) coverView.isHidden = true @@ -1009,6 +1073,19 @@ open class VehicleMonitoringView : DDView { coverView.snp.makeConstraints { make in make.edges.equalToSuperview() } + + alarmView.snp.makeConstraints { make in + make.top.equalToSuperview().offset(10) + make.right.equalToSuperview().offset(-10) + make.width.height.equalTo(50) + } + + offlineView.snp.makeConstraints { make in + make.centerY.equalTo(alarmView) + make.centerX.equalToSuperview() + make.height.equalTo(35) + make.width.equalTo(200) + } } required public init?(coder: NSCoder) { @@ -1016,6 +1093,70 @@ open class VehicleMonitoringView : DDView { } } +open class VehicleDeviceOffLineView : DDView { + let 
offlineIconImageView : DDImageView + let offLineLabel : VerticalLoopScrollLabel + public override init(frame: CGRect) { + offlineIconImageView = DDImageView() + offLineLabel = VerticalLoopScrollLabel() + super.init(frame: frame) + addSubview(offlineIconImageView) + addSubview(offLineLabel) + + offlineIconImageView.snp.makeConstraints { make in + make.left.equalTo(15) + make.centerY.equalToSuperview() + } + + offLineLabel.snp.makeConstraints { make in + make.left.equalTo(offlineIconImageView.snp.right).offset(10) + make.right.equalToSuperview().offset(-10) + make.top.bottom.equalToSuperview().inset(8) + } + + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } +} + +open class VehicleAlarmView : DDView { + public let imageView : DDImageView + public let count : DDLabel + + public override init(frame: CGRect) { + self.imageView = DDImageView() + self.count = DDLabel() + super.init(frame: frame) + + addSubview(imageView) + count.layer.cornerRadius = 1 + count.layer.borderColor = UIColor.dd_hex(light: "FFFFFF", dark: "FFFFFF").cgColor + count.layer.borderWidth = 0.8 + count.layer.masksToBounds = true + count.backgroundColor = .dd_hex(light: "F93D3D", dark: "F93D3D") + count.textColor = .dd_hex(light: "FFFFFF", dark: "FFFFFF") + count.font = .dd_systemFont(ofSize: 12, weight: .semibold) + addSubview(count) + + imageView.snp.makeConstraints { make in + make.centerX.centerY.equalToSuperview() + make.width.height.lessThanOrEqualToSuperview() + } + + count.snp.makeConstraints { make in + make.right.equalTo(imageView.snp.right) + make.top.equalTo(imageView.snp.top) + } + + } + + @MainActor required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } +} + open class VehicleMonitoringPannelView : DDView { public let radiusView : DDView public let categoryView : JXCategoryNumberView @@ -1212,6 +1353,7 @@ class VehicleMonitoringListDetailView : DDView, 
JXCategoryListContainerViewDeleg public let stateLabel : DDLabel public let vehicleLabel : DDLabel public let settingButton : DDButton + public let videoButton : DDButton public let nameLabel : DDLabel public let callButton : DDButton public let containerView : DDView @@ -1235,6 +1377,8 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg vehicleLabel = DDLabel.dd_init(withText: "", font: .mediumFont(auto(14)), textColor: .hex("11142F")) settingButton = DDButton.dd_initCustom() settingButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_setting"), for: .normal) + videoButton = DDButton.dd_initCustom() + videoButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_video_icon"), for: .normal) nameLabel = DDLabel.dd_init(withText: "", font: .regularFont(auto(14)), textColor: .hex("11142F")) callButton = DDButton.dd_initCustom() callButton.setBackgroundImage(UIImage(named: "vehicleMonitor_call_cell"), for: .normal) @@ -1258,6 +1402,7 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg icon.addSubview(stateLabel) addSubview(vehicleLabel) addSubview(settingButton) + addSubview(videoButton) addSubview(nameLabel) addSubview(callButton) @@ -1339,12 +1484,19 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg } settingButton.snp.makeConstraints { make in - make.left.equalTo(vehicleLabel.snp.right).offset(auto(2.5)) + make.left.equalTo(vehicleLabel.snp.right).offset(auto(5)) make.centerY.equalTo(icon) make.width.equalTo(auto(16)) make.height.equalTo(auto(14)) } + videoButton.snp.makeConstraints { make in + make.left.equalTo(settingButton.snp.right).offset(auto(10)) + make.centerY.equalTo(icon) + make.width.equalTo(auto(23)) + make.height.equalTo(auto(13)) + } + callButton.snp.makeConstraints { make in make.right.equalTo(-auto(20)) make.centerY.equalTo(backButton) diff --git a/OrderScheduling/Video/Video/view/VideoPlayView.h 
b/OrderScheduling/Video/Video/view/VideoPlayView.h new file mode 100644 index 0000000..8a6b6bf --- /dev/null +++ b/OrderScheduling/Video/Video/view/VideoPlayView.h @@ -0,0 +1,31 @@ +// +// WWVideoReplayView.h +// wanwayInternet +//made in zhongdao Copyright © 2020 liuchao. All rights reserved. +// + +#import +#import "AAPLEAGLLayer.h" + +NS_ASSUME_NONNULL_BEGIN + +UIKIT_EXTERN NSString *WWCarVideoReplayLastChannel; + +@protocol WWVideoReplayViewDelegate + +- (void)viedoReplayView:(UIView *)videoView fullScreenAction:(BOOL)fullScreen; + +@end + +@interface VideoPlayView : UIView + +@property (nonatomic,strong)AAPLEAGLLayer *playLayer; +@property (nonatomic, weak) id repalyDelegate; +@property (nonatomic, strong) NSString *wsUrl; +@property (nonatomic, assign) BOOL isPlaying; + +- (void)beginShow; +- (void)endShow; +@end + +NS_ASSUME_NONNULL_END diff --git a/OrderScheduling/Video/Video/view/VideoPlayView.m b/OrderScheduling/Video/Video/view/VideoPlayView.m new file mode 100644 index 0000000..aaa88f8 --- /dev/null +++ b/OrderScheduling/Video/Video/view/VideoPlayView.m @@ -0,0 +1,314 @@ +// +// WWVideoReplayView.m +// wanwayInternet +//made in zhongdao Copyright © 2020 liuchao. All rights reserved. 
+// + +#import "VideoPlayView.h" +#import "SRWebSocket.h" +#import "H264DecodeTool.h" +#import "g726.h" +#import "g711.h" +#import "PCMStreamPlayer.h" +#import "YFTimerManager.h" +#import "YFProgressHUD.h" + +@interface VideoPlayView () + +@property (nonatomic,strong) SRWebSocket *websocket; +@property (nonatomic,strong) NSMutableData *receivedVideoData; +@property (nonatomic,strong) H264DecodeTool *h264Decoder; +@property (nonatomic, strong) PCMStreamPlayer *pcmPlayer; +@property (nonatomic, assign,getter=isStopPlayBuffer) BOOL stopPlayBuffer; +@property (nonatomic, strong) UIButton *playBtn; +@property (nonatomic, strong) YFProgressHUD *hud; +@end + +@implementation VideoPlayView { + g726_state_t *m_state726; +} + +- (instancetype)initWithFrame:(CGRect)frame { + if (self = [super initWithFrame:frame]) { + self.backgroundColor = [UIColor blackColor]; + + //g726 to pcm + m_state726 = (g726_state_t *)malloc(sizeof(g726_state_t)); + m_state726 = g726_init(m_state726, 8000*5);//2-16kBits 3-24kBits 4-32kBits 5-40kBits + _receivedVideoData = [NSMutableData data]; + + //操作按钮 + [self addSubview:self.playBtn]; + + [self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(endShow)]]; + } + return self; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + self.playBtn.center = CGPointMake(CGRectGetWidth(self.bounds)/2, CGRectGetHeight(self.bounds)/2); +} + +- (void)dealloc { + NSLog(@"***** video replay view dealloc"); + [self stopPlay]; + _pcmPlayer = nil; + _playLayer = nil; + [self.websocket close]; +} + + +//全屏播放 +- (void)repalyFullScreenBtnTouchAction:(UIButton *)btn { + btn.selected = !btn.selected; + if (self.repalyDelegate && [self.repalyDelegate respondsToSelector:@selector(viedoReplayView:fullScreenAction:)]) { + [self.repalyDelegate viedoReplayView:self fullScreenAction:btn.isSelected]; + } +} + +- (void)endShow { + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + + 
self.playBtn.hidden = NO; + [self stopPlay]; + + self.isPlaying = NO; +} + +- (void)stopPlay { + + [YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0]; + + if (_websocket != nil && _websocket.readyState == SR_OPEN) { + [_websocket close]; + + } + if (_websocket != nil) { + _websocket = nil; + } + + if (self.pcmPlayer) { [self.pcmPlayer resetPlay]; } + + if (_playLayer) { + [_playLayer resetRenderBuffer]; + [_playLayer cleanUpTextures]; + self.stopPlayBuffer = YES; + } + + if (_receivedVideoData.length > 0) { + [_receivedVideoData resetBytesInRange:NSMakeRange(0, self.receivedVideoData.length)]; + _receivedVideoData.length = 0; + } +} + +- (void)beginShow { + [self replayBtnTouchWith:self.wsUrl]; +} + +- (void)replayBtnTouchWith:(NSString *)wsUrl { + + if ([wsUrl isKindOfClass:[NSString class]] && wsUrl.length > 0) { + self.stopPlayBuffer = NO; + self.playBtn.hidden = YES; + [self videoShowWithUrl:wsUrl]; + self.isPlaying = YES; + } + +} + +- (void)videoShowWithUrl:(NSString *)url { + NSMutableURLRequest *req = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:url]]; + _websocket = [[SRWebSocket alloc] initWithURLRequest:req]; + _websocket.delegate = self; + + if (self.pcmPlayer) { + [self.pcmPlayer resetPlay]; + } + + [self.playLayer resetRenderBuffer]; + [_websocket open]; + self.hud = [YFProgressHUD showProgressHUDinView:self title:@"努力加载视频中!"]; + +} + +//发送心跳包 +-(void)toDoThingsWhenTimeCome:(NSTimeInterval)interval{ + + if (interval == 5.0) { + if (self.websocket != nil && self.websocket.readyState == SR_OPEN) { + NSLog(@"********* websocket send state %ld", self.websocket.readyState); + [self.websocket send:@"0"]; + } + } + +} + +#pragma mark ================ SRWebSocketDelegate ======================= +- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message { + NSLog(@"收到数据了******* %ld %@ %@",webSocket.readyState, [message class], message); + + if ([message isKindOfClass:[NSData class]] == NO) { return; } + + NSData *data = 
[NSData dataWithData:message]; +// NSString *aString = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]; +// NSLog(@"\n\n%@\n\n",aString) + + + if (data.length < 24) { return; } + //NSLog(@" --- begin --- %@", data); + + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + + //sim 卡号在收到的包头里。对讲用到 +// self.speakSimCardData = [data subdataWithRange:NSMakeRange(8, 6)]; + + __weak typeof(self) weakself = self; +// g726_state_t *weak_m_state726 = m_state726; + + NSInteger dataLength = data.length; + NSInteger dataOffset = 0; + + while (dataOffset < dataLength) { + + //跳过5个字节没用 + NSData *typeData = [data subdataWithRange:NSMakeRange(dataOffset + 5, 1)]; //62 -> 0110 0010 -> 第1位,和后7位 + const Byte *byteData = (Byte *)[typeData bytes]; + + Byte ptByte = byteData[0]; + int isComplete = ptByte >> 7; //标志位,是否是完整数据帧边界,根据这个来拼接 + int loadType = ptByte & 0x7f; //98位视频数据,否则为音频数据 + + NSLog(@"%@ -- %d %d", typeData, isComplete, loadType); + + if (loadType == 98) { + NSData *videoSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 28, 2)]; + const Byte *sizeBytes = (Byte *)[videoSizeData bytes]; + int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff); + NSData *videoPerData = [data subdataWithRange:NSMakeRange(dataOffset + 30, size)]; + + //NSLog(@"---- one data %@", videoPerData); + [weakself.receivedVideoData appendData:videoPerData]; //61 0110 0001 + + dataOffset += 30 + size; + + //NSLog(@"size -- %@ %d %ld ",videoSizeData, size, dataOffset); + if (isComplete > 0) { + //NSLog(@"---- all data %@", self.receivedVideoData); + [weakself.h264Decoder decodeNalu:(uint8_t *)[weakself.receivedVideoData bytes] size:(uint32_t)weakself.receivedVideoData.length alive:NO]; + [weakself.receivedVideoData resetBytesInRange:NSMakeRange(0, weakself.receivedVideoData.length)]; + weakself.receivedVideoData.length = 0; + } + + } else { + + NSData *audioSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 24, 2)]; + const 
Byte *sizeBytes = (Byte *)[audioSizeData bytes]; + int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff);//[self intFromData:videoSizeData]; + NSData *audioData = [data subdataWithRange:NSMakeRange(dataOffset + 26, size)]; + NSLog(@"---- audio = %d %ld-%d", loadType, dataLength,size); + dataOffset += 26 + size; + //[self.fileHandle writeData:audioData]; + + //G726 40k码率 8000采样频率 5bit采样位数 + //86 -- 1000 0110 -- loadType=6-G711A 8-G726 + + //G726 转码 pcm + //ffplay -f s16le -ac 1 -ar 8000 a.pcm + //ffplay -f g726le -ar 8000 -ac 1 -code_size 5 -i test.g726 + int outLen = size*6, iRet = 0; + short *outBuffer = (short *)malloc(outLen); + unsigned char *audioDataBuffer = (unsigned char *)audioData.bytes; + + if (loadType == 6) { + // audio = 6 186-160 + //iRet = 0, out = 320, {length = 320, bytes = 0x008a008e 00a600d9 ... 0801002d 005a007a } + outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ALAW); + } else if (loadType == 7) { + outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ULAW); + } else { + //audio = 8 126-100 + //iRet = 160, out = 320, {length = 320, bytes = 0x0000fcff 1c00f4ff ... 
4816c457 0c12a8da } + iRet = g726_decode(self->m_state726, outBuffer, audioDataBuffer, size); + outLen = iRet*2; + } + + //不播放声音 + if (weakself.pcmPlayer) { + [weakself.pcmPlayer playWithData:(Byte *)outBuffer size:outLen]; + } + free(outBuffer); + } + } + + //NSLog(@" --- end --- "); +} + +- (void)webSocketDidOpen:(SRWebSocket *)webSocket { + [YFTimerManager addTimerDelegate:self forTimeInterval:5.0]; +} + +- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error { + if (self.hud) { + [YFProgressHUD hiddenProgressHUDforView:self]; + self.hud = nil; + } + [YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0]; + NSLog(@"*********** websocket error %@", error); + [YFProgressHUD showToastTitle:(@"视频播放失败,请重试!")]; + [self endShow]; + +} + +#pragma mark ================ H264DecodeFrameCallbackDelegate ======================= +- (void)gotDecodedFrame:(CVImageBufferRef)imageBuffer { + if(imageBuffer) { + //解码回来的数据绘制播放 + if (!self.isStopPlayBuffer) { + self.playLayer.pixelBuffer = imageBuffer; + CVPixelBufferRelease(imageBuffer); + } + } +} + +#pragma mark - getter + +- (AAPLEAGLLayer *)playLayer { + if (_playLayer == nil) { + _playLayer = [[AAPLEAGLLayer alloc] initWithFrame:self.bounds]; + [self.layer insertSublayer:_playLayer atIndex:0]; + } + return _playLayer;; +} + + +- (H264DecodeTool *)h264Decoder { + if (_h264Decoder == nil) { + _h264Decoder = [[H264DecodeTool alloc] init]; + _h264Decoder.delegate = self; + } + return _h264Decoder; +} + +- (PCMStreamPlayer *)pcmPlayer { + return nil; +} + +- (UIButton *)playBtn{ + if (!_playBtn) { + _playBtn = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 80, 80)]; + _playBtn.center = self.center; + [_playBtn setImage:[UIImage imageNamed:@"ww_video_paly"] forState:UIControlStateNormal]; + [_playBtn addTarget:self action:@selector(beginShow) forControlEvents:UIControlEventTouchUpInside]; + } + return _playBtn; +} + +@end + diff --git a/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h 
b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h new file mode 100755 index 0000000..8c35b9d --- /dev/null +++ b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h @@ -0,0 +1,20 @@ +/* + Copyright (C) 2014 Apple Inc. All Rights Reserved. + See LICENSE.txt for this sample’s licensing information + + Abstract: + + This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. + + */ + +//@import QuartzCore; +#include +#include + +@interface AAPLEAGLLayer : CAEAGLLayer +@property CVPixelBufferRef pixelBuffer; +- (id)initWithFrame:(CGRect)frame; +- (void)resetRenderBuffer; +- (void) cleanUpTextures; +@end diff --git a/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m new file mode 100755 index 0000000..a163446 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m @@ -0,0 +1,595 @@ +/* + Copyright (C) 2014 Apple Inc. All Rights Reserved. + See LICENSE.txt for this sample’s licensing information + + Abstract: + + This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. + + */ + +#import "AAPLEAGLLayer.h" + +#import +#import +#include +#import +#include +#include +#include +#include +#include + +// Uniform index. +enum +{ + UNIFORM_Y, + UNIFORM_UV, + UNIFORM_ROTATION_ANGLE, + UNIFORM_COLOR_CONVERSION_MATRIX, + NUM_UNIFORMS +}; +GLint uniforms[NUM_UNIFORMS]; + +// Attribute index. +enum +{ + ATTRIB_VERTEX, + ATTRIB_TEXCOORD, + NUM_ATTRIBUTES +}; + +// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range) + +// BT.601, which is the standard for SDTV. +static const GLfloat kColorConversion601[] = { + 1.164, 1.164, 1.164, + 0.0, -0.392, 2.017, + 1.596, -0.813, 0.0, +}; + +// BT.709, which is the standard for HDTV. 
+static const GLfloat kColorConversion709[] = { + 1.164, 1.164, 1.164, + 0.0, -0.213, 2.112, + 1.793, -0.533, 0.0, +}; + + + +@interface AAPLEAGLLayer () +{ + // The pixel dimensions of the CAEAGLLayer. + GLint _backingWidth; + GLint _backingHeight; + + EAGLContext *_context; + CVOpenGLESTextureRef _lumaTexture; + CVOpenGLESTextureRef _chromaTexture; + + GLuint _frameBufferHandle; + GLuint _colorBufferHandle; + + const GLfloat *_preferredConversion; +} +@property GLuint program; + +@end +@implementation AAPLEAGLLayer +@synthesize pixelBuffer = _pixelBuffer; + +-(CVPixelBufferRef) pixelBuffer +{ + return _pixelBuffer; +} + +- (void)setPixelBuffer:(CVPixelBufferRef)pb +{ + if(_pixelBuffer) { + CVPixelBufferRelease(_pixelBuffer); + } + _pixelBuffer = CVPixelBufferRetain(pb); + + int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer); + int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer); + [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight]; +} + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super init]; + if (self) { + CGFloat scale = [[UIScreen mainScreen] scale]; + self.contentsScale = scale; + + self.opaque = TRUE; + self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]}; + + [self setFrame:frame]; + + // Set the context into which the frames will be drawn. + _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + + if (!_context) { + return nil; + } + + // Set the default conversion to BT.709, which is the standard for HDTV. 
+ _preferredConversion = kColorConversion709; + + [self setupGL]; + } + + return self; +} + +- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + if(pixelBuffer == NULL) { + NSLog(@"Pixel buffer is null"); + return; + } + + CVReturn err; + + size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); + + /* + Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix. + */ + CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL); + if ( CFStringCompare((CFStringRef)colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) { + _preferredConversion = kColorConversion601; + } + else { + _preferredConversion = kColorConversion709; + } + + /* + CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef. + */ + + /* + Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane. + */ + + CVOpenGLESTextureCacheRef _videoTextureCache; + + // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion. 
+ err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache); + if (err != noErr) { + NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err); + return; + } + + glActiveTexture(GL_TEXTURE0); + + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _videoTextureCache, + pixelBuffer, + NULL, + GL_TEXTURE_2D, + GL_RED_EXT, + frameWidth, + frameHeight, + GL_RED_EXT, + GL_UNSIGNED_BYTE, + 0, + &_lumaTexture); + if (err) { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + if(planeCount == 2) { + // UV-plane. + glActiveTexture(GL_TEXTURE1); + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _videoTextureCache, + pixelBuffer, + NULL, + GL_TEXTURE_2D, + GL_RG_EXT, + frameWidth / 2, + frameHeight / 2, + GL_RG_EXT, + GL_UNSIGNED_BYTE, + 1, + &_chromaTexture); + if (err) { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } + + glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); + + // Set the view port to the entire view. + glViewport(0, 0, _backingWidth, _backingHeight); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + // Use shader program. 
+ glUseProgram(self.program); + // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1); + // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1); + glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); + glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); + + // Set up the quad vertices with respect to the orientation and aspect ratio of the video. + CGRect viewBounds = self.bounds; + CGSize contentSize = CGSizeMake(frameWidth, frameHeight); + CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds); + + // Compute normalized quad coordinates to draw the frame into. + CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0); + CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width, + vertexSamplingRect.size.height/viewBounds.size.height); + + // Normalize the quad vertices. + if (cropScaleAmount.width > cropScaleAmount.height) { + normalizedSamplingSize.width = 1.0; + normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width; + } + else { + normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height; + normalizedSamplingSize.height = 1.0;; + } + + /* + The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers. + Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen. + */ + GLfloat quadVertexData [] = { + (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)(-1 * normalizedSamplingSize.height), + (GLfloat)normalizedSamplingSize.width, (GLfloat)(-1 * normalizedSamplingSize.height), + (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)normalizedSamplingSize.height, + (GLfloat)normalizedSamplingSize.width, (GLfloat)normalizedSamplingSize.height, + }; + + // Update attribute values. 
+ glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData); + glEnableVertexAttribArray(ATTRIB_VERTEX); + + /* + The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system. + */ + CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1); + GLfloat quadTextureData[] = { + (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect), + (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect), + (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect), + (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect) + }; + + glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData); + glEnableVertexAttribArray(ATTRIB_TEXCOORD); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); + [_context presentRenderbuffer:GL_RENDERBUFFER]; + + [self cleanUpTextures]; + // Periodic texture cache flush every frame + CVOpenGLESTextureCacheFlush(_videoTextureCache, 0); + + if(_videoTextureCache) { + CFRelease(_videoTextureCache); + } +} + +# pragma mark - OpenGL setup + +- (void)setupGL +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self setupBuffers]; + [self loadShaders]; + + glUseProgram(self.program); + + // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively. 
+ glUniform1i(uniforms[UNIFORM_Y], 0); + glUniform1i(uniforms[UNIFORM_UV], 1); + glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); + glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); +} + +#pragma mark - Utilities + +- (void)setupBuffers +{ + glDisable(GL_DEPTH_TEST); + + glEnableVertexAttribArray(ATTRIB_VERTEX); + glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); + + glEnableVertexAttribArray(ATTRIB_TEXCOORD); + glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); + + [self createBuffers]; +} + +- (void) createBuffers +{ + glGenFramebuffers(1, &_frameBufferHandle); + glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); + + glGenRenderbuffers(1, &_colorBufferHandle); + glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); + + [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self]; + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth); + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight); + + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle); + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); + } +} + +- (void) releaseBuffers +{ + if(_frameBufferHandle) { + glDeleteFramebuffers(1, &_frameBufferHandle); + _frameBufferHandle = 0; + } + + if(_colorBufferHandle) { + glDeleteRenderbuffers(1, &_colorBufferHandle); + _colorBufferHandle = 0; + } +} + +- (void) resetRenderBuffer +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self releaseBuffers]; + [self createBuffers]; +} + +- (void) cleanUpTextures +{ + if (_lumaTexture) { + CFRelease(_lumaTexture); + _lumaTexture = NULL; + } + + if (_chromaTexture) { + CFRelease(_chromaTexture); + _chromaTexture = NULL; + } +} + 
+#pragma mark - OpenGL ES 2 shader compilation + +const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;" +"precision mediump float;" +"uniform sampler2D SamplerY;" +"uniform sampler2D SamplerUV;" +"uniform mat3 colorConversionMatrix;" +"void main()" +"{" +" mediump vec3 yuv;" +" lowp vec3 rgb;" +// Subtract constants to map the video range start at 0 +" yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));" +" yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));" +" rgb = colorConversionMatrix * yuv;" +" gl_FragColor = vec4(rgb, 1);" +"}"; + +const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;" +"attribute vec2 texCoord;" +"uniform float preferredRotation;" +"varying vec2 texCoordVarying;" +"void main()" +"{" +" mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0," +" sin(preferredRotation), cos(preferredRotation), 0.0, 0.0," +" 0.0, 0.0, 1.0, 0.0," +" 0.0, 0.0, 0.0, 1.0);" +" gl_Position = position * rotationMatrix;" +" texCoordVarying = texCoord;" +"}"; + +- (BOOL)loadShaders +{ + GLuint vertShader = 0, fragShader = 0; + + // Create the shader program. + self.program = glCreateProgram(); + + if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) { + NSLog(@"Failed to compile vertex shader"); + return NO; + } + + if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) { + NSLog(@"Failed to compile fragment shader"); + return NO; + } + + // Attach vertex shader to program. + glAttachShader(self.program, vertShader); + + // Attach fragment shader to program. + glAttachShader(self.program, fragShader); + + // Bind attribute locations. This needs to be done prior to linking. + glBindAttribLocation(self.program, ATTRIB_VERTEX, "position"); + glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord"); + + // Link the program. 
+ if (![self linkProgram:self.program]) { + NSLog(@"Failed to link program: %d", self.program); + + if (vertShader) { + glDeleteShader(vertShader); + vertShader = 0; + } + if (fragShader) { + glDeleteShader(fragShader); + fragShader = 0; + } + if (self.program) { + glDeleteProgram(self.program); + self.program = 0; + } + + return NO; + } + + // Get uniform locations. + uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY"); + uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV"); + // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold"); + // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold"); + uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation"); + uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix"); + + // Release vertex and fragment shaders. + if (vertShader) { + glDetachShader(self.program, vertShader); + glDeleteShader(vertShader); + } + if (fragShader) { + glDetachShader(self.program, fragShader); + glDeleteShader(fragShader); + } + + return YES; +} + +- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString +{ + *shader = glCreateShader(type); + glShaderSource(*shader, 1, &shaderString, NULL); + glCompileShader(*shader); + +#if defined(DEBUG) + GLint logLength; + glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetShaderInfoLog(*shader, logLength, &logLength, log); + NSLog(@"Shader compile log:\n%s", log); + free(log); + } +#endif + + GLint status = 0; + glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); + if (status == 0) { + glDeleteShader(*shader); + return NO; + } + + return YES; +} + +- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL +{ + NSError *error; + NSString *sourceString = [[NSString alloc] 
initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error]; + if (sourceString == nil) { + NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]); + return NO; + } + + const GLchar *source = (GLchar *)[sourceString UTF8String]; + + return [self compileShaderString:shader type:type shaderString:source]; +} + +- (BOOL)linkProgram:(GLuint)prog +{ + GLint status; + glLinkProgram(prog); + +#if defined(DEBUG) + GLint logLength; + glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetProgramInfoLog(prog, logLength, &logLength, log); + NSLog(@"Program link log:\n%s", log); + free(log); + } +#endif + + glGetProgramiv(prog, GL_LINK_STATUS, &status); + if (status == 0) { + return NO; + } + + return YES; +} + +- (BOOL)validateProgram:(GLuint)prog +{ + GLint logLength, status; + + glValidateProgram(prog); + glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + GLchar *log = (GLchar *)malloc(logLength); + glGetProgramInfoLog(prog, logLength, &logLength, log); + NSLog(@"Program validate log:\n%s", log); + free(log); + } + + glGetProgramiv(prog, GL_VALIDATE_STATUS, &status); + if (status == 0) { + return NO; + } + + return YES; +} + +- (void)dealloc +{ + if (!_context || ![EAGLContext setCurrentContext:_context]) { + return; + } + + [self cleanUpTextures]; + + if(_pixelBuffer) { + CVPixelBufferRelease(_pixelBuffer); + } + + if (self.program) { + glDeleteProgram(self.program); + self.program = 0; + } + if(_context) { + //[_context release]; + _context = nil; + } + //[super dealloc]; +} + +@end diff --git a/OrderScheduling/Video/VideoTools/H264DecodeTool.h b/OrderScheduling/Video/VideoTools/H264DecodeTool.h new file mode 100644 index 0000000..264e82f --- /dev/null +++ b/OrderScheduling/Video/VideoTools/H264DecodeTool.h @@ -0,0 +1,29 @@ +// +// H264DecodeTool.h +// VideoToolBoxDecodeH264 +//made in zhongdao Copyright © 2018年 AnDong. All rights reserved. 
+// + +#import +#import +#import + +@protocol H264DecodeFrameCallbackDelegate + +//回调sps和pps数据 +- (void)gotDecodedFrame:(CVImageBufferRef )imageBuffer; + +@end + +@interface H264DecodeTool : NSObject + +-(BOOL)initH264Decoder; + +//解码nalu +-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive; + +- (void)endDecode; + +@property (weak, nonatomic) id delegate; + +@end diff --git a/OrderScheduling/Video/VideoTools/H264DecodeTool.m b/OrderScheduling/Video/VideoTools/H264DecodeTool.m new file mode 100644 index 0000000..dea7173 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/H264DecodeTool.m @@ -0,0 +1,298 @@ +// +// H264DecodeTool.m +// VideoToolBoxDecodeH264 +//made in zhongdao Copyright © 2018年 AnDong. All rights reserved. +// + +#import "H264DecodeTool.h" + +const uint8_t lyStartCode[4] = {0, 0, 0, 1}; + +@interface H264DecodeTool(){ + + //解码session + VTDecompressionSessionRef _decoderSession; + + //解码format 封装了sps和pps + CMVideoFormatDescriptionRef _decoderFormatDescription; + + //sps & pps + uint8_t *_sps; + NSInteger _spsSize; + uint8_t *_pps; + NSInteger _ppsSize; + +} +@property(nonatomic,assign)BOOL isNewValue; + +@end + +@implementation H264DecodeTool + +- (BOOL)initH264Decoder{ + + if(_decoderSession){ + return YES; + } + + + const uint8_t* const parameterSetPointers[2] = { _sps, _pps }; + const size_t parameterSetSizes[2] = { _spsSize, _ppsSize }; + + //NSLog(@"----- init h264 -- sps %@ --- pps %@", [NSData dataWithBytes:_sps length:_spsSize], [NSData dataWithBytes:_pps length:_ppsSize]); + + //用sps 和pps 实例化_decoderFormatDescription + OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, + 2, //参数个数 + parameterSetPointers, + parameterSetSizes, + 4, //nal startcode开始的size + &_decoderFormatDescription); + + if(status == noErr) { + NSDictionary* destinationPixelBufferAttributes = @{ + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], 
+ //硬解必须是 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + // 或者是kCVPixelFormatType_420YpCbCr8Planar + //因为iOS是 nv12 其他是nv21 + (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:1280], + (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:960], + //这里宽高和编码反的 两倍关系 + (id)kCVPixelBufferOpenGLCompatibilityKey : [NSNumber numberWithBool:YES] + }; + + + + VTDecompressionOutputCallbackRecord callBackRecord; + callBackRecord.decompressionOutputCallback = didDecompress; + callBackRecord.decompressionOutputRefCon = (__bridge void *)self; + status = VTDecompressionSessionCreate(kCFAllocatorDefault, + _decoderFormatDescription, + NULL, + (__bridge CFDictionaryRef)destinationPixelBufferAttributes, + &callBackRecord, + &_decoderSession); + VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)[NSNumber numberWithInt:1]); + VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue); + } else { + NSLog(@"IOS8VT: reset decoder session failed status=%d", status); + return NO; + } + + return YES; +} + +//解码回调 +static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){ + CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon; + + //持有pixelBuffer数据,否则会被释放 + *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer); + H264DecodeTool *decoder = (__bridge H264DecodeTool *)decompressionOutputRefCon; + if (decoder.delegate) + { + [decoder.delegate gotDecodedFrame:pixelBuffer]; + } +} + + +//解码nalu裸数据 +-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive +{ + + int flag = 1; + uint8_t * packetBuffer = NULL; + long packetSize = 0; + + long count = 0, location = 0; + for (long i = 0 ; i < frameSize; i++) { + if (frame[i] == 0) { + count++; + } else if (frame[i] == 1 && ((isAlive && count == 3) || ( !isAlive 
&& count >= 3)) && i > 3) { + // 0x00 0x00 0x00 0x01 如果存在多个0x00 以前的count == 3 就有问题 所以用count >= 3 正确,这里专门针对DQ001和除它之外的其他设备的实时和回看适配 + if (packetBuffer) { + free(packetBuffer); + packetBuffer = NULL; + } + packetSize = i - location - 3; + + packetBuffer = (uint8_t *)malloc(packetSize); + memcpy(packetBuffer, frame+location, packetSize); + + location = i - 3; + count = 0; + + flag = 0; + //NSLog(@"1---%@", [NSData dataWithBytes:packetBuffer length:packetSize]); + [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize]; + } else { + count = 0; + } + } + + + if (flag) { + //NSLog(@"2---%@", [NSData dataWithBytes:frame length:frameSize]); + [self oneDecodeNalu:frame size:frameSize]; + } else { + free(packetBuffer); + packetBuffer = NULL; + packetSize = frameSize - location; + + packetBuffer = (uint8_t *)malloc(packetSize); + memcpy(packetBuffer, frame+location, packetSize); + + //NSLog(@"3---%@", [NSData dataWithBytes:packetBuffer length:packetSize]); + [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize]; + } + + //NSLog(@"4---"); + + +} + + +-(void)oneDecodeNalu:(uint8_t *)frame size:(uint32_t)frameSize { + // NSLog(@"------------开始解码"); + + //获取nalu type + int nalu_type = (frame[4] & 0x1F); + CVPixelBufferRef pixelBuffer = NULL; + + //填充nalu size 去掉start code 替换成nalu size + uint32_t nalSize = (uint32_t)(frameSize - 4); + uint8_t *pNalSize = (uint8_t*)(&nalSize); + frame[0] = *(pNalSize + 3); + frame[1] = *(pNalSize + 2); + frame[2] = *(pNalSize + 1); + frame[3] = *(pNalSize); + + switch (nalu_type) + { + case 0x05: + //关键帧 + if([self initH264Decoder]) + { + pixelBuffer = [self decode:frame size:frameSize]; + } + break; + case 0x07: + //sps + _spsSize = frameSize - 4; +// uint8_t *oldsps = _sps; + + _sps = (uint8_t *)malloc(_spsSize); + memcpy(_sps, &frame[4], _spsSize); +// if (oldsps != _sps) { +// self.isNewValue = YES; +// } + break; + case 0x08: + { + //pps +// uint8_t * oldpps = _pps; + + _ppsSize = frameSize - 4; + _pps = (uint8_t *)malloc(_ppsSize); + 
memcpy(_pps, &frame[4], _ppsSize); + +// if (oldpps != _pps) { +// self.isNewValue = YES; +// } + break; + } + default: + { + // B/P frame + if([self initH264Decoder]) + { + pixelBuffer = [self decode:frame size:frameSize]; + } + break; + } + + + } +} + + +//解码帧数据 +- (CVPixelBufferRef)decode:(uint8_t *)frame size:(uint32_t)frameSize{ + CVPixelBufferRef outputPixelBuffer = NULL; + + CMBlockBufferRef blockBuffer = NULL; + + //创建CMBlockBufferRef + OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, + (void *)frame, + frameSize, + kCFAllocatorNull, + NULL, + 0, + frameSize, + FALSE, + &blockBuffer); + if (status == kCMBlockBufferNoErr) { + + CMSampleBufferRef sampleBuffer = NULL; + const size_t sampleSizeArray[] = {frameSize}; + + //创建sampleBuffer + status = CMSampleBufferCreateReady(kCFAllocatorDefault, + blockBuffer, + _decoderFormatDescription , + 1, 0, NULL, 1, sampleSizeArray, + &sampleBuffer); + + if (status == kCMBlockBufferNoErr && sampleBuffer) { + VTDecodeFrameFlags flags = 0; + VTDecodeInfoFlags flagOut = 0; + //CMSampleBufferRef丢进去解码 + OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession, + sampleBuffer, + flags, + &outputPixelBuffer, + &flagOut); + + if(decodeStatus == kVTInvalidSessionErr) { + NSLog(@"IOS8VT: Invalid session, reset decoder session"); + } else if(decodeStatus == kVTVideoDecoderBadDataErr) { + NSLog(@"IOS8VT: decode failed status=%d(Bad data)", decodeStatus); + } else if(decodeStatus != noErr) { + NSLog(@"IOS8VT: decode failed status=%d", decodeStatus); + } + CFRelease(sampleBuffer); + } + CFRelease(blockBuffer); + } + //返回pixelBuffer数据 + return outputPixelBuffer; +} + +- (void)endDecode{ + + if(_decoderSession) { + VTDecompressionSessionInvalidate(_decoderSession); + CFRelease(_decoderSession); + _decoderSession = NULL; + } + + if(_decoderFormatDescription) { + CFRelease(_decoderFormatDescription); + _decoderFormatDescription = NULL; + } + + if (_sps) { + free(_sps); + } + + if (_pps) { + free(_pps); + } + + 
_ppsSize = _spsSize = 0; +} + + + +@end diff --git a/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h new file mode 100644 index 0000000..fbbc943 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.h @@ -0,0 +1,18 @@ +// +// PCMStreamPlayer.h +// LinePlayer +//made in zhongdao Copyright © 2020 myz. All rights reserved. +// + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface PCMStreamPlayer : NSObject + +-(void)playWithData:(Byte *)pcmData size:(int)length; +- (void)resetPlay; +//-(void)stop; +@end + +NS_ASSUME_NONNULL_END diff --git a/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m new file mode 100644 index 0000000..ff85fb0 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/PCMStreamPlayer.m @@ -0,0 +1,131 @@ +// +// PCMStreamPlayer.m +// LinePlayer +//made in zhongdao Copyright © 2020 myz. All rights reserved. +// + +#import "PCMStreamPlayer.h" +#import + + +#define QUEUE_BUFFER_SIZE 6 //队列缓冲个数 +#define MIN_SIZE_PER_FRAME 600 //每帧最小数据长度 + +@interface PCMStreamPlayer() { + NSLock *synlock ;//同步控制 + AudioQueueRef audioQueue;//音频播放队列 + BOOL audioQueueUsed[QUEUE_BUFFER_SIZE]; //音频缓存是否在使用中 + AudioStreamBasicDescription audioDescription;//音频参数 + AudioQueueBufferRef audioQueueBuffers[QUEUE_BUFFER_SIZE];//音频缓冲 + + int bufferSizeCount; +} + +@end + +@implementation PCMStreamPlayer + +- (instancetype)init { + if (self=[super init]) { + bufferSizeCount = 1; + synlock = [[NSLock alloc] init]; + [self reset]; + } + return self; +} + +- (void)reset { + [self stop]; + + ///设置音频参数 + audioDescription.mSampleRate = 8000; //采样率 + audioDescription.mFormatID = kAudioFormatLinearPCM; + audioDescription.mFormatFlags = (kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked); + audioDescription.mChannelsPerFrame = 1; ///单声道 + audioDescription.mFramesPerPacket = 1; //每一个packet一侦数据 + audioDescription.mBitsPerChannel = 16; 
//每个采样点16bit量化 + audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel / 8) * audioDescription.mChannelsPerFrame; + audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame; + AudioQueueNewOutput(&audioDescription, audioPlayerAQInputCallback, (__bridge void*)self, nil, nil, 0, &audioQueue); //使用player的内部线程播放 + //AudioQueueSetParameter(audioQueue, kAudioQueueParam_Volume, 1.0); + //初始化音频缓冲区 + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + AudioQueueAllocateBuffer(audioQueue, MIN_SIZE_PER_FRAME, &audioQueueBuffers[i]); + } + +} + +- (void)dealloc { + NSLog(@"***** pcmstream player dealloc"); +} + +- (void)stop { + if (audioQueue) { + AudioQueueStop(audioQueue, true); + AudioQueueReset(audioQueue); + audioQueue = nil; + } +} + +- (void)resetPlay { + [self stop]; +} + +-(void)playWithData:(Byte *)pcmData size:(int)length { + if (audioQueue == nil) { //|| ![self checkBufferHasUsed] + // 第一次使用 + [self reset]; + AudioQueueStart(audioQueue, NULL); + } + + [synlock lock]; + AudioQueueBufferRef audioQueueBuffer = NULL; + while (true) { + audioQueueBuffer = [self getNotUsedBuffer]; + if (audioQueueBuffer != NULL) { + break; + } + } + + audioQueueBuffer->mAudioDataByteSize = length; + memcpy(audioQueueBuffer->mAudioData, pcmData, length); + AudioQueueEnqueueBuffer(audioQueue, audioQueueBuffer, 0, NULL); + [synlock unlock]; +} +static void audioPlayerAQInputCallback(void *input, AudioQueueRef audioQueue, AudioQueueBufferRef audioQueueBuffers) { + PCMStreamPlayer *player = (__bridge PCMStreamPlayer*)input; + [player playerCallback:audioQueueBuffers]; +} + +// 是不是有缓冲在使用中 +- (BOOL)checkBufferHasUsed +{ + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (YES == audioQueueUsed[i]) { + return YES; + } + } + return NO; +} +// 获取没有在使用的缓冲 +- (AudioQueueBufferRef)getNotUsedBuffer +{ + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (NO == audioQueueUsed[i]) { + audioQueueUsed[i] = YES; + return audioQueueBuffers[i]; + } + } + return NULL; +} + +// 标志缓冲空闲中 +- 
(void)playerCallback:(AudioQueueBufferRef)outQB { + for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) { + if (outQB == audioQueueBuffers[i]) { + audioQueueUsed[i] = NO; + } + } +} + +@end diff --git a/OrderScheduling/Video/VideoTools/SRWebSocket.h b/OrderScheduling/Video/VideoTools/SRWebSocket.h new file mode 100644 index 0000000..ca3a2c0 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/SRWebSocket.h @@ -0,0 +1,154 @@ +// +// Copyright 2012 Square Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import +#import + +typedef NS_ENUM(NSInteger, SRReadyState) { + SR_CONNECTING = 0, + SR_OPEN = 1, + SR_CLOSING = 2, + SR_CLOSED = 3, +}; + +typedef enum SRStatusCode : NSInteger { + // 0–999: Reserved and not used. + SRStatusCodeNormal = 1000, + SRStatusCodeGoingAway = 1001, + SRStatusCodeProtocolError = 1002, + SRStatusCodeUnhandledType = 1003, + // 1004 reserved. + SRStatusNoStatusReceived = 1005, + SRStatusCodeAbnormal = 1006, + SRStatusCodeInvalidUTF8 = 1007, + SRStatusCodePolicyViolated = 1008, + SRStatusCodeMessageTooBig = 1009, + SRStatusCodeMissingExtension = 1010, + SRStatusCodeInternalError = 1011, + SRStatusCodeServiceRestart = 1012, + SRStatusCodeTryAgainLater = 1013, + // 1014: Reserved for future use by the WebSocket standard. + SRStatusCodeTLSHandshake = 1015, + // 1016–1999: Reserved for future use by the WebSocket standard. + // 2000–2999: Reserved for use by WebSocket extensions. 
+ // 3000–3999: Available for use by libraries and frameworks. May not be used by applications. Available for registration at the IANA via first-come, first-serve. + // 4000–4999: Available for use by applications. +} SRStatusCode; + +@class SRWebSocket; + +extern NSString *const SRWebSocketErrorDomain; +extern NSString *const SRHTTPResponseErrorKey; + +#pragma mark - SRWebSocketDelegate + +@protocol SRWebSocketDelegate; + +#pragma mark - SRWebSocket + +@interface SRWebSocket : NSObject + +@property (nonatomic, weak) id delegate; + +@property (nonatomic, readonly) SRReadyState readyState; +@property (nonatomic, readonly, retain) NSURL *url; + + +@property (nonatomic, readonly) CFHTTPMessageRef receivedHTTPHeaders; + +// Optional array of cookies (NSHTTPCookie objects) to apply to the connections +@property (nonatomic, readwrite) NSArray * requestCookies; + +// This returns the negotiated protocol. +// It will be nil until after the handshake completes. +@property (nonatomic, readonly, copy) NSString *protocol; + +// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol. +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols; +- (id)initWithURLRequest:(NSURLRequest *)request; + +// Some helper constructors. +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols; +- (id)initWithURL:(NSURL *)url; + +// Delegate queue will be dispatch_main_queue by default. +// You cannot set both OperationQueue and dispatch_queue. +- (void)setDelegateOperationQueue:(NSOperationQueue*) queue; +- (void)setDelegateDispatchQueue:(dispatch_queue_t) queue; + +// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes. 
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; +- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; + +// SRWebSockets are intended for one-time-use only. Open should be called once and only once. +- (void)open; + +- (void)close; +- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason; + +// Send a UTF8 String or Data. +- (void)send:(id)data; + +// Send Data (can be nil) in a ping message. +- (void)sendPing:(NSData *)data; + +@end + +#pragma mark - SRWebSocketDelegate + +@protocol SRWebSocketDelegate + +// message will either be an NSString if the server is using text +// or NSData if the server is using binary. +- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message; + +@optional + +- (void)webSocketDidOpen:(SRWebSocket *)webSocket; +- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error; +- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean; +- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload; + +// Return YES to convert messages sent as Text to an NSString. Return NO to skip NSData -> NSString conversion for Text messages. Defaults to YES. 
+- (BOOL)webSocketShouldConvertTextFrameToString:(SRWebSocket *)webSocket; + +@end + +#pragma mark - NSURLRequest (SRCertificateAdditions) + +@interface NSURLRequest (SRCertificateAdditions) + +@property (nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates; + +@end + +#pragma mark - NSMutableURLRequest (SRCertificateAdditions) + +@interface NSMutableURLRequest (SRCertificateAdditions) + +@property (nonatomic, retain) NSArray *SR_SSLPinnedCertificates; + +@end + +#pragma mark - NSRunLoop (SRWebSocket) + +@interface NSRunLoop (SRWebSocket) + ++ (NSRunLoop *)SR_networkRunLoop; + +@end diff --git a/OrderScheduling/Video/VideoTools/SRWebSocket.m b/OrderScheduling/Video/VideoTools/SRWebSocket.m new file mode 100644 index 0000000..8673d10 --- /dev/null +++ b/OrderScheduling/Video/VideoTools/SRWebSocket.m @@ -0,0 +1,1921 @@ +// +// Copyright 2012 Square Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + + +#import "SRWebSocket.h" + +#if TARGET_OS_IPHONE +#define HAS_ICU +#endif + +#ifdef HAS_ICU +#import +#endif + +#if TARGET_OS_IPHONE +#import +#else +#import +#endif + +#import +#import + +#if OS_OBJECT_USE_OBJC_RETAIN_RELEASE +#define sr_dispatch_retain(x) +#define sr_dispatch_release(x) +#define maybe_bridge(x) ((__bridge void *) x) +#else +#define sr_dispatch_retain(x) dispatch_retain(x) +#define sr_dispatch_release(x) dispatch_release(x) +#define maybe_bridge(x) (x) +#endif + +#if !__has_feature(objc_arc) +#error SocketRocket must be compiled with ARC enabled +#endif + + +typedef enum { + SROpCodeTextFrame = 0x1, + SROpCodeBinaryFrame = 0x2, + // 3-7 reserved. + SROpCodeConnectionClose = 0x8, + SROpCodePing = 0x9, + SROpCodePong = 0xA, + // B-F reserved. +} SROpCode; + +typedef struct { + BOOL fin; +// BOOL rsv1; +// BOOL rsv2; +// BOOL rsv3; + uint8_t opcode; + BOOL masked; + uint64_t payload_length; +} frame_header; + +static NSString *const SRWebSocketAppendToSecKeyString = @"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; + +static inline int32_t validate_dispatch_data_partial_string(NSData *data); +static inline void SRFastLog(NSString *format, ...); + + +static NSString *newSHA1String(const char *bytes, size_t length) { + uint8_t md[CC_SHA1_DIGEST_LENGTH]; + + assert(length >= 0); + assert(length <= UINT32_MAX); + CC_SHA1(bytes, (CC_LONG)length, md); + + NSData *data = [NSData dataWithBytes:md length:CC_SHA1_DIGEST_LENGTH]; + + if ([data respondsToSelector:@selector(base64EncodedStringWithOptions:)]) { + return [data base64EncodedStringWithOptions:0]; + } + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + return [data base64Encoding]; +#pragma clang diagnostic pop +} + + +@interface NSData (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; + +@end + +@implementation NSData (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; { + return newSHA1String((char *)self.bytes, self.length); +} + 
+@end + + + + +@interface NSString (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; + +@end + + +@interface NSURL (SRWebSocket) + +// The origin isn't really applicable for a native application. +// So instead, just map ws -> http and wss -> https. +- (NSString *)SR_origin; + +@end + + +@interface _SRRunLoopThread : NSThread + +@property (nonatomic, readonly) NSRunLoop *runLoop; + +@end + + + + + +@implementation NSString (SRWebSocket) + +- (NSString *)stringBySHA1ThenBase64Encoding; +{ + return newSHA1String(self.UTF8String, self.length); +} + +@end + +NSString *const SRWebSocketErrorDomain = @"SRWebSocketErrorDomain"; +NSString *const SRHTTPResponseErrorKey = @"HTTPResponseStatusCode"; + +// Returns number of bytes consumed. Returning 0 means you didn't match. +// Sends bytes to callback handler; +typedef size_t (^stream_scanner)(NSData *collected_data); + +typedef void (^data_callback)(SRWebSocket *webSocket, NSData *data); + +@interface SRIOConsumer : NSObject { + stream_scanner _scanner; + data_callback _handler; + size_t _bytesNeeded; + BOOL _readToCurrentFrame; + BOOL _unmaskBytes; +} +@property (nonatomic, copy, readonly) stream_scanner consumer; +@property (nonatomic, copy, readonly) data_callback handler; +@property (nonatomic, assign) size_t bytesNeeded; +@property (nonatomic, assign, readonly) BOOL readToCurrentFrame; +@property (nonatomic, assign, readonly) BOOL unmaskBytes; + +@end + +// This class is not thread-safe, and is expected to always be run on the same queue. 
+@interface SRIOConsumerPool : NSObject + +- (id)initWithBufferCapacity:(NSUInteger)poolSize; + +- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes; +- (void)returnConsumer:(SRIOConsumer *)consumer; + +@end + +@interface SRWebSocket () + +@property (nonatomic) SRReadyState readyState; + +@property (nonatomic) NSOperationQueue *delegateOperationQueue; +@property (nonatomic) dispatch_queue_t delegateDispatchQueue; + +// Specifies whether SSL trust chain should NOT be evaluated. +// By default this flag is set to NO, meaning only secure SSL connections are allowed. +// For DEBUG builds this flag is ignored, and SSL connections are allowed regardless +// of the certificate trust configuration +@property (nonatomic, readwrite) BOOL allowsUntrustedSSLCertificates; + +@end + + +@implementation SRWebSocket { + NSInteger _webSocketVersion; + + NSOperationQueue *_delegateOperationQueue; + dispatch_queue_t _delegateDispatchQueue; + + dispatch_queue_t _workQueue; + NSMutableArray *_consumers; + + NSInputStream *_inputStream; + NSOutputStream *_outputStream; + + NSMutableData *_readBuffer; + NSUInteger _readBufferOffset; + + NSMutableData *_outputBuffer; + NSUInteger _outputBufferOffset; + + uint8_t _currentFrameOpcode; + size_t _currentFrameCount; + size_t _readOpCount; + uint32_t _currentStringScanPosition; + NSMutableData *_currentFrameData; + + NSString *_closeReason; + + NSString *_secKey; + NSString *_basicAuthorizationString; + + BOOL _pinnedCertFound; + + uint8_t _currentReadMaskKey[4]; + size_t _currentReadMaskOffset; + + BOOL _consumerStopped; + + BOOL _closeWhenFinishedWriting; + BOOL _failed; + + BOOL _secure; + NSURLRequest *_urlRequest; + + BOOL _sentClose; + BOOL _didFail; + BOOL _cleanupScheduled; + int _closeCode; + + BOOL _isPumping; + + NSMutableSet *_scheduledRunloops; + + // We use this to retain ourselves. 
+ __strong SRWebSocket *_selfRetain; + + NSArray *_requestedProtocols; + SRIOConsumerPool *_consumerPool; +} + +@synthesize delegate = _delegate; +@synthesize url = _url; +@synthesize readyState = _readyState; +@synthesize protocol = _protocol; + +static __strong NSData *CRLFCRLF; + ++ (void)initialize; +{ + CRLFCRLF = [[NSData alloc] initWithBytes:"\r\n\r\n" length:4]; +} + +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +{ + self = [super init]; + if (self) { + assert(request.URL); + _url = request.URL; + _urlRequest = request; + _allowsUntrustedSSLCertificates = allowsUntrustedSSLCertificates; + + _requestedProtocols = [protocols copy]; + + [self _SR_commonInit]; + } + + return self; +} + +- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols; +{ + return [self initWithURLRequest:request protocols:protocols allowsUntrustedSSLCertificates:NO]; +} + +- (id)initWithURLRequest:(NSURLRequest *)request; +{ + return [self initWithURLRequest:request protocols:nil]; +} + +- (id)initWithURL:(NSURL *)url; +{ + return [self initWithURL:url protocols:nil]; +} + +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols; +{ + NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url]; + return [self initWithURLRequest:request protocols:protocols]; +} + +- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates; +{ + NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url]; + return [self initWithURLRequest:request protocols:protocols allowsUntrustedSSLCertificates:allowsUntrustedSSLCertificates]; +} + +- (void)_SR_commonInit; +{ + NSString *scheme = _url.scheme.lowercaseString; + assert([scheme isEqualToString:@"ws"] || [scheme isEqualToString:@"http"] || [scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]); + + if 
([scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]) {
        // wss:// and https:// schemes get TLS on the underlying socket pair.
        _secure = YES;
    }

    _readyState = SR_CONNECTING;
    _consumerStopped = YES;
    _webSocketVersion = 13;  // RFC 6455 protocol version

    // Serial queue on which all socket I/O and state mutation happens.
    _workQueue = dispatch_queue_create(NULL, DISPATCH_QUEUE_SERIAL);

    // Going to set a specific on the queue so we can validate we're on the work queue
    dispatch_queue_set_specific(_workQueue, (__bridge void *)self, maybe_bridge(_workQueue), NULL);

    // Delegate callbacks default to the main queue unless overridden via
    // -setDelegateDispatchQueue: or an operation queue.
    _delegateDispatchQueue = dispatch_get_main_queue();
    sr_dispatch_retain(_delegateDispatchQueue);

    _readBuffer = [[NSMutableData alloc] init];
    _outputBuffer = [[NSMutableData alloc] init];

    _currentFrameData = [[NSMutableData alloc] init];

    _consumers = [[NSMutableArray alloc] init];

    _consumerPool = [[SRIOConsumerPool alloc] init];

    _scheduledRunloops = [[NSMutableSet alloc] init];

    [self _initializeStreams];

    // default handlers
}

// Asserts the caller is running on _workQueue, using the queue-specific set in init.
- (void)assertOnWorkQueue;
{
    assert(dispatch_get_specific((__bridge void *)self) == maybe_bridge(_workQueue));
}

// Tears down streams, the work queue, and CF/dispatch resources.
// sr_dispatch_release is a no-op under OS_OBJECT_USE_OBJC; needed on older deployment targets.
- (void)dealloc
{
    _inputStream.delegate = nil;
    _outputStream.delegate = nil;

    [_inputStream close];
    [_outputStream close];

    if (_workQueue) {
        sr_dispatch_release(_workQueue);
        _workQueue = NULL;
    }

    if (_receivedHTTPHeaders) {
        CFRelease(_receivedHTTPHeaders);
        _receivedHTTPHeaders = NULL;
    }

    if (_delegateDispatchQueue) {
        sr_dispatch_release(_delegateDispatchQueue);
        _delegateDispatchQueue = NULL;
    }
}

#ifndef NDEBUG

// Debug-only setter enforcing that readyState only ever moves forward
// (CONNECTING -> OPEN -> CLOSING -> CLOSED); release builds use the plain ivar.
- (void)setReadyState:(SRReadyState)aReadyState;
{
    assert(aReadyState > _readyState);
    _readyState = aReadyState;
}

#endif

// Starts the connection. May be called once per instance.
// _selfRetain keeps the socket alive until cleanup even if the caller drops it;
// released later in -_cleanupSelfReference:.
- (void)open;
{
    assert(_url);
    NSAssert(_readyState == SR_CONNECTING, @"Cannot call -(void)open on SRWebSocket more than once");

    _selfRetain = self;

    if (_urlRequest.timeoutInterval > 0)
    {
        // Fail with a 504-style error if still CONNECTING after the request's timeout.
        dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, _urlRequest.timeoutInterval * NSEC_PER_SEC);
        dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
            if (self.readyState == SR_CONNECTING)
                [self _failWithError:[NSError errorWithDomain:@"com.squareup.SocketRocket" code:504 userInfo:@{NSLocalizedDescriptionKey: @"Timeout Connecting to Server"}]];
        });
    }

    [self openConnection];
}

// Calls block on delegate queue (operation queue takes precedence over dispatch queue).
- (void)_performDelegateBlock:(dispatch_block_t)block;
{
    if (_delegateOperationQueue) {
        [_delegateOperationQueue addOperationWithBlock:block];
    } else {
        assert(_delegateDispatchQueue);
        dispatch_async(_delegateDispatchQueue, block);
    }
}

// Swaps the queue delegate callbacks are dispatched on; retain-before-release
// so passing the currently-set queue is safe.
- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
{
    if (queue) {
        sr_dispatch_retain(queue);
    }

    if (_delegateDispatchQueue) {
        sr_dispatch_release(_delegateDispatchQueue);
    }

    _delegateDispatchQueue = queue;
}

// Verifies the server's Sec-WebSocket-Accept header equals
// base64(SHA1(_secKey + magic GUID)) per RFC 6455 section 4.2.2.
- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage;
{
    NSString *acceptHeader = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(httpMessage, CFSTR("Sec-WebSocket-Accept")));

    if (acceptHeader == nil) {
        return NO;
    }

    NSString *concattedString = [_secKey stringByAppendingString:SRWebSocketAppendToSecKeyString];
    NSString *expectedAccept = [concattedString stringBySHA1ThenBase64Encoding];

    return [acceptHeader isEqualToString:expectedAccept];
}

// Validates the completed HTTP upgrade response, transitions to OPEN,
// starts frame reading, and notifies the delegate.
- (void)_HTTPHeadersDidFinish;
{
    NSInteger responseCode = CFHTTPMessageGetResponseStatusCode(_receivedHTTPHeaders);

    if (responseCode >= 400) {
        // NOTE(review): %d with an NSInteger argument is a format mismatch on
        // 64-bit; debug-log only, but should be %ld with a (long) cast.
        SRFastLog(@"Request failed with response code %d", responseCode);
        [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2132 userInfo:@{NSLocalizedDescriptionKey:[NSString stringWithFormat:@"received bad response code from server %ld", (long)responseCode], SRHTTPResponseErrorKey:@(responseCode)}]];
        return;
    }

    if(![self _checkHandshake:_receivedHTTPHeaders]) {
        [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Invalid Sec-WebSocket-Accept response"] forKey:NSLocalizedDescriptionKey]]];
        return;
    }

    NSString *negotiatedProtocol = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_receivedHTTPHeaders, CFSTR("Sec-WebSocket-Protocol")));
    if (negotiatedProtocol) {
        // Make sure we requested the protocol
        if ([_requestedProtocols indexOfObject:negotiatedProtocol] == NSNotFound) {
            [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Server specified Sec-WebSocket-Protocol that wasn't requested"] forKey:NSLocalizedDescriptionKey]]];
            return;
        }

        _protocol = negotiatedProtocol;
    }

    self.readyState = SR_OPEN;

    if (!_didFail) {
        [self _readFrameNew];
    }

    [self _performDelegateBlock:^{
        if ([self.delegate respondsToSelector:@selector(webSocketDidOpen:)]) {
            [self.delegate webSocketDidOpen:self];
        };
    }];
}


// Accumulates bytes into _receivedHTTPHeaders until CRLFCRLF completes the
// header block, then hands off to -_HTTPHeadersDidFinish; recurses otherwise.
- (void)_readHTTPHeader;
{
    if (_receivedHTTPHeaders == NULL) {
        _receivedHTTPHeaders = CFHTTPMessageCreateEmpty(NULL, NO);
    }

    [self _readUntilHeaderCompleteWithCallback:^(SRWebSocket *self,  NSData *data) {
        CFHTTPMessageAppendBytes(_receivedHTTPHeaders, (const UInt8 *)data.bytes, data.length);

        if (CFHTTPMessageIsHeaderComplete(_receivedHTTPHeaders)) {
            SRFastLog(@"Finished reading headers %@", CFBridgingRelease(CFHTTPMessageCopyAllHeaderFields(_receivedHTTPHeaders)));
            [self _HTTPHeadersDidFinish];
        } else {
            [self _readHTTPHeader];
        }
    }];
}

// Builds and sends the RFC 6455 HTTP upgrade request (key, version, origin,
// cookies, basic auth, requested subprotocols), then begins reading the response.
- (void)didConnect;
{
    SRFastLog(@"Connected");
    CFHTTPMessageRef request = CFHTTPMessageCreateRequest(NULL, CFSTR("GET"), (__bridge CFURLRef)_url, kCFHTTPVersion1_1);

    // Set host first so it defaults
    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Host"), (__bridge CFStringRef)(_url.port ? [NSString stringWithFormat:@"%@:%@", _url.host, _url.port] : _url.host));

    // 16 random bytes -> 24-char base64 Sec-WebSocket-Key.
    NSMutableData *keyBytes = [[NSMutableData alloc] initWithLength:16];
    SecRandomCopyBytes(kSecRandomDefault, keyBytes.length, keyBytes.mutableBytes);

    if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
        _secKey = [keyBytes base64EncodedStringWithOptions:0];
    } else {
        // Fallback for OS versions predating base64EncodedStringWithOptions:.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        _secKey = [keyBytes base64Encoding];
#pragma clang diagnostic pop
    }

    assert([_secKey length] == 24);

    // Apply cookies if any have been provided
    NSDictionary * cookies = [NSHTTPCookie requestHeaderFieldsWithCookies:[self requestCookies]];
    for (NSString * cookieKey in cookies) {
        NSString * cookieValue = [cookies objectForKey:cookieKey];
        if ([cookieKey length] && [cookieValue length]) {
            CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)cookieKey, (__bridge CFStringRef)cookieValue);
        }
    }

    // set header for http basic auth
    if (_url.user.length && _url.password.length) {
        NSData *userAndPassword = [[NSString stringWithFormat:@"%@:%@", _url.user, _url.password] dataUsingEncoding:NSUTF8StringEncoding];
        NSString *userAndPasswordBase64Encoded;
        if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
            userAndPasswordBase64Encoded = [userAndPassword base64EncodedStringWithOptions:0];
        } else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            userAndPasswordBase64Encoded = [userAndPassword base64Encoding];
#pragma clang diagnostic pop
        }
        _basicAuthorizationString = [NSString stringWithFormat:@"Basic %@", userAndPasswordBase64Encoded];
        CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Authorization"), (__bridge CFStringRef)_basicAuthorizationString);
    }

    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Upgrade"), CFSTR("websocket"));
    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Connection"), CFSTR("Upgrade"));
    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Key"), (__bridge CFStringRef)_secKey);
    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Version"), (__bridge CFStringRef)[NSString stringWithFormat:@"%ld", (long)_webSocketVersion]);

    CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Origin"), (__bridge CFStringRef)_url.SR_origin);

    if (_requestedProtocols) {
        CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Protocol"), (__bridge CFStringRef)[_requestedProtocols componentsJoinedByString:@", "]);
    }

    // Caller-supplied headers last, so they can override defaults.
    [_urlRequest.allHTTPHeaderFields enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) {
        CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj);
    }];

    NSData *message = CFBridgingRelease(CFHTTPMessageCopySerializedMessage(request));

    CFRelease(request);

    [self _writeData:message];
    [self _readHTTPHeader];
}

// Creates the paired CF read/write socket streams for the URL's host/port,
// defaulting the port to 80 or 443 when the URL omits it.
- (void)_initializeStreams;
{
    assert(_url.port.unsignedIntValue <= UINT32_MAX);
    uint32_t port = _url.port.unsignedIntValue;
    if (port == 0) {
        if (!_secure) {
            port = 80;
        } else {
            port = 443;
        }
    }
    NSString *host = _url.host;

    CFReadStreamRef readStream = NULL;
    CFWriteStreamRef writeStream = NULL;

    CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)host, port, &readStream, &writeStream);

    _outputStream = CFBridgingRelease(writeStream);
    _inputStream = CFBridgingRelease(readStream);

    _inputStream.delegate = self;
    _outputStream.delegate = self;
}

// Configures TLS options on the stream pair (setting them on _outputStream
// applies to the shared underlying socket) and the network service type.
- (void)_updateSecureStreamOptions;
{
    if (_secure) {
        NSMutableDictionary *SSLOptions = [[NSMutableDictionary alloc] init];

        [_outputStream setProperty:(__bridge id)kCFStreamSocketSecurityLevelNegotiatedSSL forKey:(__bridge id)kCFStreamPropertySocketSecurityLevel];

        // If we're using pinned certs, don't validate the certificate chain
        if ([_urlRequest SR_SSLPinnedCertificates].count) {
            [SSLOptions setValue:@NO forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
        }

#if DEBUG
        // NOTE(review): DEBUG builds force-accept untrusted certificates below.
        // Verify this cannot leak into release configurations.
        self.allowsUntrustedSSLCertificates = YES;
#endif

        if (self.allowsUntrustedSSLCertificates) {
            [SSLOptions setValue:@NO forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
            SRFastLog(@"Allowing connection to any root cert");
        }

        [_outputStream setProperty:SSLOptions
                            forKey:(__bridge id)kCFStreamPropertySSLSettings];
    }

    _inputStream.delegate = self;
    _outputStream.delegate = self;

    [self setupNetworkServiceType:_urlRequest.networkServiceType];
}

// Maps the URL request's NSURLRequestNetworkServiceType onto the equivalent
// NSStream service type and applies it to both streams (no-op for Default;
// networkServiceType stays nil under ARC when no case assigns it).
- (void)setupNetworkServiceType:(NSURLRequestNetworkServiceType)requestNetworkServiceType
{
    NSString *networkServiceType;
    switch (requestNetworkServiceType) {
        case NSURLNetworkServiceTypeDefault:
            break;
        case NSURLNetworkServiceTypeVoIP: {
            networkServiceType = NSStreamNetworkServiceTypeVoIP;
#if TARGET_OS_IPHONE && __IPHONE_9_0
            if (floor(NSFoundationVersionNumber) > NSFoundationVersionNumber_iOS_8_3) {
                // VoIP stream service type is deprecated post-iOS 8.3; warn once.
                static dispatch_once_t predicate;
                dispatch_once(&predicate, ^{
                    NSLog(@"SocketRocket: %@ - this service type is deprecated in favor of using PushKit for VoIP control", networkServiceType);
                });
            }
#endif
            break;
        }
        case NSURLNetworkServiceTypeVideo:
            networkServiceType = NSStreamNetworkServiceTypeVideo;
            break;
        case NSURLNetworkServiceTypeBackground:
            networkServiceType = NSStreamNetworkServiceTypeBackground;
            break;
        case NSURLNetworkServiceTypeVoice:
            networkServiceType = NSStreamNetworkServiceTypeVoice;
            break;
    }

    if (networkServiceType != nil) {
        [_inputStream setProperty:networkServiceType forKey:NSStreamNetworkServiceType];
        [_outputStream setProperty:networkServiceType forKey:NSStreamNetworkServiceType];
    }
}

// Final step of -open: applies TLS options, schedules the streams on the
// shared network run loop if not already scheduled, and opens both streams.
- (void)openConnection;
{
    [self _updateSecureStreamOptions];

    if (!_scheduledRunloops.count) {
        [self scheduleInRunLoop:[NSRunLoop SR_networkRunLoop] forMode:NSDefaultRunLoopMode];
    }


    [_outputStream open];
    [_inputStream open];
}
// Schedules both streams on the given run loop/mode and records the pair so
// -_pumpWriting can unschedule them all at teardown.
- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
{
    [_outputStream scheduleInRunLoop:aRunLoop forMode:mode];
    [_inputStream scheduleInRunLoop:aRunLoop forMode:mode];

    [_scheduledRunloops addObject:@[aRunLoop, mode]];
}

// Inverse of -scheduleInRunLoop:forMode:.
- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
{
    [_outputStream removeFromRunLoop:aRunLoop forMode:mode];
    [_inputStream removeFromRunLoop:aRunLoop forMode:mode];

    [_scheduledRunloops removeObject:@[aRunLoop, mode]];
}

// Public close: normal closure (1000) with no reason.
- (void)close;
{
    [self closeWithCode:SRStatusCodeNormal reason:nil];
}

// Initiates a client-side close handshake on the work queue: transitions to
// CLOSING and sends a close frame whose payload is the big-endian status code
// followed by the UTF-8 encoded reason (RFC 6455 section 5.5.1).
- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;
{
    assert(code);
    dispatch_async(_workQueue, ^{
        // Idempotent: ignore if a close is already in progress or finished.
        if (self.readyState == SR_CLOSING || self.readyState == SR_CLOSED) {
            return;
        }

        BOOL wasConnecting = self.readyState == SR_CONNECTING;

        self.readyState = SR_CLOSING;

        // NOTE(review): %d with NSInteger 'code' is a format mismatch on 64-bit;
        // debug-log only.
        SRFastLog(@"Closing with code %d reason %@", code, reason);

        if (wasConnecting) {
            // Never finished the handshake: just drop the connection.
            [self closeConnection];
            return;
        }

        // maxMsgSize is an upper bound; payload is trimmed to usedLength below.
        size_t maxMsgSize = [reason maximumLengthOfBytesUsingEncoding:NSUTF8StringEncoding];
        NSMutableData *mutablePayload = [[NSMutableData alloc] initWithLength:sizeof(uint16_t) + maxMsgSize];
        NSData *payload = mutablePayload;

        // First two payload bytes: status code in network byte order.
        ((uint16_t *)mutablePayload.mutableBytes)[0] = EndianU16_BtoN(code);

        if (reason) {
            NSRange remainingRange = {0};

            NSUInteger usedLength = 0;

            BOOL success = [reason getBytes:(char *)mutablePayload.mutableBytes + sizeof(uint16_t) maxLength:payload.length - sizeof(uint16_t) usedLength:&usedLength encoding:NSUTF8StringEncoding options:NSStringEncodingConversionExternalRepresentation range:NSMakeRange(0, reason.length) remainingRange:&remainingRange];
            #pragma unused (success)

            assert(success);
            assert(remainingRange.length == 0);

            if (usedLength != maxMsgSize) {
                payload = [payload subdataWithRange:NSMakeRange(0, usedLength + sizeof(uint16_t))];
            }
        }


        [self _sendFrameWithOpcode:SROpCodeConnectionClose data:payload];
    });
}

// Closes with a protocol-error status, routed through the delegate queue
// first so any already-queued delegate messages are delivered in order.
- (void)_closeWithProtocolError:(NSString *)message;
{
    // Need to shunt this on the _callbackQueue first to see if they received any messages
    [self _performDelegateBlock:^{
        [self closeWithCode:SRStatusCodeProtocolError reason:message];
        dispatch_async(_workQueue, ^{
            [self closeConnection];
        });
    }];
}

// Fails the connection once: notifies the delegate, marks CLOSED, tears down
// the streams, and schedules final cleanup. Safe to call repeatedly.
- (void)_failWithError:(NSError *)error;
{
    dispatch_async(_workQueue, ^{
        if (self.readyState != SR_CLOSED) {
            _failed = YES;
            [self _performDelegateBlock:^{
                if ([self.delegate respondsToSelector:@selector(webSocket:didFailWithError:)]) {
                    [self.delegate webSocket:self didFailWithError:error];
                }
            }];

            self.readyState = SR_CLOSED;

            SRFastLog(@"Failing with error %@", error.localizedDescription);

            [self closeConnection];
            [self _scheduleCleanup];
        }
    });
}

// Appends raw bytes to the output buffer and pumps the writer.
// Silently drops data once a close has been committed.
- (void)_writeData:(NSData *)data;
{
    [self assertOnWorkQueue];

    if (_closeWhenFinishedWriting) {
            return;
    }
    [_outputBuffer appendData:data];
    [self _pumpWriting];
}

// Sends an NSString as a text frame or NSData as a binary frame.
// The copy isolates the frame from later mutation by the caller.
- (void)send:(id)data;
{
    NSAssert(self.readyState != SR_CONNECTING, @"Invalid State: Cannot call send: until connection is open");
    // TODO: maybe not copy this for performance
    data = [data copy];
    dispatch_async(_workQueue, ^{
        if ([data isKindOfClass:[NSString class]]) {
            [self _sendFrameWithOpcode:SROpCodeTextFrame data:[(NSString *)data dataUsingEncoding:NSUTF8StringEncoding]];
        } else if ([data isKindOfClass:[NSData class]]) {
            [self _sendFrameWithOpcode:SROpCodeBinaryFrame data:data];
        } else if (data == nil) {
            // nil data: _sendFrameWithOpcode ignores nil, so this is a no-op.
            [self _sendFrameWithOpcode:SROpCodeTextFrame data:data];
        } else {
            assert(NO);
        }
    });
}

// Sends a ping frame; nil data is normalized to an empty payload.
- (void)sendPing:(NSData *)data;
{
    NSAssert(self.readyState == SR_OPEN, @"Invalid State: Cannot call send: until connection is open");
    // TODO: maybe not copy this for performance
    data = [data copy] ?: [NSData data]; // It's okay for a ping to be empty
    dispatch_async(_workQueue, ^{
        [self _sendFrameWithOpcode:SROpCodePing data:data];
    });
}

// Replies to a server ping with a pong echoing the same payload (RFC 6455 5.5.3).
- (void)handlePing:(NSData *)pingData;
{
    // Need to pingpong this off _callbackQueue first to make sure messages happen in order
    [self _performDelegateBlock:^{
        dispatch_async(_workQueue, ^{
            [self _sendFrameWithOpcode:SROpCodePong data:pingData];
        });
    }];
}

// Forwards a received pong to the optional delegate method.
- (void)handlePong:(NSData *)pongData;
{
    SRFastLog(@"Received pong");
    [self _performDelegateBlock:^{
        if ([self.delegate respondsToSelector:@selector(webSocket:didReceivePong:)]) {
            [self.delegate webSocket:self didReceivePong:pongData];
        }
    }];
}

// Delivers a completed message (NSString or NSData) to the required
// delegate method on the delegate queue.
- (void)_handleMessage:(id)message
{
    SRFastLog(@"Received message");
    [self _performDelegateBlock:^{
        [self.delegate webSocket:self didReceiveMessage:message];
    }];
}


// Returns whether a received close status code is legal per RFC 6455 7.4:
// 1000-1011 excluding the reserved 1004/1005/1006, plus the registered
// (3000-3999) and private-use (4000-4999) ranges.
static inline BOOL closeCodeIsValid(int closeCode) {
    if (closeCode < 1000) {
        return NO;
    }

    if (closeCode >= 1000 && closeCode <= 1011) {
        if (closeCode == 1004 ||
            closeCode == 1005 ||
            closeCode == 1006) {
            return NO;
        }
        return YES;
    }

    if (closeCode >= 3000 && closeCode <= 3999) {
        return YES;
    }

    if (closeCode >= 4000 && closeCode <= 4999) {
        return YES;
    }

    return NO;
}

// Note from RFC:
//
//  If there is a body, the first two
//  bytes of the body MUST be a 2-byte unsigned integer (in network byte
//  order) representing a status code with value /code/ defined in
//  Section 7.4. Following the 2-byte integer the body MAY contain UTF-8
//  encoded data with value /reason/, the interpretation of which is not
//  defined by this specification.
// Parses a received close frame's payload: extracts and validates the 2-byte
// big-endian status code and optional UTF-8 reason, then echoes a close and
// disconnects.
- (void)handleCloseWithData:(NSData *)data;
{
    size_t dataSize = data.length;
    __block uint16_t closeCode = 0;

    SRFastLog(@"Received close frame");

    if (dataSize == 1) {
        // A 1-byte close payload is illegal (code must be 2 bytes).
        // TODO handle error
        [self _closeWithProtocolError:@"Payload for close must be larger than 2 bytes"];
        return;
    } else if (dataSize >= 2) {
        [data getBytes:&closeCode length:sizeof(closeCode)];
        _closeCode = EndianU16_BtoN(closeCode);
        if (!closeCodeIsValid(_closeCode)) {
            [self _closeWithProtocolError:[NSString stringWithFormat:@"Cannot have close code of %d", _closeCode]];
            return;
        }
        if (dataSize > 2) {
            _closeReason = [[NSString alloc] initWithData:[data subdataWithRange:NSMakeRange(2, dataSize - 2)] encoding:NSUTF8StringEncoding];
            if (!_closeReason) {
                [self _closeWithProtocolError:@"Close reason MUST be valid UTF-8"];
                return;
            }
        }
    } else {
        // Empty payload: no status code was sent.
        _closeCode = SRStatusNoStatusReceived;
    }

    [self assertOnWorkQueue];

    if (self.readyState == SR_OPEN) {
        [self closeWithCode:1000 reason:nil];
    }
    dispatch_async(_workQueue, ^{
        [self closeConnection];
    });
}

// Flags the connection for teardown; the actual stream close happens in
// -_pumpWriting once the output buffer drains.
- (void)closeConnection;
{
    [self assertOnWorkQueue];
    SRFastLog(@"Trying to disconnect");
    _closeWhenFinishedWriting = YES;
    [self _pumpWriting];
}

// Dispatches a fully-assembled frame to the right handler by opcode, and
// immediately queues the read of the next frame.
- (void)_handleFrameWithData:(NSData *)frameData opCode:(NSInteger)opcode;
{
    // Check that the current data is valid UTF8

    BOOL isControlFrame = (opcode == SROpCodePing || opcode == SROpCodePong || opcode == SROpCodeConnectionClose);
    if (!isControlFrame) {
        // Data frame finished: reset per-message state and read a new frame.
        [self _readFrameNew];
    } else {
        // Control frame may be interleaved in a fragmented message: keep
        // the current message state and continue reading.
        dispatch_async(_workQueue, ^{
            [self _readFrameContinue];
        });
    }

    //frameData will be copied before passing to handlers
    //otherwise there can be misbehaviours when value at the pointer is changed
    switch (opcode) {
        case SROpCodeTextFrame: {
            if ([self.delegate respondsToSelector:@selector(webSocketShouldConvertTextFrameToString:)] && ![self.delegate webSocketShouldConvertTextFrameToString:self]) {
                [self _handleMessage:[frameData copy]];
            } else {
                NSString *str = [[NSString alloc] initWithData:frameData encoding:NSUTF8StringEncoding];
                if (str == nil && frameData) {
                    // Non-UTF-8 text is a protocol violation (RFC 6455 8.1).
                    [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
                    dispatch_async(_workQueue, ^{
                        [self closeConnection];
                    });
                    return;
                }
                [self _handleMessage:str];
            }
            break;
        }
        case SROpCodeBinaryFrame:
            [self _handleMessage:[frameData copy]];
            break;
        case SROpCodeConnectionClose:
            [self handleCloseWithData:[frameData copy]];
            break;
        case SROpCodePing:
            [self handlePing:[frameData copy]];
            break;
        case SROpCodePong:
            [self handlePong:[frameData copy]];
            break;
        default:
            [self _closeWithProtocolError:[NSString stringWithFormat:@"Unknown opcode %ld", (long)opcode]];
            // TODO: Handle invalid opcode
            break;
    }
}

// Validates a parsed frame header against RFC 6455 fragmentation/control-frame
// rules, then either completes the frame (zero-length payload) or registers a
// consumer to read the payload bytes.
- (void)_handleFrameHeader:(frame_header)frame_header curData:(NSData *)curData;
{
    assert(frame_header.opcode != 0);

    if (self.readyState == SR_CLOSED) {
        return;
    }


    BOOL isControlFrame = (frame_header.opcode == SROpCodePing || frame_header.opcode == SROpCodePong || frame_header.opcode == SROpCodeConnectionClose);

    if (isControlFrame && !frame_header.fin) {
        [self _closeWithProtocolError:@"Fragmented control frames not allowed"];
        return;
    }

    if (isControlFrame && frame_header.payload_length >= 126) {
        [self _closeWithProtocolError:@"Control frames cannot have payloads larger than 126 bytes"];
        return;
    }

    if (!isControlFrame) {
        // Remember the message opcode so continuation frames (opcode 0)
        // inherit it; count fragments for continuation validation.
        _currentFrameOpcode = frame_header.opcode;
        _currentFrameCount += 1;
    }

    if (frame_header.payload_length == 0) {
        if (isControlFrame) {
            [self _handleFrameWithData:curData opCode:frame_header.opcode];
        } else {
            if (frame_header.fin) {
                [self _handleFrameWithData:_currentFrameData opCode:frame_header.opcode];
            } else {
                // TODO add assert that opcode is not a control;
                [self _readFrameContinue];
            }
        }
    } else {
        assert(frame_header.payload_length <= SIZE_T_MAX);
        [self _addConsumerWithDataLength:(size_t)frame_header.payload_length callback:^(SRWebSocket *self, NSData *newData) {
            if (isControlFrame) {
                [self _handleFrameWithData:newData opCode:frame_header.opcode];
            } else {
                if (frame_header.fin) {
                    [self _handleFrameWithData:self->_currentFrameData opCode:frame_header.opcode];
                } else {
                    // TODO add assert that opcode is not a control;
                    [self _readFrameContinue];
                }

            }
        } readToCurrentFrame:!isControlFrame unmaskBytes:frame_header.masked];
    }
}

/* From RFC 6455 section 5.2 — base frame layout:

      0                   1                   2                   3
      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     +-+-+-+-+-------+-+-------------+-------------------------------+
     |F|R|R|R| opcode|M| Payload len |    Extended payload length    |
     |I|S|S|S|  (4)  |A|     (7)     |             (16/64)           |
     |N|V|V|V|       |S|             |   (if payload len==126/127)   |
     | |1|2|3|       |K|             |                               |
     +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
     |     Extended payload length continued, if payload len == 127  |
     + - - - - - - - - - - - - - - - +-------------------------------+
     |                               |Masking-key, if MASK set to 1  |
     +-------------------------------+-------------------------------+
     | Masking-key (continued)       |          Payload Data         |
     +-------------------------------- - - - - - - - - - - - - - - - +
     :                     Payload Data continued ...                :
     + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
     |                     Payload Data continued ...                |
     +---------------------------------------------------------------+
 */

// Bit masks for the first two bytes of a frame header (diagram above).
static const uint8_t SRFinMask          = 0x80;
static const uint8_t SROpCodeMask       = 0x0F;
static const uint8_t SRRsvMask          = 0x70;
static const uint8_t SRMaskMask         = 0x80;
static const uint8_t SRPayloadLenMask   = 0x7F;


// Reads the next frame header: first the 2 fixed header bytes, then (nested
// consumer) any extended payload length and masking key, then hands the
// completed header to -_handleFrameHeader:curData:.
- (void)_readFrameContinue;
{
    assert((_currentFrameCount == 0 && _currentFrameOpcode == 0) || (_currentFrameCount > 0 && _currentFrameOpcode > 0));

    [self _addConsumerWithDataLength:2 callback:^(SRWebSocket *self, NSData *data) {
        __block frame_header header = {0};

        const uint8_t *headerBuffer = (uint8_t *)data.bytes;
        assert(data.length >= 2);

        if (headerBuffer[0] & SRRsvMask) {
            // No extensions negotiated, so any RSV bit is a protocol error.
            [self _closeWithProtocolError:@"Server used RSV bits"];
            return;
        }

        uint8_t receivedOpcode = (SROpCodeMask & headerBuffer[0]);

        BOOL isControlFrame = (receivedOpcode == SROpCodePing || receivedOpcode == SROpCodePong || receivedOpcode == SROpCodeConnectionClose);

        if (!isControlFrame && receivedOpcode != 0 && self->_currentFrameCount > 0) {
            [self _closeWithProtocolError:@"all data frames after the initial data frame must have opcode 0"];
            return;
        }

        if (receivedOpcode == 0 && self->_currentFrameCount == 0) {
            [self _closeWithProtocolError:@"cannot continue a message"];
            return;
        }

        // Continuation frames (opcode 0) inherit the message's opcode.
        header.opcode = receivedOpcode == 0 ? self->_currentFrameOpcode : receivedOpcode;

        header.fin = !!(SRFinMask & headerBuffer[0]);


        header.masked = !!(SRMaskMask & headerBuffer[1]);
        header.payload_length = SRPayloadLenMask & headerBuffer[1];

        headerBuffer = NULL;

        if (header.masked) {
            // Server-to-client frames must be unmasked (RFC 6455 5.1).
            [self _closeWithProtocolError:@"Client must receive unmasked data"];
        }

        size_t extra_bytes_needed = header.masked ? sizeof(_currentReadMaskKey) : 0;

        // 126 -> 16-bit extended length follows; 127 -> 64-bit.
        if (header.payload_length == 126) {
            extra_bytes_needed += sizeof(uint16_t);
        } else if (header.payload_length == 127) {
            extra_bytes_needed += sizeof(uint64_t);
        }

        if (extra_bytes_needed == 0) {
            [self _handleFrameHeader:header curData:self->_currentFrameData];
        } else {
            [self _addConsumerWithDataLength:extra_bytes_needed callback:^(SRWebSocket *self, NSData *data) {
                size_t mapped_size = data.length;
                #pragma unused (mapped_size)
                const void *mapped_buffer = data.bytes;
                size_t offset = 0;

                if (header.payload_length == 126) {
                    assert(mapped_size >= sizeof(uint16_t));
                    uint16_t newLen = EndianU16_BtoN(*(uint16_t *)(mapped_buffer));
                    header.payload_length = newLen;
                    offset += sizeof(uint16_t);
                } else if (header.payload_length == 127) {
                    assert(mapped_size >= sizeof(uint64_t));
                    header.payload_length = EndianU64_BtoN(*(uint64_t *)(mapped_buffer));
                    offset += sizeof(uint64_t);
                } else {
                    assert(header.payload_length < 126 && header.payload_length >= 0);
                }

                if (header.masked) {
                    assert(mapped_size >= sizeof(_currentReadMaskOffset) + offset);
                    memcpy(self->_currentReadMaskKey, ((uint8_t *)mapped_buffer) + offset, sizeof(self->_currentReadMaskKey));
                }

                [self _handleFrameHeader:header curData:self->_currentFrameData];
            } readToCurrentFrame:NO unmaskBytes:NO];
        }
    } readToCurrentFrame:NO unmaskBytes:NO];
}

// Resets per-message accumulation state and starts reading a new frame.
- (void)_readFrameNew;
{
    dispatch_async(_workQueue, ^{
        [_currentFrameData setLength:0];

        _currentFrameOpcode = 0;
        _currentFrameCount = 0;
        _readOpCount = 0;
        _currentStringScanPosition = 0;

        [self _readFrameContinue];
    });
}

// Flushes buffered output to the stream; compacts the buffer once the
// consumed prefix is large; when a close is pending and the buffer is empty,
// closes both streams, notifies the delegate (if not failed), and schedules
// cleanup.
- (void)_pumpWriting;
{
    [self assertOnWorkQueue];

    NSUInteger dataLength = _outputBuffer.length;
    if (dataLength - _outputBufferOffset > 0 && _outputStream.hasSpaceAvailable) {
        NSInteger bytesWritten = [_outputStream write:(uint8_t *)_outputBuffer.bytes + _outputBufferOffset maxLength:dataLength - _outputBufferOffset];
        if (bytesWritten == -1) {
            [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2145 userInfo:[NSDictionary dictionaryWithObject:@"Error writing to stream" forKey:NSLocalizedDescriptionKey]]];
            return;
        }

        _outputBufferOffset += bytesWritten;

        // Reclaim memory once >4KB has been written and more than half the
        // buffer is dead prefix.
        if (_outputBufferOffset > 4096 && _outputBufferOffset > (_outputBuffer.length >> 1)) {
            _outputBuffer = [[NSMutableData alloc] initWithBytes:(char *)_outputBuffer.bytes + _outputBufferOffset length:_outputBuffer.length - _outputBufferOffset];
            _outputBufferOffset = 0;
        }
    }

    if (_closeWhenFinishedWriting &&
        _outputBuffer.length - _outputBufferOffset == 0 &&
        (_inputStream.streamStatus != NSStreamStatusNotOpen &&
         _inputStream.streamStatus != NSStreamStatusClosed) &&
        !_sentClose) {
        _sentClose = YES;

        @synchronized(self) {
            [_outputStream close];
            [_inputStream close];


            for (NSArray *runLoop in [_scheduledRunloops copy]) {
                [self unscheduleFromRunLoop:[runLoop objectAtIndex:0] forMode:[runLoop objectAtIndex:1]];
            }
        }

        if (!_failed) {
            [self _performDelegateBlock:^{
                if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
                    [self.delegate webSocket:self didCloseWithCode:_closeCode reason:_closeReason wasClean:YES];
                }
            }];
        }

        [self _scheduleCleanup];
    }
}

// Registers a scanner-based consumer (no fixed byte count).
- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback;
{
    [self assertOnWorkQueue];
    [self _addConsumerWithScanner:consumer callback:callback dataLength:0];
}

// Registers a fixed-length consumer; optionally accumulates into the current
// frame and/or unmasks bytes as they arrive.
- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
{
    [self assertOnWorkQueue];
    assert(dataLength);

    [_consumers addObject:[_consumerPool consumerWithScanner:nil handler:callback bytesNeeded:dataLength readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes]];
    [self _pumpScanner];
}

// Registers a consumer with both a scanner and a length hint.
- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback dataLength:(size_t)dataLength;
{
    [self assertOnWorkQueue];
    [_consumers addObject:[_consumerPool consumerWithScanner:consumer handler:callback bytesNeeded:dataLength readToCurrentFrame:NO unmaskBytes:NO]];
    [self _pumpScanner];
}


// Schedules the final self-reference cleanup on the network run loop exactly
// once (guarded by _cleanupScheduled).
- (void)_scheduleCleanup
{
    @synchronized(self) {
        if (_cleanupScheduled) {
            return;
        }

        _cleanupScheduled = YES;

        // Cleanup NSStream delegate's in the same RunLoop used by the streams themselves:
        // This way we'll prevent race conditions between handleEvent and SRWebsocket's dealloc
        NSTimer *timer = [NSTimer timerWithTimeInterval:(0.0f) target:self selector:@selector(_cleanupSelfReference:) userInfo:nil repeats:NO];
        [[NSRunLoop SR_networkRunLoop] addTimer:timer forMode:NSDefaultRunLoopMode];
    }
}

// Timer callback: detaches and closes the streams on the network run loop,
// then releases the _selfRetain taken in -open (on the work queue).
- (void)_cleanupSelfReference:(NSTimer *)timer
{
    @synchronized(self) {
        // Nuke NSStream delegate's
        _inputStream.delegate = nil;
        _outputStream.delegate = nil;

        // Remove the streams, right now, from the networkRunLoop
        [_inputStream close];
        [_outputStream close];
    }

    // Cleanup selfRetain in the same GCD queue as usual
    dispatch_async(_workQueue, ^{
        _selfRetain = nil;
    });
}


// Delimiter terminating an HTTP header block.
static const char CRLFCRLFBytes[] = {'\r', '\n', '\r', '\n'};

// Reads from the stream until the CRLFCRLF header terminator is seen.
- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler;
{
    [self _readUntilBytes:CRLFCRLFBytes length:sizeof(CRLFCRLFBytes) callback:dataHandler];
}

// Adds a scanner consumer that matches the given byte sequence; the scanner
// returns the number of bytes consumed through the end of the match, or 0 if
// the sequence has not appeared yet.
- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler;
{
    // TODO optimize so this can continue from where we last searched
    stream_scanner consumer = ^size_t(NSData *data) {
        __block size_t found_size = 0;
        __block size_t match_count = 0;

        size_t size = data.length;
        const unsigned char *buffer = (unsigned char *)data.bytes;
        for (size_t i = 0; i < size; i++ ) {
            if (((const unsigned char *)buffer)[i] == ((const unsigned char *)bytes)[match_count]) {
                match_count += 1;
                if (match_count == length) {
                    found_size = i + 1;
                    break;
                }
            } else {
                match_count = 0;
            }
        }
        return found_size;
    };
    [self _addConsumerWithScanner:consumer callback:dataHandler];
}


// Returns true if did work
// Core of the consumer pump: feeds buffered read bytes to the head consumer
// (scanner- or length-based), unmasking and/or accumulating into the current
// frame as configured, incrementally validating UTF-8 for text frames, and
// firing the consumer's handler when satisfied.
- (BOOL)_innerPumpScanner {

    BOOL didWork = NO;

    if (self.readyState >= SR_CLOSED) {
        return didWork;
    }

    if (!_consumers.count) {
        return didWork;
    }

    size_t curSize = _readBuffer.length - _readBufferOffset;
    if (!curSize) {
        return didWork;
    }

    SRIOConsumer *consumer = [_consumers objectAtIndex:0];

    size_t bytesNeeded = consumer.bytesNeeded;

    size_t foundSize = 0;
    if (consumer.consumer) {
        // Scanner consumer: give it a no-copy view of the unread bytes.
        NSData *tempView = [NSData dataWithBytesNoCopy:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset freeWhenDone:NO];
        foundSize = consumer.consumer(tempView);
    } else {
        assert(consumer.bytesNeeded);
        if (curSize >= bytesNeeded) {
            foundSize = bytesNeeded;
        } else if (consumer.readToCurrentFrame) {
            // Frame-accumulating consumer can take partial data.
            foundSize = curSize;
        }
    }

    NSData *slice = nil;
    if (consumer.readToCurrentFrame || foundSize) {
        NSRange sliceRange = NSMakeRange(_readBufferOffset, foundSize);
        slice = [_readBuffer subdataWithRange:sliceRange];

        _readBufferOffset += foundSize;

        // Same >4KB / >half-dead compaction heuristic as _pumpWriting.
        if (_readBufferOffset > 4096 && _readBufferOffset > (_readBuffer.length >> 1)) {
            _readBuffer = [[NSMutableData alloc] initWithBytes:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset];            _readBufferOffset = 0;
        }

        if (consumer.unmaskBytes) {
            // XOR with the 4-byte mask key, continuing from the running offset.
            NSMutableData *mutableSlice = [slice mutableCopy];

            NSUInteger len = mutableSlice.length;
            uint8_t *bytes = (uint8_t *)mutableSlice.mutableBytes;

            for (NSUInteger i = 0; i < len; i++) {
                bytes[i] = bytes[i] ^ _currentReadMaskKey[_currentReadMaskOffset % sizeof(_currentReadMaskKey)];
                _currentReadMaskOffset += 1;
            }

            slice = mutableSlice;
        }

        if (consumer.readToCurrentFrame) {
            [_currentFrameData appendData:slice];

            _readOpCount += 1;

            if (_currentFrameOpcode == SROpCodeTextFrame) {
                // Validate UTF8 stuff.
                size_t currentDataSize = _currentFrameData.length;
                if (_currentFrameOpcode == SROpCodeTextFrame && currentDataSize > 0) {
                    // TODO: Optimize the crap out of this.  Don't really have to copy all the data each time

                    size_t scanSize = currentDataSize - _currentStringScanPosition;

                    NSData *scan_data = [_currentFrameData subdataWithRange:NSMakeRange(_currentStringScanPosition, scanSize)];
                    int32_t valid_utf8_size = validate_dispatch_data_partial_string(scan_data);

                    if (valid_utf8_size == -1) {
                        [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
                        dispatch_async(_workQueue, ^{
                            [self closeConnection];
                        });
                        return didWork;
                    } else {
                        _currentStringScanPosition += valid_utf8_size;
                    }
                }

            }

            consumer.bytesNeeded -= foundSize;

            if (consumer.bytesNeeded == 0) {
                [_consumers removeObjectAtIndex:0];
                // Handler gets nil: the payload already lives in _currentFrameData.
                consumer.handler(self, nil);
                [_consumerPool returnConsumer:consumer];
                didWork = YES;
            }
        } else if (foundSize) {
            [_consumers removeObjectAtIndex:0];
            consumer.handler(self, slice);
            [_consumerPool returnConsumer:consumer];
            didWork = YES;
        }
    }
    return didWork;
}

// Drains the scanner until it makes no more progress; _isPumping guards
// against re-entrancy from handlers that add consumers.
-(void)_pumpScanner;
{
    [self assertOnWorkQueue];

    if (!_isPumping) {
        _isPumping = YES;
    } else {
        return;
    }

    while ([self _innerPumpScanner]) {

    }

    _isPumping = NO;
}

//#define NOMASK

// Worst-case header size budget: 2 header bytes + 8 extended length + 4 mask
// fits well within 32.
static const size_t SRFrameHeaderOverhead = 32;

// Serializes one frame (FIN set, given opcode) with client masking per
// RFC 6455 5.2/5.3 and queues it for writing. nil data is a no-op.
- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data;
{
    [self assertOnWorkQueue];

    if (nil == data) {
        return;
    }

    NSAssert([data isKindOfClass:[NSData class]] || [data isKindOfClass:[NSString class]], @"NSString or NSData");

    size_t payloadLength = [data isKindOfClass:[NSString class]] ? [(NSString *)data lengthOfBytesUsingEncoding:NSUTF8StringEncoding] : [data length];

    NSMutableData *frame = [[NSMutableData alloc] initWithLength:payloadLength + SRFrameHeaderOverhead];
    if (!frame) {
        [self closeWithCode:SRStatusCodeMessageTooBig reason:@"Message too big"];
        return;
    }
    uint8_t *frame_buffer = (uint8_t *)[frame mutableBytes];

    // set fin
    frame_buffer[0] = SRFinMask | opcode;

    BOOL useMask = YES;
#ifdef NOMASK
    useMask = NO;
#endif

    if (useMask) {
        // set the mask and header
        frame_buffer[1] |= SRMaskMask;
    }

    size_t frame_buffer_size = 2;

    const uint8_t *unmasked_payload = NULL;
    if ([data isKindOfClass:[NSData class]]) {
        unmasked_payload = (uint8_t *)[data bytes];
    } else if ([data isKindOfClass:[NSString class]]) {
        unmasked_payload = (const uint8_t *)[data UTF8String];
    } else {
        return;
    }

    // Payload length encoding: 7-bit inline, 126 + 16-bit, or 127 + 64-bit.
    if (payloadLength < 126) {
        frame_buffer[1] |= payloadLength;
    } else if (payloadLength <= UINT16_MAX) {
        frame_buffer[1] |= 126;
        *((uint16_t *)(frame_buffer + frame_buffer_size)) = EndianU16_BtoN((uint16_t)payloadLength);
        frame_buffer_size += sizeof(uint16_t);
    } else {
        frame_buffer[1] |= 127;
        *((uint64_t *)(frame_buffer + frame_buffer_size)) = EndianU64_BtoN((uint64_t)payloadLength);
        frame_buffer_size += sizeof(uint64_t);
    }

    if (!useMask) {
        for (size_t i = 0; i < payloadLength; i++) {
            frame_buffer[frame_buffer_size] = unmasked_payload[i];
            frame_buffer_size += 1;
        }
    } else {
        // Random 4-byte mask key, then XOR-mask the payload in place.
        uint8_t *mask_key = frame_buffer + frame_buffer_size;
        SecRandomCopyBytes(kSecRandomDefault, sizeof(uint32_t), (uint8_t *)mask_key);
        frame_buffer_size += sizeof(uint32_t);

        // TODO: could probably optimize this with SIMD
        for (size_t i = 0; i < payloadLength; i++) {
            frame_buffer[frame_buffer_size] = unmasked_payload[i] ^ mask_key[i % sizeof(uint32_t)];
            frame_buffer_size += 1;
        }
    }

    assert(frame_buffer_size <= [frame length]);
    frame.length = frame_buffer_size;

    [self _writeData:frame];
}

-
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode;
{
    // This callback arrives on the network runloop thread. Real work happens on
    // _workQueue; the only thing done inline here is SSL certificate pinning,
    // which must complete before any stream data is trusted.
    __weak typeof(self) weakSelf = self;

    if (_secure && !_pinnedCertFound && (eventCode == NSStreamEventHasBytesAvailable || eventCode == NSStreamEventHasSpaceAvailable)) {

        NSArray *pinnedCerts = [_urlRequest SR_SSLPinnedCertificates];
        if (pinnedCerts) {
            SecTrustRef peerTrust = (__bridge SecTrustRef)[aStream propertyForKey:(__bridge id)kCFStreamPropertySSLPeerTrust];
            if (peerTrust) {
                NSInteger certificateCount = SecTrustGetCertificateCount(peerTrust);
                for (NSInteger certIndex = 0; certIndex < certificateCount && !_pinnedCertFound; certIndex++) {
                    SecCertificateRef peerCert = SecTrustGetCertificateAtIndex(peerTrust, certIndex);
                    NSData *peerCertData = CFBridgingRelease(SecCertificateCopyData(peerCert));

                    // Compare the DER bytes of each pinned certificate against
                    // this element of the server's chain.
                    for (id pinnedRef in pinnedCerts) {
                        SecCertificateRef pinnedCert = (__bridge SecCertificateRef)pinnedRef;
                        NSData *pinnedCertData = CFBridgingRelease(SecCertificateCopyData(pinnedCert));

                        if ([pinnedCertData isEqualToData:peerCertData]) {
                            _pinnedCertFound = YES;
                            break;
                        }
                    }
                }
            }

            if (!_pinnedCertFound) {
                // No pinned certificate matched anything in the chain: fail hard.
                dispatch_async(_workQueue, ^{
                    NSDictionary *userInfo = @{ NSLocalizedDescriptionKey : @"Invalid server cert" };
                    [weakSelf _failWithError:[NSError errorWithDomain:@"org.lolrus.SocketRocket" code:23556 userInfo:userInfo]];
                });
                return;
            } else if (aStream == _outputStream) {
                // Pinning succeeded; treat the connection as established.
                dispatch_async(_workQueue, ^{
                    [self didConnect];
                });
            }
        }
    }

    dispatch_async(_workQueue, ^{
        [weakSelf safeHandleEvent:eventCode stream:aStream];
    });
}

- (void)safeHandleEvent:(NSStreamEvent)eventCode stream:(NSStream *)aStream
{
    // Runs on _workQueue. Dispatches on the stream event and drives the
    // reader/writer pumps accordingly.
    switch (eventCode) {
        case NSStreamEventOpenCompleted: {
            SRFastLog(@"NSStreamEventOpenCompleted %@", aStream);
            if (self.readyState >= SR_CLOSING) {
                return;
            }
            assert(_readBuffer);

            // didConnect fires after certificate verification if we're using pinned certificates.
            BOOL usingPinnedCerts = [[_urlRequest SR_SSLPinnedCertificates] count] > 0;
            if ((!_secure || !usingPinnedCerts) && self.readyState == SR_CONNECTING && aStream == _inputStream) {
                [self didConnect];
            }
            [self _pumpWriting];
            [self _pumpScanner];
            break;
        }

        case NSStreamEventErrorOccurred: {
            SRFastLog(@"NSStreamEventErrorOccurred %@ %@", aStream, [[aStream streamError] copy]);
            /// TODO specify error better!
            [self _failWithError:aStream.streamError];
            // Discard anything buffered; the connection is dead.
            _readBufferOffset = 0;
            [_readBuffer setLength:0];
            break;
        }

        case NSStreamEventEndEncountered: {
            // Give the scanner one last chance to consume buffered frames.
            [self _pumpScanner];
            SRFastLog(@"NSStreamEventEndEncountered %@", aStream);
            if (aStream.streamError) {
                [self _failWithError:aStream.streamError];
            } else {
                dispatch_async(_workQueue, ^{
                    if (self.readyState != SR_CLOSED) {
                        self.readyState = SR_CLOSED;
                        [self _scheduleCleanup];
                    }

                    if (!_sentClose && !_failed) {
                        _sentClose = YES;
                        // If we get closed in this state it's probably not clean because we should be sending this when we send messages
                        [self _performDelegateBlock:^{
                            if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
                                [self.delegate webSocket:self didCloseWithCode:SRStatusCodeGoingAway reason:@"Stream end encountered" wasClean:NO];
                            }
                        }];
                    }
                });
            }
            break;
        }

        case NSStreamEventHasBytesAvailable: {
            SRFastLog(@"NSStreamEventHasBytesAvailable %@", aStream);
            const int bufferSize = 2048;
            uint8_t buffer[bufferSize];

            // Drain the input stream in fixed-size chunks into _readBuffer.
            // A short read means the stream is (momentarily) empty, so stop.
            while (_inputStream.hasBytesAvailable) {
                NSInteger bytesRead = [_inputStream read:buffer maxLength:bufferSize];

                if (bytesRead > 0) {
                    [_readBuffer appendBytes:buffer length:bytesRead];
                } else if (bytesRead < 0) {
                    [self _failWithError:_inputStream.streamError];
                }

                if (bytesRead != bufferSize) {
                    break;
                }
            }
            [self _pumpScanner];
            break;
        }

        case NSStreamEventHasSpaceAvailable: {
            SRFastLog(@"NSStreamEventHasSpaceAvailable %@", aStream);
            [self _pumpWriting];
            break;
        }

        default:
            SRFastLog(@"(default) %@", aStream);
            break;
    }
}

@end


@implementation SRIOConsumer

@synthesize bytesNeeded = _bytesNeeded;
@synthesize consumer = _scanner;
@synthesize handler = _handler;
@synthesize readToCurrentFrame = _readToCurrentFrame;
@synthesize unmaskBytes = _unmaskBytes;

// Re-initializes a (possibly pooled) consumer in place. Exactly one of
// scanner / bytesNeeded must be provided so the consumer knows when it's done.
- (void)setupWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
{
    _scanner = [scanner copy];
    _handler = [handler copy];
    _bytesNeeded = bytesNeeded;
    _readToCurrentFrame = readToCurrentFrame;
    _unmaskBytes = unmaskBytes;
    assert(_scanner || _bytesNeeded);
}


@end


// A small free-list of SRIOConsumer objects so the read loop doesn't allocate
// one per frame. Capacity-bounded; surplus consumers are simply dropped.
@implementation SRIOConsumerPool {
    NSUInteger _poolSize;
    NSMutableArray *_bufferedConsumers;
}

- (id)initWithBufferCapacity:(NSUInteger)poolSize;
{
    self = [super init];
    if (self) {
        _poolSize = poolSize;
        _bufferedConsumers = [[NSMutableArray alloc] initWithCapacity:poolSize];
    }
    return self;
}

- (id)init
{
    // Default capacity of 8 pooled consumers.
    return [self initWithBufferCapacity:8];
}

- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
{
    // Reuse a pooled consumer when available, otherwise allocate a fresh one.
    SRIOConsumer *consumer;
    if (_bufferedConsumers.count) {
        consumer = [_bufferedConsumers lastObject];
        [_bufferedConsumers removeLastObject];
    } else {
        consumer = [[SRIOConsumer alloc] init];
    }

    [consumer setupWithScanner:scanner handler:handler bytesNeeded:bytesNeeded readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes];

    return consumer;
}

- (void)returnConsumer:(SRIOConsumer *)consumer;
{
    // Keep the consumer only while under capacity; otherwise let ARC free it.
    if (_bufferedConsumers.count < _poolSize) {
        [_bufferedConsumers addObject:consumer];
    }
}

@end
@implementation NSMutableURLRequest (SRCertificateAdditions)

// Pinned certificates ride along on the request via NSURLProtocol's
// per-request property storage, so no subclassing is required.
- (NSArray *)SR_SSLPinnedCertificates;
{
    return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self];
}

- (void)setSR_SSLPinnedCertificates:(NSArray *)SR_SSLPinnedCertificates;
{
    [NSURLProtocol setProperty:SR_SSLPinnedCertificates forKey:@"SR_SSLPinnedCertificates" inRequest:self];
}

@end

@implementation NSURL (SRWebSocket)

// Builds the HTTP(S) origin string for a ws:// or wss:// URL, omitting the
// port when it is the scheme's default (80 for http, 443 for https).
- (NSString *)SR_origin;
{
    NSString *scheme = [self.scheme lowercaseString];

    // Map the WebSocket schemes onto their HTTP counterparts.
    if ([scheme isEqualToString:@"wss"]) {
        scheme = @"https";
    } else if ([scheme isEqualToString:@"ws"]) {
        scheme = @"http";
    }

    BOOL portIsDefault = !self.port ||
        ([scheme isEqualToString:@"http"] && self.port.integerValue == 80) ||
        ([scheme isEqualToString:@"https"] && self.port.integerValue == 443);

    if (!portIsDefault) {
        return [NSString stringWithFormat:@"%@://%@:%@", scheme, self.host, self.port];
    } else {
        return [NSString stringWithFormat:@"%@://%@", scheme, self.host];
    }
}

@end

//#define SR_ENABLE_LOG

// Compiled to a no-op unless SR_ENABLE_LOG is defined.
static inline void SRFastLog(NSString *format, ...) {
#ifdef SR_ENABLE_LOG
    __block va_list arg_list;
    va_start (arg_list, format);

    NSString *formattedString = [[NSString alloc] initWithFormat:format arguments:arg_list];

    va_end(arg_list);

    NSLog(@"[SR] %@", formattedString);
#endif
}
// Shared background thread (and its runloop) on which all SRWebSocket stream
// I/O is scheduled. Created lazily, exactly once, and never torn down.
static _SRRunLoopThread *networkThread = nil;
static NSRunLoop *networkRunLoop = nil;

@implementation NSRunLoop (SRWebSocket)

+ (NSRunLoop *)SR_networkRunLoop {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        networkThread = [[_SRRunLoopThread alloc] init];
        networkThread.name = @"com.squareup.SocketRocket.NetworkThread";
        [networkThread start];
        // -runLoop blocks until the thread has created its runloop.
        networkRunLoop = networkThread.runLoop;
    });

    return networkRunLoop;
}

@end
@implementation _SRRunLoopThread {
    // Signalled (via group leave) once the thread's runloop exists, so
    // -runLoop can block until it is safe to hand the runloop out.
    dispatch_group_t _waitGroup;
}

@synthesize runLoop = _runLoop;

- (void)dealloc
{
    // sr_dispatch_release is a project macro; it is a no-op where ARC manages
    // dispatch objects.
    sr_dispatch_release(_waitGroup);
}

- (id)init
{
    self = [super init];
    if (self) {
        _waitGroup = dispatch_group_create();
        dispatch_group_enter(_waitGroup);
    }
    return self;
}

- (void)main;
{
    @autoreleasepool {
        _runLoop = [NSRunLoop currentRunLoop];
        // Unblock any -runLoop callers now that the loop exists.
        dispatch_group_leave(_waitGroup);

        // Add an empty run loop source to prevent runloop from spinning.
        CFRunLoopSourceContext emptySourceContext = {
            .version = 0,
            .info = NULL,
            .retain = NULL,
            .release = NULL,
            .copyDescription = NULL,
            .equal = NULL,
            .hash = NULL,
            .schedule = NULL,
            .cancel = NULL,
            .perform = NULL
        };
        CFRunLoopSourceRef keepAliveSource = CFRunLoopSourceCreate(NULL, 0, &emptySourceContext);
        CFRunLoopAddSource(CFRunLoopGetCurrent(), keepAliveSource, kCFRunLoopDefaultMode);
        CFRelease(keepAliveSource);

        // Run forever; the empty source keeps runMode from returning NO.
        while ([_runLoop runMode:NSDefaultRunLoopMode beforeDate:[NSDate distantFuture]]) {

        }
        // The loop above is not expected to exit.
        assert(NO);
    }
}

- (NSRunLoop *)runLoop;
{
    // Block until -main has published _runLoop.
    dispatch_group_wait(_waitGroup, DISPATCH_TIME_FOREVER);
    return _runLoop;
}

@end
+ Github: https://github.com/EasyDarwin + WEChat: EasyDarwin + Website: http://www.easydarwin.org +*/ + +#ifndef __G_711_H_ +#define __G_711_H_ + +#include + +enum _e_g711_tp +{ + TP_ALAW, //G711A + TP_ULAW //G711U +}; + +unsigned char linear2alaw(int pcm_val); /* 2's complement (16-bit range) */ +int alaw2linear(unsigned char a_val); + +unsigned char linear2ulaw(int pcm_val); /* 2's complement (16-bit range) */ +int ulaw2linear(unsigned char u_val); + +unsigned char alaw2ulaw(unsigned char aval); +unsigned char ulaw2alaw(unsigned char uval); + +int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type); + +#endif diff --git a/OrderScheduling/Video/VideoTools/g711.m b/OrderScheduling/Video/VideoTools/g711.m new file mode 100644 index 0000000..102ef1c --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g711.m @@ -0,0 +1,306 @@ +/* + * g711.c + * + * u-law, A-law and linear PCM conversions. + */ + +//#include "stdafx.h" +#include +#include +#include "g711.h" + +#define SIGN_BIT (0x80) /* Sign bit for a A-law byte. */ +#define QUANT_MASK (0xf) /* Quantization field mask. */ +#define NSEGS (8) /* Number of A-law segments. */ +#define SEG_SHIFT (4) /* Left shift for segment number. */ +#define SEG_MASK (0x70) /* Segment field mask. 
/*
 * Returns the index of the first table entry that is >= val, or `size`
 * if val exceeds every entry. Used to map a sample magnitude onto its
 * companding segment number.
 */
static int
search(
       int val,
       short *table,
       int size)
{
    int idx;

    for (idx = 0; idx < size; idx++) {
        if (val <= table[idx]) {
            return (idx);
        }
    }
    return (size);
}
+ * + * Linear Input Code Compressed Code + * ------------------------ --------------- + * 0000000wxyza 000wxyz + * 0000001wxyza 001wxyz + * 000001wxyzab 010wxyz + * 00001wxyzabc 011wxyz + * 0001wxyzabcd 100wxyz + * 001wxyzabcde 101wxyz + * 01wxyzabcdef 110wxyz + * 1wxyzabcdefg 111wxyz + * + * For further information see John C. Bellamy's Digital Telephony, 1982, + * John Wiley & Sons, pps 98-111 and 472-476. + */ +unsigned char +linear2alaw( + int pcm_val) /* 2's complement (16-bit range) */ +{ + int mask; + int seg; + unsigned char aval; + + if (pcm_val >= 0) { + mask = 0xD5; /* sign (7th) bit = 1 */ + } else { + mask = 0x55; /* sign bit = 0 */ + pcm_val = -pcm_val - 8; + } + + /* Convert the scaled magnitude to segment number. */ + seg = search(pcm_val, seg_end, 8); + + /* Combine the sign, segment, and quantization bits. */ + + if (seg >= 8) /* out of range, return maximum value. */ + return (0x7F ^ mask); + else { + aval = seg << SEG_SHIFT; + if (seg < 2) + aval |= (pcm_val >> 4) & QUANT_MASK; + else + aval |= (pcm_val >> (seg + 3)) & QUANT_MASK; + return (aval ^ mask); + } +} + +/* + * alaw2linear() - Convert an A-law value to 16-bit linear PCM + * + */ +int +alaw2linear( + unsigned char a_val) +{ + int t; + int seg; + + a_val ^= 0x55; + + t = (a_val & QUANT_MASK) << 4; + seg = ((unsigned)a_val & SEG_MASK) >> SEG_SHIFT; + switch (seg) { + case 0: + t += 8; + break; + case 1: + t += 0x108; + break; + default: + t += 0x108; + t <<= seg - 1; + } + return ((a_val & SIGN_BIT) ? t : -t); +} + +#define BIAS (0x84) /* Bias for linear code. */ + +/* + * linear2ulaw() - Convert a linear PCM value to u-law + * + * In order to simplify the encoding process, the original linear magnitude + * is biased by adding 33 which shifts the encoding range from (0 - 8158) to + * (33 - 8191). 
The result can be seen in the following encoding table: + * + * Biased Linear Input Code Compressed Code + * ------------------------ --------------- + * 00000001wxyza 000wxyz + * 0000001wxyzab 001wxyz + * 000001wxyzabc 010wxyz + * 00001wxyzabcd 011wxyz + * 0001wxyzabcde 100wxyz + * 001wxyzabcdef 101wxyz + * 01wxyzabcdefg 110wxyz + * 1wxyzabcdefgh 111wxyz + * + * Each biased linear code has a leading 1 which identifies the segment + * number. The value of the segment number is equal to 7 minus the number + * of leading 0's. The quantization interval is directly available as the + * four bits wxyz. * The trailing bits (a - h) are ignored. + * + * Ordinarily the complement of the resulting code word is used for + * transmission, and so the code word is complemented before it is returned. + * + * For further information see John C. Bellamy's Digital Telephony, 1982, + * John Wiley & Sons, pps 98-111 and 472-476. + */ +unsigned char +linear2ulaw( + int pcm_val) /* 2's complement (16-bit range) */ +{ + int mask; + int seg; + unsigned char uval; + + /* Get the sign and the magnitude of the value. */ + if (pcm_val < 0) { + pcm_val = BIAS - pcm_val; + mask = 0x7F; + } else { + pcm_val += BIAS; + mask = 0xFF; + } + + /* Convert the scaled magnitude to segment number. */ + seg = search(pcm_val, seg_end, 8); + + /* + * Combine the sign, segment, quantization bits; + * and complement the code word. + */ + if (seg >= 8) /* out of range, return maximum value. */ + return (0x7F ^ mask); + else { + uval = (seg << 4) | ((pcm_val >> (seg + 3)) & 0xF); + return (uval ^ mask); + } + +} + +/* + * ulaw2linear() - Convert a u-law value to 16-bit linear PCM + * + * First, a biased linear code is derived from the code word. An unbiased + * output can then be obtained by subtracting 33 from the biased code. + * + * Note that this function expects to be passed the complement of the + * original code word. This is in keeping with ISDN conventions. 
+ */ +int +ulaw2linear( + unsigned char u_val) +{ + int t; + + /* Complement to obtain normal u-law value. */ + u_val = ~u_val; + + /* + * Extract and bias the quantization bits. Then + * shift up by the segment number and subtract out the bias. + */ + t = ((u_val & QUANT_MASK) << 3) + BIAS; + t <<= ((unsigned)u_val & SEG_MASK) >> SEG_SHIFT; + + return ((u_val & SIGN_BIT) ? (BIAS - t) : (t - BIAS)); +} + +/* A-law to u-law conversion */ +unsigned char +alaw2ulaw( + unsigned char aval) +{ + aval &= 0xff; + return ((aval & 0x80) ? (0xFF ^ _a2u[aval ^ 0xD5]) : + (0x7F ^ _a2u[aval ^ 0x55])); +} + +/* u-law to A-law conversion */ +unsigned char +ulaw2alaw( + unsigned char uval) +{ + uval &= 0xff; + return ((uval & 0x80) ? (0xD5 ^ (_u2a[0xFF ^ uval] - 1)) : + (0x55 ^ (_u2a[0x7F ^ uval] - 1))); +} + +int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type) +{ + int16_t *dst = (int16_t *) pout_buf; + uint8_t *src = (uint8_t *) pin_buf; + uint32_t i = 0; + int Ret = 0; + + if ((NULL == pout_buf) || \ + (NULL == pout_len) || \ + (NULL == pin_buf) || \ + (0 == in_len)) + { + return -1; + } + + if (*pout_len < 2 * in_len) + { + return -2; + } + //---{{{ + if (TP_ALAW == type) + { + for (i = 0; i < in_len; i++) + { + //*(dst++) = alawtos16[*(src++)]; + *(dst++) = (int16_t)alaw2linear(*(src++)); + } + }else + { + for (i = 0; i < in_len; i++) + { + //*(dst++) = alawtos16[*(src++)]; + *(dst++) = (int16_t)ulaw2linear(*(src++)); + } + } + + //---}}} + *pout_len = 2 * in_len; + + Ret = 2 * in_len; + return Ret; +} \ No newline at end of file diff --git a/OrderScheduling/Video/VideoTools/g726.h b/OrderScheduling/Video/VideoTools/g726.h new file mode 100644 index 0000000..db7afbb --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g726.h @@ -0,0 +1,188 @@ + +/*! Bitstream handler state */ +typedef struct bitstream_state_s +{ + /*! The bit stream. */ + unsigned int bitstream; + /*! The residual bits in bitstream. 
*/ + int residue; +}bitstream_state_t; + +typedef struct g726_state_s g726_state_t; +typedef short (*g726_decoder_func_t)(g726_state_t *s, unsigned char code); +typedef unsigned char (*g726_encoder_func_t)(g726_state_t *s, short amp); + + +/*! +* The following is the definition of the state structure +* used by the G.726 encoder and decoder to preserve their internal +* state between successive calls. The meanings of the majority +* of the state structure fields are explained in detail in the +* CCITT Recommendation G.726. The field names are essentially indentical +* to variable names in the bit level description of the coding algorithm +* included in this recommendation. +*/ +struct g726_state_s +{ + /*! The bit rate */ + int rate; + /*! The external coding, for tandem operation */ + //int ext_coding; + /*! The number of bits per sample */ + int bits_per_sample; + /*! One of the G.726_PACKING_xxx options */ + //int packing; + + /*! Locked or steady state step size multiplier. */ + int yl; + /*! Unlocked or non-steady state step size multiplier. */ + short yu; + /*! short term energy estimate. */ + short dms; + /*! Long term energy estimate. */ + short dml; + /*! Linear weighting coefficient of 'yl' and 'yu'. */ + short ap; + + /*! Coefficients of pole portion of prediction filter. */ + short a[2]; + /*! Coefficients of zero portion of prediction filter. */ + short b[6]; + /*! Signs of previous two samples of a partially reconstructed signal. */ + short pk[2]; + /*! Previous 6 samples of the quantized difference signal represented in + an internal floating point format. */ + short dq[6]; + /*! Previous 2 samples of the quantized difference signal represented in an + internal floating point format. */ + short sr[2]; + /*! Delayed tone detect */ + int td; + + /*! \brief The bit stream processing context. */ + bitstream_state_t bs; + + /*! \brief The current encoder function. */ + g726_encoder_func_t enc_func; + /*! \brief The current decoder function. 
*/ + g726_decoder_func_t dec_func; +}; + +/* +* Maps G.726_16 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_16_dqlntab[4] = +{ + 116, 365, 365, 116 +}; + +/* Maps G.726_16 code word to log of scale factor multiplier. */ +static const int g726_16_witab[4] = +{ + -704, 14048, 14048, -704 +}; + +/* +* Maps G.726_16 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_16_fitab[4] = +{ + 0x000, 0xE00, 0xE00, 0x000 +}; + +/* +* Maps G.726_24 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_24_dqlntab[8] = +{ + -2048, 135, 273, 373, 373, 273, 135, -2048 +}; + +/* Maps G.726_24 code word to log of scale factor multiplier. */ +static const int g726_24_witab[8] = +{ + -128, 960, 4384, 18624, 18624, 4384, 960, -128 +}; + +/* +* Maps G.726_24 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_24_fitab[8] = +{ + 0x000, 0x200, 0x400, 0xE00, 0xE00, 0x400, 0x200, 0x000 +}; + +/* +* Maps G.726_32 code word to reconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_32_dqlntab[16] = +{ + -2048, 4, 135, 213, 273, 323, 373, 425, + 425, 373, 323, 273, 213, 135, 4, -2048 +}; + +/* Maps G.726_32 code word to log of scale factor multiplier. */ +static const int g726_32_witab[16] = +{ + -384, 576, 1312, 2048, 3584, 6336, 11360, 35904, + 35904, 11360, 6336, 3584, 2048, 1312, 576, -384 +}; + +/* +* Maps G.726_32 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. 
+*/ +static const int g726_32_fitab[16] = +{ + 0x000, 0x000, 0x000, 0x200, 0x200, 0x200, 0x600, 0xE00, + 0xE00, 0x600, 0x200, 0x200, 0x200, 0x000, 0x000, 0x000 +}; + +/* +* Maps G.726_40 code word to ructeconstructed scale factor normalized log +* magnitude values. +*/ +static const int g726_40_dqlntab[32] = +{ + -2048, -66, 28, 104, 169, 224, 274, 318, + 358, 395, 429, 459, 488, 514, 539, 566, + 566, 539, 514, 488, 459, 429, 395, 358, + 318, 274, 224, 169, 104, 28, -66, -2048 +}; + +/* Maps G.726_40 code word to log of scale factor multiplier. */ +static const int g726_40_witab[32] = +{ + 448, 448, 768, 1248, 1280, 1312, 1856, 3200, + 4512, 5728, 7008, 8960, 11456, 14080, 16928, 22272, + 22272, 16928, 14080, 11456, 8960, 7008, 5728, 4512, + 3200, 1856, 1312, 1280, 1248, 768, 448, 448 +}; + +/* +* Maps G.726_40 code words to a set of values whose long and short +* term averages are computed and then compared to give an indication +* how stationary (steady state) the signal is. +*/ +static const int g726_40_fitab[32] = +{ + 0x000, 0x000, 0x000, 0x000, 0x000, 0x200, 0x200, 0x200, + 0x200, 0x200, 0x400, 0x600, 0x800, 0xA00, 0xC00, 0xC00, + 0xC00, 0xC00, 0xA00, 0x800, 0x600, 0x400, 0x200, 0x200, + 0x200, 0x200, 0x200, 0x000, 0x000, 0x000, 0x000, 0x000 +}; + + +g726_state_t *g726_init(g726_state_t *s, int bit_rate); + +int g726_decode(g726_state_t *s, short amp[], const unsigned char g726_data[], int g726_bytes); + +int g726_encode(g726_state_t *s, unsigned char g726_data[], const short amp[], int len); diff --git a/OrderScheduling/Video/VideoTools/g726.m b/OrderScheduling/Video/VideoTools/g726.m new file mode 100644 index 0000000..ba6920f --- /dev/null +++ b/OrderScheduling/Video/VideoTools/g726.m @@ -0,0 +1,889 @@ +/* + Copyright (c) 2013-2016 EasyDarwin.ORG. All rights reserved. 
/*
 * Returns the zero-based index of the highest set bit of `bits`, or -1 when
 * no bit is set. Uses the BSR instruction on x86, CNTLZW on PowerPC, and a
 * branchless bit-smearing fold elsewhere.
 */
static __inline int top_bit(unsigned int bits)
{
#if defined(__i386__) || defined(__x86_64__)
    int res;

    /* res is pre-set to -1 so a zero input yields -1 (BSR leaves it). */
    __asm__ (" xorl %[res],%[res];\n"
             " decl %[res];\n"
             " bsrl %[bits],%[res]\n"
             : [res] "=&r" (res)
             : [bits] "rm" (bits));
    return res;
#elif defined(__ppc__) || defined(__powerpc__)
    int res;

    __asm__ ("cntlzw %[res],%[bits];\n"
             : [res] "=&r" (res)
             : [bits] "r" (bits));
    return 31 - res;
#elif defined(_M_IX86) // Visual Studio x86
    __asm
    {
        xor eax, eax
        dec eax
        bsr eax, bits
    }
#else
    int position;

    if (bits == 0)
        return -1;

    /* Binary-search the top bit by narrowing to progressively finer masks. */
    position = 0;
    if (bits & 0xFFFF0000)
    {
        bits &= 0xFFFF0000;
        position += 16;
    }
    if (bits & 0xFF00FF00)
    {
        bits &= 0xFF00FF00;
        position += 8;
    }
    if (bits & 0xF0F0F0F0)
    {
        bits &= 0xF0F0F0F0;
        position += 4;
    }
    if (bits & 0xCCCCCCCC)
    {
        bits &= 0xCCCCCCCC;
        position += 2;
    }
    if (bits & 0xAAAAAAAA)
    {
        bits &= 0xAAAAAAAA;
        position += 1;
    }
    return position;
#endif
}
/*
 * Given a raw sample `d` of the difference signal and a quantization step
 * size scale factor `y`, returns the ADPCM codeword that `d` quantizes to.
 * The division by the step size scale factor is performed in the log base 2
 * domain as a subtraction.
 */
static short quantize(int d,                /* Raw difference signal sample */
                      int y,                /* Step size multiplier */
                      const int table[],    /* quantization table */
                      int quantizer_states) /* table size of short integers */
{
    short magnitude;    /* Magnitude of 'd' */
    short exponent;     /* Integer part of base 2 log of 'd' */
    short mantissa;     /* Fractional part of base 2 log */
    short log_d;        /* Log of magnitude of 'd' */
    short normalized;   /* Step size scale factor normalized log */
    int code;
    int half_states;

    /*
     * LOG: compute base 2 log of 'd' in fixed point (7 fractional bits).
     */
    magnitude = (short) abs(d);
    exponent = (short) (top_bit(magnitude >> 1) + 1);
    /* Fractional portion. */
    mantissa = ((magnitude << 7) >> exponent) & 0x7F;
    log_d = (exponent << 7) + mantissa;

    /*
     * SUBTB: "divide" by the step size multiplier (log-domain subtract).
     */
    normalized = log_d - (short) (y >> 2);

    /*
     * QUAN: search for the codeword matching 'normalized'.
     */
    half_states = (quantizer_states - 1) >> 1;
    for (code = 0; code < half_states; code++)
    {
        if (normalized < table[code])
            break;
    }
    if (d < 0)
    {
        /* Take 1's complement of the code for negative differences. */
        return (short) ((half_states << 1) + 1 - code);
    }
    if (code == 0 && (quantizer_states & 1))
    {
        /* Zero is only valid if there are an even number of states, so
           take the 1's complement if the code is zero. */
        return (short) quantizer_states;
    }
    return (short) code;
}
/*- End of function --------------------------------------------------------*/
((wanmant << wanexp) & 0x7FFF) : (wanmant >> -wanexp); + + return (((an ^ srn) < 0) ? -retval : retval); +} + +/* +* Compute the estimated signal from the 6-zero predictor. +*/ +static __inline short predictor_zero(g726_state_t *s) +{ + int i; + int sezi; + + sezi = fmult(s->b[0] >> 2, s->dq[0]); + /* ACCUM */ + for (i = 1; i < 6; i++) + sezi += fmult(s->b[i] >> 2, s->dq[i]); + return (short) sezi; +} +/*- End of function --------------------------------------------------------*/ + +/* +* Computes the estimated signal from the 2-pole predictor. +*/ +static __inline short predictor_pole(g726_state_t *s) +{ + return (fmult(s->a[1] >> 2, s->sr[1]) + fmult(s->a[0] >> 2, s->sr[0])); +} + +/* +* Computes the quantization step size of the adaptive quantizer. +*/ +static int step_size(g726_state_t *s) +{ + int y; + int dif; + int al; + + if (s->ap >= 256) + return s->yu; + y = s->yl >> 6; + dif = s->yu - y; + al = s->ap >> 2; + if (dif > 0) + y += (dif*al) >> 6; + else if (dif < 0) + y += (dif*al + 0x3F) >> 6; + return y; +} +/*- End of function --------------------------------------------------------*/ + +/* +* Returns reconstructed difference signal 'dq' obtained from +* codeword 'i' and quantization step size scale factor 'y'. +* Multiplication is performed in log base 2 domain as addition. +*/ +static short reconstruct(int sign, /* 0 for non-negative value */ + int dqln, /* G.72x codeword */ + int y) /* Step size multiplier */ +{ + short dql; /* Log of 'dq' magnitude */ + short dex; /* Integer part of log */ + short dqt; + short dq; /* Reconstructed difference signal sample */ + + dql = (short) (dqln + (y >> 2)); /* ADDA */ + + if (dql < 0) + return ((sign) ? -0x8000 : 0); + /* ANTILOG */ + dex = (dql >> 7) & 15; + dqt = 128 + (dql & 127); + dq = (dqt << 7) >> (14 - dex); + return ((sign) ? 
(dq - 0x8000) : dq); +} +/*- End of function --------------------------------------------------------*/ + +/* +* updates the state variables for each output code +*/ +static void update(g726_state_t *s, + int y, /* quantizer step size */ + int wi, /* scale factor multiplier */ + int fi, /* for long/short term energies */ + int dq, /* quantized prediction difference */ + int sr, /* reconstructed signal */ + int dqsez) /* difference from 2-pole predictor */ +{ + short mag; + short exp; + short a2p; /* LIMC */ + short a1ul; /* UPA1 */ + short pks1; /* UPA2 */ + short fa1; + short ylint; + short dqthr; + short ylfrac; + short thr; + short pk0; + int i; + int tr; + + a2p = 0; + /* Needed in updating predictor poles */ + pk0 = (dqsez < 0) ? 1 : 0; + + /* prediction difference magnitude */ + mag = (short) (dq & 0x7FFF); + /* TRANS */ + ylint = (short) (s->yl >> 15); /* exponent part of yl */ + ylfrac = (short) ((s->yl >> 10) & 0x1F); /* fractional part of yl */ + /* Limit threshold to 31 << 10 */ + thr = (ylint > 9) ? (31 << 10) : ((32 + ylfrac) << ylint); + dqthr = (thr + (thr >> 1)) >> 1; /* dqthr = 0.75 * thr */ + if (!s->td) /* signal supposed voice */ + tr = 0; + else if (mag <= dqthr) /* supposed data, but small mag */ + tr = 0; /* treated as voice */ + else /* signal is data (modem) */ + tr = 1; + + /* + * Quantizer scale factor adaptation. + */ + + /* FUNCTW & FILTD & DELAY */ + /* update non-steady state step size multiplier */ + s->yu = (short) (y + ((wi - y) >> 5)); + + /* LIMB */ + if (s->yu < 544) + s->yu = 544; + else if (s->yu > 5120) + s->yu = 5120; + + /* FILTE & DELAY */ + /* update steady state step size multiplier */ + s->yl += s->yu + ((-s->yl) >> 6); + + /* + * Adaptive predictor coefficients. 
+ */ + if (tr) + { + /* Reset the a's and b's for a modem signal */ + s->a[0] = 0; + s->a[1] = 0; + s->b[0] = 0; + s->b[1] = 0; + s->b[2] = 0; + s->b[3] = 0; + s->b[4] = 0; + s->b[5] = 0; + } + else + { + /* Update the a's and b's */ + /* UPA2 */ + pks1 = pk0 ^ s->pk[0]; + + /* Update predictor pole a[1] */ + a2p = s->a[1] - (s->a[1] >> 7); + if (dqsez != 0) + { + fa1 = (pks1) ? s->a[0] : -s->a[0]; + /* a2p = function of fa1 */ + if (fa1 < -8191) + a2p -= 0x100; + else if (fa1 > 8191) + a2p += 0xFF; + else + a2p += fa1 >> 5; + + if (pk0 ^ s->pk[1]) + { + /* LIMC */ + if (a2p <= -12160) + a2p = -12288; + else if (a2p >= 12416) + a2p = 12288; + else + a2p -= 0x80; + } + else if (a2p <= -12416) + a2p = -12288; + else if (a2p >= 12160) + a2p = 12288; + else + a2p += 0x80; + } + + /* TRIGB & DELAY */ + s->a[1] = a2p; + + /* UPA1 */ + /* Update predictor pole a[0] */ + s->a[0] -= s->a[0] >> 8; + if (dqsez != 0) + { + if (pks1 == 0) + s->a[0] += 192; + else + s->a[0] -= 192; + } + /* LIMD */ + a1ul = 15360 - a2p; + if (s->a[0] < -a1ul) + s->a[0] = -a1ul; + else if (s->a[0] > a1ul) + s->a[0] = a1ul; + + /* UPB : update predictor zeros b[6] */ + for (i = 0; i < 6; i++) + { + /* Distinguish 40Kbps mode from the others */ + s->b[i] -= s->b[i] >> ((s->bits_per_sample == 5) ? 9 : 8); + if (dq & 0x7FFF) + { + /* XOR */ + if ((dq ^ s->dq[i]) >= 0) + s->b[i] += 128; + else + s->b[i] -= 128; + } + } + } + + for (i = 5; i > 0; i--) + s->dq[i] = s->dq[i - 1]; + /* FLOAT A : convert dq[0] to 4-bit exp, 6-bit mantissa f.p. */ + if (mag == 0) + { + s->dq[0] = (dq >= 0) ? 0x20 : 0xFC20; + } + else + { + exp = (short) (top_bit(mag) + 1); + s->dq[0] = (dq >= 0) + ? ((exp << 6) + ((mag << 6) >> exp)) + : ((exp << 6) + ((mag << 6) >> exp) - 0x400); + } + + s->sr[1] = s->sr[0]; + /* FLOAT B : convert sr to 4-bit exp., 6-bit mantissa f.p. 
*/ + if (sr == 0) + { + s->sr[0] = 0x20; + } + else if (sr > 0) + { + exp = (short) (top_bit(sr) + 1); + s->sr[0] = (short) ((exp << 6) + ((sr << 6) >> exp)); + } + else if (sr > -32768) + { + mag = (short) -sr; + exp = (short) (top_bit(mag) + 1); + s->sr[0] = (exp << 6) + ((mag << 6) >> exp) - 0x400; + } + else + { + s->sr[0] = (short) 0xFC20; + } + + /* DELAY A */ + s->pk[1] = s->pk[0]; + s->pk[0] = pk0; + + /* TONE */ + if (tr) /* this sample has been treated as data */ + s->td = 0; /* next one will be treated as voice */ + else if (a2p < -11776) /* small sample-to-sample correlation */ + s->td = 1; /* signal may be data */ + else /* signal is voice */ + s->td = 0; + + /* Adaptation speed control. */ + /* FILTA */ + s->dms += ((short) fi - s->dms) >> 5; + /* FILTB */ + s->dml += (((short) (fi << 2) - s->dml) >> 7); + + if (tr) + s->ap = 256; + else if (y < 1536) /* SUBTC */ + s->ap += (0x200 - s->ap) >> 4; + else if (s->td) + s->ap += (0x200 - s->ap) >> 4; + else if (abs((s->dms << 2) - s->dml) >= (s->dml >> 3)) + s->ap += (0x200 - s->ap) >> 4; + else + s->ap += (-s->ap) >> 4; +} + +/* +* Decodes a 2-bit CCITT G.726_16 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_16_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x03; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 2, g726_16_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? 
(se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_16_witab[code], g726_16_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a linear PCM, A-law or u-law input sample and returns its 3-bit code. + */ +static unsigned char g726_16_encoder(g726_state_t *s, short amp) +{ + int y; + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_16, 4); + dq = reconstruct(i & 2, g726_16_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_16_witab[i], g726_16_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +/* +* Decodes a 3-bit CCITT G.726_24 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_24_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x07; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 4, g726_24_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_24_witab[code], g726_24_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a linear PCM, A-law or u-law input sample and returns its 3-bit code. 
+ */ +static unsigned char g726_24_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_24, 7); + dq = reconstruct(i & 4, g726_24_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_24_witab[i], g726_24_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + + +/* +* Decodes a 4-bit CCITT G.726_32 ADPCM code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_32_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x0F; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 8, g726_32_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_32_witab[code], g726_32_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + +/* + * Encodes a linear input sample and returns its 4-bit code. 
+ */ +static unsigned char g726_32_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize the prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_32, 15); + dq = reconstruct(i & 8, g726_32_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_32_witab[i], g726_32_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +/* +* Decodes a 5-bit CCITT G.726 40Kbps code and returns +* the resulting 16-bit linear PCM, A-law or u-law sample value. +*/ +static short g726_40_decoder(g726_state_t *s, unsigned char code) +{ + short sezi; + short sei; + short se; + short sr; + short dq; + short dqsez; + int y; + + /* Mask to get proper bits */ + code &= 0x1F; + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + + y = step_size(s); + dq = reconstruct(code & 0x10, g726_40_dqlntab[code], y); + + /* Reconstruct the signal */ + se = sei >> 1; + sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_40_witab[code], g726_40_fitab[code], dq, sr, dqsez); + + return (sr << 2); +} +/*- End of function --------------------------------------------------------*/ + + +/* + * Encodes a 16-bit linear PCM, A-law or u-law input sample and retuens + * the resulting 5-bit CCITT G.726 40Kbps code. 
+ */ +static unsigned char g726_40_encoder(g726_state_t *s, short amp) +{ + short sei; + short sezi; + short se; + short d; + short sr; + short dqsez; + short dq; + short i; + int y; + + sezi = predictor_zero(s); + sei = sezi + predictor_pole(s); + se = sei >> 1; + d = amp - se; + + /* Quantize prediction difference */ + y = step_size(s); + i = quantize(d, y, qtab_726_40, 31); + dq = reconstruct(i & 0x10, g726_40_dqlntab[i], y); + + /* Reconstruct the signal */ + sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq); + + /* Pole prediction difference */ + dqsez = sr + (sezi >> 1) - se; + + update(s, y, g726_40_witab[i], g726_40_fitab[i], dq, sr, dqsez); + return (unsigned char) i; +} + +g726_state_t *g726_init(g726_state_t *s, int bit_rate) +{ + int i; + + if (bit_rate != 16000 && bit_rate != 24000 && bit_rate != 32000 && bit_rate != 40000) + return NULL; + + s->yl = 34816; + s->yu = 544; + s->dms = 0; + s->dml = 0; + s->ap = 0; + s->rate = bit_rate; + + for (i = 0; i < 2; i++) + { + s->a[i] = 0; + s->pk[i] = 0; + s->sr[i] = 32; + } + for (i = 0; i < 6; i++) + { + s->b[i] = 0; + s->dq[i] = 32; + } + s->td = 0; + switch (bit_rate) + { + case 16000: + s->enc_func = g726_16_encoder; + s->dec_func = g726_16_decoder; + s->bits_per_sample = 2; + break; + case 24000: + s->enc_func = g726_24_encoder; + s->dec_func = g726_24_decoder; + s->bits_per_sample = 3; + break; + case 32000: + default: + s->enc_func = g726_32_encoder; + s->dec_func = g726_32_decoder; + s->bits_per_sample = 4; + break; + case 40000: + s->enc_func = g726_40_encoder; + s->dec_func = g726_40_decoder; + s->bits_per_sample = 5; + break; + } + bitstream_init(&s->bs); + return s; +} + +int g726_decode(g726_state_t *s, + short amp[], + const unsigned char g726_data[], + int g726_bytes) +{ + int i; + int samples; + unsigned char code; + int sl; + + for (samples = i = 0; ; ) + { + if (s->bs.residue < s->bits_per_sample) + { + if (i >= g726_bytes) + break; + s->bs.bitstream = (s->bs.bitstream << 8) | 
g726_data[g726_bytes-i-1]; i++; +// s->bs.bitstream = (s->bs.bitstream << 8) | g726_data[i++]; + s->bs.residue += 8; + } + code = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - s->bits_per_sample)) & ((1 << s->bits_per_sample) - 1)); + + s->bs.residue -= s->bits_per_sample; + + sl = s->dec_func(s, code); + + amp[samples++] = (short) sl; + } + return samples; +} + + +int g726_encode(g726_state_t *s, + unsigned char g726_data[], + const short amp[], + int len) +{ + int i; + int g726_bytes; + short sl; + unsigned char code; + + for (g726_bytes = i = 0; i < len; i++) + { + sl = amp[i] >> 2; + + code = s->enc_func(s, sl); + + s->bs.bitstream = (s->bs.bitstream << s->bits_per_sample) | code; + s->bs.residue += s->bits_per_sample; + if (s->bs.residue >= 8) + { + g726_data[g726_bytes++] = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - 8)) & 0xFF); + s->bs.residue -= 8; + } + + } + + int j = 0, k = g726_bytes - 1; + unsigned char temp = 0; + while (j < k) { + temp = g726_data[j]; g726_data[j] = g726_data[k]; g726_data[k] = temp; + j++; k--; + } + + return g726_bytes; +} + diff --git a/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h new file mode 100644 index 0000000..c5788a2 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h @@ -0,0 +1,85 @@ +// +// YFProgressHUD.h +// LoadingViewAnimation +//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved. 
+// + +#import + +@interface YFProgressHUD : UIView + +#pragma mark ====== 添加在window上 ======= + ++(YFProgressHUD *) showToastTitle:(NSString *)titleString; + +/** + 显示title + + @param titleString 加载时展示的文字(可选) + @param block 加载完成后的操作 + */ ++(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block; + +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString; + +/** + 带有下落动画HUD + + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr; + + +/** + gif动画HUD + + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName; + +/** + 移除HUD + */ ++(void) hiddenProgressHUD; + +#pragma mark ====== 添加在view上 ======= +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString; + +/** + 带有下落动画HUD + + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr; + +/** + gif动画HUD + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName; +/** + 移除HUD + */ ++(void) hiddenProgressHUDforView:(UIView *)view; + +/** + 设置文字 + */ ++ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view; +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m new file mode 100644 index 0000000..e6b11ba --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m @@ -0,0 +1,564 @@ +// +// YFProgressHUD.m 
+// LoadingViewAnimation +//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved. +// + +#import "YFProgressHUD.h" + +#import "YLImageView.h" +#import "YLGIFImage.h" + + + +#define ANIMATION_DURATION_SECS 0.5f + +#define KW 120 +#define KH 120 + +typedef NS_ENUM(NSUInteger, YFProgressHUDType) { + YFProgressHUDTypeGif, // gifHUD + YFProgressHUDTypeRotAni,// 旋转下落HUD + YFProgressHUDTypeNormal,// 普通的HUD + YFProgressHUDTypeToast,// Toast +}; + +@interface YFProgressHUD () +// 动画处理的定时器 +@property (nonatomic, strong) NSTimer *timer; +// 动画的view +@property(nonatomic,strong)UIImageView * shapView; +// 阴影view +@property(nonatomic,strong)UIImageView * shadowView; +// gif +@property (nonatomic, copy) NSString *gifName; +// 加载的文字 +@property (nonatomic, copy) NSString *titleString; +// 加载的文字的label +@property (nonatomic, strong) UILabel *titleLabel; +// 加载的图片数组 +@property(nonatomic,strong)NSArray *imagesArr; +// 切换不同的图片 +@property (nonatomic, assign) int stepNumber; +// 是否正在动画中 +@property (nonatomic, assign) BOOL isAnimating; +// 记录下降动画开始的位置 +@property(nonatomic,assign)float fromValue; +// 记录下降动画结束的位置 +@property(nonatomic,assign)float toValue; +// 记录阴影缩放开始的值 +@property(nonatomic,assign)float scalefromValue; +// 记录阴影缩放结束的值 +@property(nonatomic,assign)float scaletoValue; +// HUD动画类型 +@property(nonatomic,assign)YFProgressHUDType hudType; +@property(nonatomic,strong)UIWindow *window; +@end + +@implementation YFProgressHUD + ++(UIWindow *)getWindow{ + + static __weak UIWindow *cachedKeyWindow = nil; + /* (Bug ID: #23, #25, #73) */ + UIWindow *originalKeyWindow = nil; + + #if __IPHONE_OS_VERSION_MAX_ALLOWED >= 130000 + if (@available(iOS 13.0, *)) { + NSSet *connectedScenes = [UIApplication sharedApplication].connectedScenes; + for (UIScene *scene in connectedScenes) { + if (scene.activationState == UISceneActivationStateForegroundActive && [scene isKindOfClass:[UIWindowScene class]]) { + UIWindowScene *windowScene = (UIWindowScene *)scene; + for (UIWindow *window in 
windowScene.windows) { + if (window.isKeyWindow) { + originalKeyWindow = window; + break; + } + } + } + } + } else + #endif + { + #if __IPHONE_OS_VERSION_MIN_REQUIRED < 130000 + originalKeyWindow = [UIApplication sharedApplication].keyWindow; + #endif + } + + //If original key window is not nil and the cached keywindow is also not original keywindow then changing keywindow. + if (originalKeyWindow) + { + cachedKeyWindow = originalKeyWindow; + } + + return cachedKeyWindow; +} + + +#pragma mark ====== 添加在window上 ======= ++(YFProgressHUD *) showToastTitle:(NSString *)titleString{ + + __block YFProgressHUD *hud; + __block UIWindow *window; + + dispatch_async(dispatch_get_main_queue(), ^{ + window = [YFProgressHUD getWindow]; + // [YFProgressHUD hiddenProgressHUDforView:window]; + + hud = [[YFProgressHUD alloc] initWithFrame:window.bounds]; + hud.hudType = YFProgressHUDTypeToast; + hud.titleString = titleString; + [hud setupView]; + [window addSubview:hud]; + }); + + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ + [hud removeFromSuperview]; + [YFProgressHUD hiddenProgressHUDforView:window]; + }); + + return hud; +} + ++(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block +{ + __block YFProgressHUD *hud; + __block UIWindow *window; + + dispatch_async(dispatch_get_main_queue(), ^{ + window = [YFProgressHUD getWindow]; + // [YFProgressHUD hiddenProgressHUDforView:window]; + + hud = [[YFProgressHUD alloc] initWithFrame:window.bounds]; + hud.hudType = YFProgressHUDTypeToast; + hud.titleString = titleString; + [hud setupView]; + [window addSubview:hud]; + }); + + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ + [hud removeFromSuperview]; + [YFProgressHUD hiddenProgressHUDforView:window]; + if (block) { + block(); + } + }); + + return hud; +} +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 
加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString{ + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window title:titleString]; +} + +/** + 带有下落动画HUD + + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr{ + + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window title:titleString imagesArr:arr]; +} + +/** + gif动画HUD + + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName{ + UIWindow *window = [YFProgressHUD getWindow]; + return [YFProgressHUD showProgressHUDinView:window withTitle:titleString gifImg:gifName]; +} + +/** + 移除HUD + */ ++(void) hiddenProgressHUD{ + UIWindow *window = [YFProgressHUD getWindow]; + [YFProgressHUD hiddenProgressHUDforView:window]; +} + +#pragma mark ====== 添加在View上 ======= +/** + 显示UIActivityIndicatorView 和 title + + @param titleString 加载时展示的文字(可选) + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeNormal; + hud.titleString = titleString; + [hud setupView]; + dispatch_async(dispatch_get_main_queue(), ^{ + [view addSubview:hud]; + }); + [hud startAnimating]; + + return hud; + +} + +/** + 带有下落动画HUD + + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param arr 动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeRotAni; + hud.titleString = titleString; + hud.imagesArr = arr; + 
[hud setupView]; + [view addSubview:hud]; + [hud startAnimating]; + return hud; +} + +/** + gif动画HUD + @param view 需要展示HUD的view + @param titleString 加载时展示的文字(可选) + @param gifName gif动画的图片 + */ ++(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName{ + + [YFProgressHUD hiddenProgressHUDforView:view]; + + YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds]; + hud.hudType = YFProgressHUDTypeGif; + hud.titleString = titleString; + hud.gifName = gifName; + [hud setupView]; + [view addSubview:hud]; + [hud startAnimating]; + return hud; +} + +/** + 移除HUD + */ ++(void) hiddenProgressHUDforView:(UIView *)view{ + if (!view && [view isMemberOfClass:[UIWindow class]]) { + view = [YFProgressHUD getWindow]; + } + dispatch_async(dispatch_get_main_queue(), ^{ + for (UIView *subView in view.subviews) { + if ([subView isKindOfClass:[YFProgressHUD class]]) { + [subView removeFromSuperview]; + break; + } + } + }); + +} + ++ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view; +{ + if (!view && [view isMemberOfClass:[UIWindow class]]) { + view = [YFProgressHUD getWindow]; + } + dispatch_async(dispatch_get_main_queue(), ^{ + for (UIView *subView in view.subviews) { + if ([subView isKindOfClass:[YFProgressHUD class]]) { + YFProgressHUD *hud = (YFProgressHUD *)subView; + hud.titleString = titleString; + hud.titleLabel.text = titleString; + } + } + }); +} + +#pragma mark ====== 初始化======= +-(void)setupView +{ + + self.userInteractionEnabled = YES; + + switch (self.hudType) { + case YFProgressHUDTypeToast: + [self setUpToast]; + break; + case YFProgressHUDTypeNormal: + [self setupNormal]; + break; + case YFProgressHUDTypeGif: + [self setupGif]; + break; + case YFProgressHUDTypeRotAni: + [self setupRotAni]; + break; + } + +} + + + +-(void)setUpToast{ + + CGFloat strW = [self.titleString boundingRectWithSize:CGSizeMake(10000, 30) options:NSStringDrawingUsesLineFragmentOrigin 
attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.width; + +// CGFloat width = [UIScreen mainScreen].bounds.size.width; +// CGFloat scale = width/375; +// CGFloat w = 80 * scale; +// +// UIView *centerView = [UIView new]; +// centerView.bounds = CGRectMake(0, 0, w, w); +// centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20); +// centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3]; +// centerView.layer.cornerRadius = 4; +// centerView.layer.masksToBounds = YES; +// + + UILabel *label=[[UILabel alloc] init]; + if (strW + 40 > [UIScreen mainScreen].bounds.size.width - 60) { + CGFloat strH = [self.titleString boundingRectWithSize:CGSizeMake([UIScreen mainScreen].bounds.size.width - 60, 10000) options:NSStringDrawingUsesLineFragmentOrigin attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.height; + label.frame=CGRectMake(0, 0 , [UIScreen mainScreen].bounds.size.width - 60 , strH + 20); + }else{ + label.frame=CGRectMake(0, 0 , strW + 20 , 40); + } + label.backgroundColor = [UIColor colorWithWhite:0 alpha:1.0]; + label.textColor = [UIColor whiteColor]; + label.textAlignment=NSTextAlignmentCenter; + label.numberOfLines = 0; + label.layer.cornerRadius = 4.0; + label.layer.masksToBounds = YES; + label.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 40); + label.text=_titleString; + label.font=[UIFont systemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + +} + +-(void)setupNormal{ + + CGFloat width = [UIScreen mainScreen].bounds.size.width; + CGFloat scale = width/375; + CGFloat w = 80 * scale; + UIView *centerView = [UIView new]; + centerView.bounds = CGRectMake(0, 0, w, w); + centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20); + centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3]; + 
centerView.layer.cornerRadius = 4; + centerView.layer.masksToBounds = YES; + + UIActivityIndicatorView *indicatorView = [[UIActivityIndicatorView alloc]init]; + indicatorView.frame = CGRectMake(0, 0, w, w); + indicatorView.center = CGPointMake(w/2, w/2); + indicatorView.color = [UIColor colorWithWhite:1.0 alpha:0.8]; + indicatorView.transform = CGAffineTransformMakeScale(1.6 * scale, 1.6 * scale); + indicatorView.hidesWhenStopped = NO; + [centerView addSubview:indicatorView]; + [indicatorView startAnimating]; + [self addSubview:centerView]; + + if (_titleString.length != 0) { + UILabel *label=[[UILabel alloc] init]; + label.frame=CGRectMake(0, 0 , KW , 35); + label.textColor=[UIColor grayColor]; + label.numberOfLines = 2; + label.textAlignment=NSTextAlignmentCenter; + label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2 + w/2); + label.text=_titleString; + label.font=[UIFont boldSystemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + } +} + +-(void)setupRotAni{ + + _shapView=[[UIImageView alloc] init]; + _shapView.frame = CGRectMake(KW/2-31/2, 0, 31, 31); + _shapView.image = [UIImage imageNamed:self.imagesArr[0]]; + _shapView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2-100); + _shapView.contentMode = UIViewContentModeScaleAspectFit; + [self addSubview:_shapView]; + + //阴影 + _shadowView = [[UIImageView alloc] init]; + _shadowView.frame = CGRectMake(KW/2-37/2, KH-2.5-30, 37, 2.5); + _shadowView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2); + _shadowView.image = [UIImage imageNamed:@"loading_shadow"]; + [self addSubview:_shadowView]; + + if (_titleString.length != 0) { + UILabel *_label=[[UILabel alloc] init]; + _label.frame=CGRectMake(0, 0 , KW , 20); + _label.textColor=[UIColor grayColor]; + _label.textAlignment=NSTextAlignmentCenter; + _label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2+20); + _label.text=_titleString; + _label.font=[UIFont 
systemFontOfSize:14.0f]; + [self addSubview:_label]; + self.titleLabel = _label; + } + + _fromValue=self.frame.size.height/2-100; + _toValue=self.frame.size.height/2.0-37/2.0; + _scalefromValue=0.1f; + _scaletoValue=1.0f; +} + +-(void)setupGif{ + +// NSString *url = [[NSBundle mainBundle] pathForResource:self.gifName ofType:@""]; + UIImage *gifImg = [YLGIFImage imageNamed:self.gifName];//[UIImage sd_animatedGIFWithData:[NSData dataWithContentsOfFile:url]]; + + CGSize size = gifImg.size; + YLImageView *gifView=[[YLImageView alloc] init]; + gifView.frame = CGRectMake(0, 0, size.width, size.height); + gifView.image = [YLGIFImage imageNamed:self.gifName]; + gifView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 30); + gifView.contentMode = UIViewContentModeScaleAspectFit; + [self addSubview:gifView]; + + if (_titleString.length != 0) { + UILabel *label=[[UILabel alloc] init]; + label.frame=CGRectMake(0, gifView.bounds.size.height , KW , 20); + label.textColor=[UIColor grayColor]; + label.textAlignment=NSTextAlignmentCenter; + label.center = CGPointMake(gifView.center.x, gifView.center.y + size.height/2 + 20); + label.text=_titleString; + label.font=[UIFont systemFontOfSize:14.0f]; + [self addSubview:label]; + self.titleLabel = label; + } +} + +#pragma mark ====== 动画处理 ======= +// 开始动画 +-(void) startAnimating +{ + if (!_isAnimating) + { + _isAnimating = YES; + if (self.hudType == YFProgressHUDTypeRotAni) { + _timer = [NSTimer scheduledTimerWithTimeInterval:ANIMATION_DURATION_SECS target:self selector:@selector(animateNextStep) userInfo:nil repeats:YES]; + [[NSRunLoop mainRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode]; + [self animateNextStep]; + } + + } + +} + +// 结束动画 +-(void) stopAnimating +{ + _isAnimating = NO; + if (self.hudType == YFProgressHUDTypeRotAni) { + [_timer invalidate]; + _timer=nil; + _stepNumber = 0; + [_shapView.layer removeAllAnimations]; + [_shadowView.layer removeAllAnimations]; + } +} + +// 动画方法 
+-(void)animateNextStep +{ + + if (_stepNumber%2==0) { + [self loadingAnimation:_toValue toValue:_fromValue timingFunction:kCAMediaTimingFunctionEaseOut]; + [self scaleAnimation:_scaletoValue toValue:_scalefromValue timingFunction:kCAMediaTimingFunctionEaseIn]; + _shapView.image=[UIImage imageNamed:self.imagesArr[_stepNumber]]; + }else { + [self loadingAnimation:_fromValue toValue:_toValue timingFunction:kCAMediaTimingFunctionEaseIn]; + [self scaleAnimation:_scalefromValue toValue:_scaletoValue timingFunction:kCAMediaTimingFunctionEaseOut]; + } + + if (_stepNumber==self.imagesArr.count-1) { + _stepNumber = -1; + } + _stepNumber++; +} + +// 下落动画 +-(void) loadingAnimation:(float)fromValue toValue:(float)toValue timingFunction:(NSString * const)tf +{ + //位置 + CABasicAnimation *panimation = [CABasicAnimation animation]; + panimation.keyPath = @"position.y"; + panimation.fromValue =@(fromValue); + panimation.toValue = @(toValue); + panimation.duration = ANIMATION_DURATION_SECS; + panimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + + //旋转 + CABasicAnimation *ranimation = [CABasicAnimation animation]; + ranimation.keyPath = @"transform.rotation"; + ranimation.fromValue =@(0); + ranimation.toValue = @(M_PI_2); + ranimation.duration = ANIMATION_DURATION_SECS; + + ranimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + + //组合 + CAAnimationGroup *group = [[CAAnimationGroup alloc] init]; + group.animations = @[panimation,ranimation]; + group.duration = ANIMATION_DURATION_SECS; + group.beginTime = 0; + group.fillMode=kCAFillModeForwards; + group.removedOnCompletion = NO; + + [_shapView.layer addAnimation:group forKey:@"basic"]; + +} + +// 缩放动画 +-(void) scaleAnimation:(float) fromeValue toValue:(float)toValue timingFunction:(NSString * const)tf +{ + + CABasicAnimation *sanimation = [CABasicAnimation animation]; + sanimation.keyPath = @"transform.scale"; + sanimation.fromValue =@(fromeValue); + sanimation.toValue = @(toValue); + 
sanimation.duration = ANIMATION_DURATION_SECS; + sanimation.fillMode = kCAFillModeForwards; + sanimation.timingFunction = [CAMediaTimingFunction functionWithName:tf]; + sanimation.removedOnCompletion = NO; + [_shadowView.layer addAnimation:sanimation forKey:@"shadow"]; + +} + +#pragma mark ====== setter ======= +-(void)setImagesArr:(NSArray *)imagesArr{ + NSMutableArray *arr=[NSMutableArray array]; + for (int i=0; i + +@interface YLGIFImage : UIImage + +///----------------------- +/// @name Image Attributes +///----------------------- + +/** + A C array containing the frame durations. + + The number of frames is defined by the count of the `images` array property. + */ +@property (nonatomic, readonly) NSTimeInterval *frameDurations; + +/** + Total duration of the animated image. + */ +@property (nonatomic, readonly) NSTimeInterval totalDuration; + +/** + Number of loops the image can do before it stops + */ +@property (nonatomic, readonly) NSUInteger loopCount; + +- (UIImage*)getFrameWithIndex:(NSUInteger)idx; + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m b/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m new file mode 100755 index 0000000..030fb02 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLGIFImage.m @@ -0,0 +1,305 @@ +// +// YLGIFImage.m +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. +// + +#import "YLGIFImage.h" +#import +#import + + +//Define FLT_EPSILON because, reasons. 
+//Actually, I don't know why but it seems under certain circumstances it is not defined +#ifndef FLT_EPSILON +#define FLT_EPSILON __FLT_EPSILON__ +#endif + +inline static NSTimeInterval CGImageSourceGetGifFrameDelay(CGImageSourceRef imageSource, NSUInteger index) +{ + NSTimeInterval frameDuration = 0; + CFDictionaryRef theImageProperties; + if ((theImageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, index, NULL))) { + CFDictionaryRef gifProperties; + if (CFDictionaryGetValueIfPresent(theImageProperties, kCGImagePropertyGIFDictionary, (const void **)&gifProperties)) { + const void *frameDurationValue; + if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFUnclampedDelayTime, &frameDurationValue)) { + frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue]; + if (frameDuration <= 0) { + if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFDelayTime, &frameDurationValue)) { + frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue]; + } + } + } + } + CFRelease(theImageProperties); + } + +#ifndef OLExactGIFRepresentation + //Implement as Browsers do. 
+ //See: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser-compatibility + //Also: http://blogs.msdn.com/b/ieinternals/archive/2010/06/08/animated-gifs-slow-down-to-under-20-frames-per-second.aspx + + if (frameDuration < 0.02 - FLT_EPSILON) { + frameDuration = 0.1; + } +#endif + return frameDuration; +} + +inline static BOOL CGImageSourceContainsAnimatedGif(CGImageSourceRef imageSource) +{ + return imageSource && UTTypeConformsTo(CGImageSourceGetType(imageSource), kUTTypeGIF) && CGImageSourceGetCount(imageSource) > 1; +} + +inline static BOOL isRetinaFilePath(NSString *path) +{ + NSRange retinaSuffixRange = [[path lastPathComponent] rangeOfString:@"@2x" options:NSCaseInsensitiveSearch]; + return retinaSuffixRange.length && retinaSuffixRange.location != NSNotFound; +} + +@interface YLGIFImage () + +@property (nonatomic, readwrite) NSMutableArray *images; +@property (nonatomic, readwrite) NSTimeInterval *frameDurations; +@property (nonatomic, readwrite) NSTimeInterval totalDuration; +@property (nonatomic, readwrite) NSUInteger loopCount; +@property (nonatomic, readwrite) CGImageSourceRef incrementalSource; +@property(nonatomic,assign)CGImageSourceRef imageSourceRef; +@end + +static NSUInteger _prefetchedNum = 10; + +@implementation YLGIFImage +{ + dispatch_queue_t readFrameQueue; +// CGImageSourceRef _imageSourceRef; + CGFloat _scale; +} + +@synthesize images; + +#pragma mark - Class Methods + ++ (id)imageNamed:(NSString *)name +{ + NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:nil]; + + return ([[NSFileManager defaultManager] fileExistsAtPath:path]) ? [self imageWithContentsOfFile:path] : nil; +} + ++ (id)imageWithContentsOfFile:(NSString *)path +{ + return [self imageWithData:[NSData dataWithContentsOfFile:path] + scale:isRetinaFilePath(path) ? 
2.0f : 1.0f]; +} + ++ (id)imageWithData:(NSData *)data +{ + return [self imageWithData:data scale:1.0f]; +} + ++ (id)imageWithData:(NSData *)data scale:(CGFloat)scale +{ + if (!data) { + return nil; + } + + CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL); + UIImage *image; + + if (CGImageSourceContainsAnimatedGif(imageSource)) { + image = [[self alloc] initWithCGImageSource:imageSource scale:scale]; + } else { + image = [super imageWithData:data scale:scale]; + } + + if (imageSource) { + CFRelease(imageSource); + } + + return image; +} + +#pragma mark - Initialization methods + +- (id)initWithContentsOfFile:(NSString *)path +{ + return [self initWithData:[NSData dataWithContentsOfFile:path] + scale:isRetinaFilePath(path) ? 2.0f : 1.0f]; +} + +- (id)initWithData:(NSData *)data +{ + return [self initWithData:data scale:1.0f]; +} + +- (id)initWithData:(NSData *)data scale:(CGFloat)scale +{ + if (!data) { + return nil; + } + + CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL); + + if (CGImageSourceContainsAnimatedGif(imageSource)) { + self = [self initWithCGImageSource:imageSource scale:scale]; + } else { + if (scale == 1.0f) { + self = [super initWithData:data]; + } else { + self = [super initWithData:data scale:scale]; + } + } + + if (imageSource) { + CFRelease(imageSource); + } + + return self; +} + +- (id)initWithCGImageSource:(CGImageSourceRef)imageSource scale:(CGFloat)scale +{ + self = [super init]; + if (!imageSource || !self) { + return nil; + } + + CFRetain(imageSource); + + NSUInteger numberOfFrames = CGImageSourceGetCount(imageSource); + + NSDictionary *imageProperties = CFBridgingRelease(CGImageSourceCopyProperties(imageSource, NULL)); + NSDictionary *gifProperties = [imageProperties objectForKey:(NSString *)kCGImagePropertyGIFDictionary]; + + self.frameDurations = (NSTimeInterval *)malloc(numberOfFrames * sizeof(NSTimeInterval)); + self.loopCount = 
[gifProperties[(NSString *)kCGImagePropertyGIFLoopCount] unsignedIntegerValue]; + self.images = [NSMutableArray arrayWithCapacity:numberOfFrames]; + + NSNull *aNull = [NSNull null]; + for (NSUInteger i = 0; i < numberOfFrames; ++i) { + [self.images addObject:aNull]; + NSTimeInterval frameDuration = CGImageSourceGetGifFrameDelay(imageSource, i); + self.frameDurations[i] = frameDuration; + self.totalDuration += frameDuration; + } + //CFTimeInterval start = CFAbsoluteTimeGetCurrent(); + // Load first frame + NSUInteger num = MIN(_prefetchedNum, numberOfFrames); + for (NSUInteger i=0; i _prefetchedNum) { + if(idx != 0) { + [self.images replaceObjectAtIndex:idx withObject:[NSNull null]]; + } + __weak typeof(self) weakSelf=self; + NSUInteger nextReadIdx = (idx + _prefetchedNum); + for(NSUInteger i=idx+1; i<=nextReadIdx; i++) { + NSUInteger _idx = i%self.images.count; + CGFloat scale = _scale; + if([self.images[_idx] isKindOfClass:[NSNull class]]) { + dispatch_async(readFrameQueue, ^{ + CGImageRef image = CGImageSourceCreateImageAtIndex(weakSelf.imageSourceRef, _idx, NULL); + @synchronized(weakSelf.images) { + if (image != NULL) { + [weakSelf.images replaceObjectAtIndex:_idx withObject:[UIImage imageWithCGImage:image scale:scale orientation:UIImageOrientationUp]]; + CFRelease(image); + } else { + [weakSelf.images replaceObjectAtIndex:_idx withObject:[NSNull null]]; + } + } + }); + } + } + } + return frame; +} + +#pragma mark - Compatibility methods + +- (CGSize)size +{ + if (self.images.count) { + + return [(UIImage *)[self.images objectAtIndex:0] size]; + } + return [super size]; +} + +- (CGImageRef)CGImage +{ + if (self.images.count) { + return [[self.images objectAtIndex:0] CGImage]; + } else { + return [super CGImage]; + } +} + +- (UIImageOrientation)imageOrientation +{ + if (self.images.count) { + return [[self.images objectAtIndex:0] imageOrientation]; + } else { + return [super imageOrientation]; + } +} + +- (CGFloat)scale +{ + if (self.images.count) { + return 
[(UIImage *)[self.images objectAtIndex:0] scale]; + } else { + return [super scale]; + } +} + +- (NSTimeInterval)duration +{ + return self.images ? self.totalDuration : [super duration]; +} + +- (void)dealloc { + if(_imageSourceRef) { + CFRelease(_imageSourceRef); + } + free(_frameDurations); + if (_incrementalSource) { + CFRelease(_incrementalSource); + } +} + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLImageView.h b/OrderScheduling/Video/YFProgressHUD/YLImageView.h new file mode 100755 index 0000000..71e804e --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLImageView.h @@ -0,0 +1,13 @@ +// +// YLImageView.h +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. +// + +#import + +@interface YLImageView : UIImageView + +@property (nonatomic, copy) NSString *runLoopMode; + +@end diff --git a/OrderScheduling/Video/YFProgressHUD/YLImageView.m b/OrderScheduling/Video/YFProgressHUD/YLImageView.m new file mode 100755 index 0000000..8f924b3 --- /dev/null +++ b/OrderScheduling/Video/YFProgressHUD/YLImageView.m @@ -0,0 +1,219 @@ +// +// YLImageView.m +// YLGIFImage +//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved. 
+// + +#import "YLImageView.h" +#import "YLGIFImage.h" +#import + +@interface YLImageView () + +@property (nonatomic, strong) YLGIFImage *animatedImage; +@property (nonatomic, strong) CADisplayLink *displayLink; +@property (nonatomic) NSTimeInterval accumulator; +@property (nonatomic) NSUInteger currentFrameIndex; +@property (nonatomic, strong) UIImage* currentFrame; +@property (nonatomic) NSUInteger loopCountdown; + +@end + +@implementation YLImageView + +const NSTimeInterval kMaxTimeStep = 1; // note: To avoid spiral-o-death + +@synthesize runLoopMode = _runLoopMode; +@synthesize displayLink = _displayLink; + +- (id)init +{ + self = [super init]; + if (self) { + self.currentFrameIndex = 0; + } + return self; +} + +- (CADisplayLink *)displayLink +{ + if (self.superview) { + if (!_displayLink && self.animatedImage) { + _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(changeKeyframe:)]; + [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:self.runLoopMode]; + } + } else { + [_displayLink invalidate]; + _displayLink = nil; + } + return _displayLink; +} + +- (NSString *)runLoopMode +{ + return _runLoopMode ?: NSRunLoopCommonModes; +} + +- (void)setRunLoopMode:(NSString *)runLoopMode +{ + if (runLoopMode != _runLoopMode) { + [self stopAnimating]; + + NSRunLoop *runloop = [NSRunLoop mainRunLoop]; + [self.displayLink removeFromRunLoop:runloop forMode:_runLoopMode]; + [self.displayLink addToRunLoop:runloop forMode:runLoopMode]; + + _runLoopMode = runLoopMode; + + [self startAnimating]; + } +} + +- (void)setImage:(UIImage *)image +{ + if (image == self.image) { + return; + } + + [self stopAnimating]; + + self.currentFrameIndex = 0; + self.loopCountdown = 0; + self.accumulator = 0; + + if ([image isKindOfClass:[YLGIFImage class]] && image.images) { + if([image.images[0] isKindOfClass:UIImage.class]) + [super setImage:image.images[0]]; + else + [super setImage:nil]; + self.currentFrame = nil; + self.animatedImage = (YLGIFImage *)image; + 
self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax; + [self startAnimating]; + } else { + self.animatedImage = nil; + [super setImage:image]; + } + [self.layer setNeedsDisplay]; +} + +- (void)setAnimatedImage:(YLGIFImage *)animatedImage +{ + _animatedImage = animatedImage; + if (animatedImage == nil) { + self.layer.contents = nil; + } +} + +- (BOOL)isAnimating +{ + return [super isAnimating] || (self.displayLink && !self.displayLink.isPaused); +} + +- (void)stopAnimating +{ + if (!self.animatedImage) { + [super stopAnimating]; + return; + } + + self.loopCountdown = 0; + + self.displayLink.paused = YES; +} + +- (void)startAnimating +{ + if (!self.animatedImage) { + [super startAnimating]; + return; + } + + if (self.isAnimating) { + return; + } + + self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax; + + self.displayLink.paused = NO; +} + +- (void)changeKeyframe:(CADisplayLink *)displayLink +{ + if (self.currentFrameIndex >= [self.animatedImage.images count]) { + return; + } + self.accumulator += fmin(displayLink.duration, kMaxTimeStep); + + while (self.accumulator >= self.animatedImage.frameDurations[self.currentFrameIndex]) { + self.accumulator -= self.animatedImage.frameDurations[self.currentFrameIndex]; + if (++self.currentFrameIndex >= [self.animatedImage.images count]) { + if (--self.loopCountdown == 0) { + [self stopAnimating]; + return; + } + self.currentFrameIndex = 0; + } + self.currentFrameIndex = MIN(self.currentFrameIndex, [self.animatedImage.images count] - 1); + self.currentFrame = [self.animatedImage getFrameWithIndex:self.currentFrameIndex]; + [self.layer setNeedsDisplay]; + } +} + +- (void)displayLayer:(CALayer *)layer +{ + if (!self.animatedImage || [self.animatedImage.images count] == 0) { + return; + } + //NSLog(@"display index: %luu", (unsigned long)self.currentFrameIndex); + if(self.currentFrame && ![self.currentFrame isKindOfClass:[NSNull class]]) + layer.contents = (__bridge id)([self.currentFrame CGImage]); +} 
+ +- (void)didMoveToWindow +{ + [super didMoveToWindow]; + if (self.window) { + [self startAnimating]; + } else { + dispatch_async(dispatch_get_main_queue(), ^{ + if (!self.window) { + [self stopAnimating]; + } + }); + } +} + +- (void)didMoveToSuperview +{ + [super didMoveToSuperview]; + if (self.superview) { + //Has a superview, make sure it has a displayLink + [self displayLink]; + } else { + //Doesn't have superview, let's check later if we need to remove the displayLink + dispatch_async(dispatch_get_main_queue(), ^{ + [self displayLink]; + }); + } +} + +- (void)setHighlighted:(BOOL)highlighted +{ + if (!self.animatedImage) { + [super setHighlighted:highlighted]; + } +} + +- (UIImage *)image +{ + return self.animatedImage ?: [super image]; +} + +- (CGSize)sizeThatFits:(CGSize)size +{ + return self.image.size; +} + +@end + diff --git a/OrderScheduling/Video/YFTimerTool/YFTimer.h b/OrderScheduling/Video/YFTimerTool/YFTimer.h new file mode 100644 index 0000000..13678c5 --- /dev/null +++ b/OrderScheduling/Video/YFTimerTool/YFTimer.h @@ -0,0 +1,32 @@ +// +// YFTimer.h +// Timer_Demo +//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved. +// + +#import + +@protocol YFTimerDelegate +@optional +-(void)toDoThingsWhenTimeCome:(NSTimeInterval)interval; + +@end + +@interface YFTimer : NSObject +// 定时器的间隔 +@property(nonatomic,assign)NSTimeInterval interval; + +// 添加代理 +-(void)timerAddDelegate:(id)delegate; + +// 取消代理 +-(void)timerDeleteDelegate:(id)delegate; + +// 创建定时器 +-(void)fireTimeWithInterval:(NSTimeInterval)interval; + +// 取消定时器 +-(void)invalidate; + +@end + diff --git a/OrderScheduling/Video/YFTimerTool/YFTimer.m b/OrderScheduling/Video/YFTimerTool/YFTimer.m new file mode 100644 index 0000000..ba02f28 --- /dev/null +++ b/OrderScheduling/Video/YFTimerTool/YFTimer.m @@ -0,0 +1,87 @@ +// +// YFTimer.m +// Timer_Demo +//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved. 
+// + +#import "YFTimer.h" +#import "YFTimerManager.h" + +@interface YFTimer() +// NSPointerArray 可以让数组中的引用是弱引用 +// 关于NSPointerArray的使用 https://blog.csdn.net/weixin_34387468/article/details/90334534 +// 所有定时器的代理 +@property(nonatomic,strong)NSPointerArray *delegates; +// 定时器 +@property (nonatomic,strong)dispatch_source_t timer; + +@end + +@implementation YFTimer + +// 添加定时器 +-(void)fireTimeWithInterval:(NSTimeInterval)interval{ + + self.interval = interval; + + dispatch_source_t timer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue()); + dispatch_source_set_timer(timer, dispatch_walltime(NULL, 0), interval * NSEC_PER_SEC, 0); //每多少秒触发timer,误差多少秒 + dispatch_source_set_event_handler(timer, ^{ + // 定时器触发时执行的 block + [self isTimeToDoThing]; + }); + dispatch_resume(timer); + + self.timer = timer; + +} + +// 取消定时器 +-(void)invalidate{ + self.delegates = nil; + self.timer = nil; +} + +// 添加代理 +-(void)timerAddDelegate:(id)delegate{ + if (![self.delegates.allObjects containsObject:delegate]) { + + [self.delegates addPointer:NULL]; + [self.delegates compact]; + + [self.delegates addPointer:(__bridge void * _Nullable)(delegate)]; + } +} + +// 取消代理 +-(void)timerDeleteDelegate:(id)delegate{ + if ([self.delegates.allObjects containsObject:delegate]) { + NSInteger index = [self.delegates.allObjects indexOfObject:delegate]; + [self.delegates removePointerAtIndex:index]; + } +} + +// 倒计时要做的事 +-(void)isTimeToDoThing{ + + if (self.delegates.allObjects.count == 0) { + [YFTimerManager invalidateTimerForTimeInterval:self.interval]; + return; + } + + for (iddelegate in self.delegates.allObjects) { + if (delegate && [delegate respondsToSelector:@selector(toDoThingsWhenTimeCome:)]) { + [delegate toDoThingsWhenTimeCome:self.interval]; + } + } + +} + +-(NSPointerArray *)delegates{ + if (!_delegates) { + _delegates = [NSPointerArray weakObjectsPointerArray]; + } + return _delegates; +} + +@end diff --git a/OrderScheduling/Video/YFTimerTool/YFTimerManager.h 
b/OrderScheduling/Video/YFTimerTool/YFTimerManager.h new file mode 100644 index 0000000..90cd869 --- /dev/null +++ b/OrderScheduling/Video/YFTimerTool/YFTimerManager.h @@ -0,0 +1,58 @@ +// +// YFTimerManager.h +// Timer_Demo +//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved. +// + +#import +#import "YFTimer.h" + +//.h文件 +#define YFSingleTonH(ClassName) +(instancetype)share##ClassName; + +//.m文件 +#define YFSingleTonM(ClassName) \ +static id _instance=nil;\ ++(instancetype)allocWithZone:(struct _NSZone *)zone{\ + static dispatch_once_t onceToken;\ + dispatch_once(&onceToken, ^{\ + _instance=[super allocWithZone:zone];\ + });\ + return _instance;\ +}\ ++(instancetype)share##ClassName{\ + static dispatch_once_t onceToken;\ + dispatch_once(&onceToken, ^{\ + _instance=[[self alloc] init];\ + });\ + return _instance;\ +}\ +-(instancetype)copyWithZone:(NSZone *)zone{\ + return _instance;\ +} + +@protocol YFTimerDelegate; + +@interface YFTimerManager : NSObject + +@property(nonatomic,strong)NSMutableArray *timers; + +YFSingleTonH(YFTimerManager) + +// 添加一个时间间隔是interval的定时器 ++(void)addTimerWithTimeInterval:(NSTimeInterval)interval; + +// 给时间间隔是interval的定时器设置代理 ++(void)addTimerDelegate:(id)delegate forTimeInterval:(NSTimeInterval)interval; + +// 给时间间隔是interval的定时器取消代理 ++(void)deleteTimerDelegate:(id)delegate forTimeInterval:(NSTimeInterval)interval; + +// 取消一个时间间隔是interval的定时器 ++(void)invalidateTimerForTimeInterval:(NSTimeInterval)interval; + +// 取消所有的定时器 ++(void)invalidateAllTimer; + +@end + diff --git a/OrderScheduling/Video/YFTimerTool/YFTimerManager.m b/OrderScheduling/Video/YFTimerTool/YFTimerManager.m new file mode 100644 index 0000000..e5127fc --- /dev/null +++ b/OrderScheduling/Video/YFTimerTool/YFTimerManager.m @@ -0,0 +1,90 @@ +// +// YFTimerManager.m +// Timer_Demo +//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved. 
+// + +#import "YFTimerManager.h" + +@interface YFTimerManager () + +@end + +@implementation YFTimerManager + +YFSingleTonM(YFTimerManager) + +// 添加一个时间间隔是interval的定时器 ++(void)addTimerWithTimeInterval:(NSTimeInterval)interval{ + + for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) { + if (timer.interval == interval) {// 防止重复添加定时器 + return; + } + } + + YFTimer *timer = [[YFTimer alloc] init]; + [timer fireTimeWithInterval:interval]; + + [[YFTimerManager shareYFTimerManager].timers addObject:timer]; + +} + +// 取消一个时间间隔是interval的定时器 ++(void)invalidateTimerForTimeInterval:(NSTimeInterval)interval{ + for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) { + if (timer.interval == interval) {// 防止重复添加定时器 + [timer invalidate]; + [[YFTimerManager shareYFTimerManager].timers removeObject:timer]; + return; + } + } +} + +// 取消所有的定时器 ++(void)invalidateAllTimer{ + + for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) { + [timer invalidate]; + } + [[YFTimerManager shareYFTimerManager].timers removeAllObjects]; +} + +// 给时间间隔是interval的定时器设置代理 ++(void)addTimerDelegate:(id)delegate forTimeInterval :(NSTimeInterval)interval{ + + if ([YFTimerManager shareYFTimerManager].timers.count == 0) { + [self addTimerWithTimeInterval:interval]; + [[YFTimerManager shareYFTimerManager].timers.firstObject timerAddDelegate:delegate]; + }else{ + for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) { + if (timer.interval == interval) {// 防止重复添加定时器 + [timer timerAddDelegate:delegate]; + return; + }else{ + [self addTimerWithTimeInterval:interval]; + [[YFTimerManager shareYFTimerManager].timers.lastObject timerAddDelegate:delegate]; + } + } + } + +} + +// 给时间间隔是interval的定时器取消代理 ++(void)deleteTimerDelegate:(id)delegate forTimeInterval:(NSTimeInterval)interval{ + for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) { + if (timer.interval == interval) {// 防止重复添加定时器 + [timer timerDeleteDelegate:delegate]; + return; + } + } +} + 
+-(NSMutableArray *)timers{ + if (_timers==nil) { + _timers=[[NSMutableArray alloc] init]; + } + return _timers; +} + +@end