报警相关
@@ -35,6 +35,20 @@
|
||||
791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */; };
|
||||
792EE0952AA74E0A00A212AB /* PushNotiCommonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */; };
|
||||
792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */; };
|
||||
7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */; };
|
||||
7938A8252E4055800017508A /* YFTimer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A81E2E4055800017508A /* YFTimer.m */; };
|
||||
7938A8292E4055800017508A /* VideoPlayView.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8062E4055800017508A /* VideoPlayView.m */; };
|
||||
7938A82B2E4055800017508A /* YFTimerManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8202E4055800017508A /* YFTimerManager.m */; };
|
||||
7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A80E2E4055800017508A /* AAPLEAGLLayer.m */; };
|
||||
7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8162E4055800017508A /* PCMStreamPlayer.m */; };
|
||||
7938A82F2E4055800017508A /* SRWebSocket.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8182E4055800017508A /* SRWebSocket.m */; };
|
||||
7938A8312E4055800017508A /* g711.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8102E4055800017508A /* g711.m */; };
|
||||
7938A8332E4055800017508A /* g726.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8122E4055800017508A /* g726.m */; };
|
||||
7938A8352E4055800017508A /* H264DecodeTool.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8142E4055800017508A /* H264DecodeTool.m */; };
|
||||
7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8372E4055D50017508A /* YFProgressHUD.m */; };
|
||||
7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8392E4055D50017508A /* YLGIFImage.m */; };
|
||||
7938A83F2E4055D50017508A /* YLImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 7938A83B2E4055D50017508A /* YLImageView.m */; };
|
||||
7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */; };
|
||||
7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */; };
|
||||
7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */; };
|
||||
7940277E2B43B9B600EC52D4 /* ConditionalSearchTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */; };
|
||||
@@ -60,7 +74,6 @@
|
||||
79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */; };
|
||||
79CECC242A8B16D400B95D8B /* VehicleMonitoringListController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */; };
|
||||
79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */; };
|
||||
79CECC282A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */; };
|
||||
79DD0DAA2A9481BC00768FE7 /* NotificationAuthTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DA92A9481BC00768FE7 /* NotificationAuthTool.swift */; };
|
||||
79DD0DB12A94B3DB00768FE7 /* EmptyView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB02A94B3DB00768FE7 /* EmptyView.swift */; };
|
||||
79DD0DB42A95F00B00768FE7 /* Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79DD0DB32A95F00B00768FE7 /* Extension.swift */; };
|
||||
@@ -68,6 +81,7 @@
|
||||
79E434252AA1919400AEB16C /* CommonAlertView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434242AA1919400AEB16C /* CommonAlertView.swift */; };
|
||||
79E434282AA1EFA500AEB16C /* SystemCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434272AA1EFA500AEB16C /* SystemCall.swift */; };
|
||||
79E4342A2AA5833F00AEB16C /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E434292AA5833F00AEB16C /* CustomPicker.swift */; };
|
||||
79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */; };
|
||||
79EAD8142A7B86610036E093 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8132A7B86610036E093 /* Assets.xcassets */; };
|
||||
79EAD8172A7B86610036E093 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */; };
|
||||
79FB75EC2A988EC000DB00A4 /* MessageCenterTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79FB75EB2A988EC000DB00A4 /* MessageCenterTool.swift */; };
|
||||
@@ -145,6 +159,32 @@
|
||||
791887C52A84D9DF007EA0C1 /* DispatchOrderController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DispatchOrderController.swift; sourceTree = "<group>"; };
|
||||
792EE0942AA74E0A00A212AB /* PushNotiCommonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonView.swift; sourceTree = "<group>"; };
|
||||
792EE0962AA74E5800A212AB /* PushNotiCommonTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PushNotiCommonTool.swift; sourceTree = "<group>"; };
|
||||
7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorHistoryController.swift; sourceTree = "<group>"; };
|
||||
7938A8052E4055800017508A /* VideoPlayView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VideoPlayView.h; sourceTree = "<group>"; };
|
||||
7938A8062E4055800017508A /* VideoPlayView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VideoPlayView.m; sourceTree = "<group>"; };
|
||||
7938A80D2E4055800017508A /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = "<group>"; };
|
||||
7938A80E2E4055800017508A /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = "<group>"; };
|
||||
7938A80F2E4055800017508A /* g711.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = g711.h; sourceTree = "<group>"; };
|
||||
7938A8102E4055800017508A /* g711.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g711.m; sourceTree = "<group>"; };
|
||||
7938A8112E4055800017508A /* g726.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = g726.h; sourceTree = "<group>"; };
|
||||
7938A8122E4055800017508A /* g726.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = g726.m; sourceTree = "<group>"; };
|
||||
7938A8132E4055800017508A /* H264DecodeTool.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = H264DecodeTool.h; sourceTree = "<group>"; };
|
||||
7938A8142E4055800017508A /* H264DecodeTool.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = H264DecodeTool.m; sourceTree = "<group>"; };
|
||||
7938A8152E4055800017508A /* PCMStreamPlayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PCMStreamPlayer.h; sourceTree = "<group>"; };
|
||||
7938A8162E4055800017508A /* PCMStreamPlayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PCMStreamPlayer.m; sourceTree = "<group>"; };
|
||||
7938A8172E4055800017508A /* SRWebSocket.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SRWebSocket.h; sourceTree = "<group>"; };
|
||||
7938A8182E4055800017508A /* SRWebSocket.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SRWebSocket.m; sourceTree = "<group>"; };
|
||||
7938A81D2E4055800017508A /* YFTimer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimer.h; sourceTree = "<group>"; };
|
||||
7938A81E2E4055800017508A /* YFTimer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimer.m; sourceTree = "<group>"; };
|
||||
7938A81F2E4055800017508A /* YFTimerManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFTimerManager.h; sourceTree = "<group>"; };
|
||||
7938A8202E4055800017508A /* YFTimerManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFTimerManager.m; sourceTree = "<group>"; };
|
||||
7938A8362E4055D50017508A /* YFProgressHUD.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YFProgressHUD.h; sourceTree = "<group>"; };
|
||||
7938A8372E4055D50017508A /* YFProgressHUD.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YFProgressHUD.m; sourceTree = "<group>"; };
|
||||
7938A8382E4055D50017508A /* YLGIFImage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLGIFImage.h; sourceTree = "<group>"; };
|
||||
7938A8392E4055D50017508A /* YLGIFImage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YLGIFImage.m; sourceTree = "<group>"; };
|
||||
7938A83A2E4055D50017508A /* YLImageView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YLImageView.h; sourceTree = "<group>"; };
|
||||
7938A83B2E4055D50017508A /* YLImageView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YLImageView.m; sourceTree = "<group>"; };
|
||||
7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = "<group>"; };
|
||||
794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringConfigView.swift; sourceTree = "<group>"; };
|
||||
7940277B2B3E9ECB00EC52D4 /* ConditionalSearchView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchView.swift; sourceTree = "<group>"; };
|
||||
7940277D2B43B9B600EC52D4 /* ConditionalSearchTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConditionalSearchTool.swift; sourceTree = "<group>"; };
|
||||
@@ -170,7 +210,6 @@
|
||||
79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringController.swift; sourceTree = "<group>"; };
|
||||
79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringListController.swift; sourceTree = "<group>"; };
|
||||
79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitorVideoController.swift; sourceTree = "<group>"; };
|
||||
79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleMonitoringVideoDetailController.swift; sourceTree = "<group>"; };
|
||||
79CECC9D2A8E03C200B95D8B /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; };
|
||||
79CECC9F2A8E03CF00B95D8B /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
|
||||
79CECCA12A8E03D900B95D8B /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
|
||||
@@ -196,6 +235,7 @@
|
||||
79E434242AA1919400AEB16C /* CommonAlertView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CommonAlertView.swift; sourceTree = "<group>"; };
|
||||
79E434272AA1EFA500AEB16C /* SystemCall.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SystemCall.swift; sourceTree = "<group>"; };
|
||||
79E434292AA5833F00AEB16C /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = "<group>"; };
|
||||
79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VerticalLoopScrollLabel.swift; sourceTree = "<group>"; };
|
||||
79EAD8072A7B86600036E093 /* OrderScheduling.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = OrderScheduling.app; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
79EAD8132A7B86610036E093 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
|
||||
79EAD8162A7B86610036E093 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
|
||||
@@ -436,6 +476,77 @@
|
||||
path = Tool;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A80B2E4055800017508A /* view */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A8052E4055800017508A /* VideoPlayView.h */,
|
||||
7938A8062E4055800017508A /* VideoPlayView.m */,
|
||||
);
|
||||
path = view;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A80C2E4055800017508A /* Video */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A80B2E4055800017508A /* view */,
|
||||
);
|
||||
path = Video;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A8192E4055800017508A /* VideoTools */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A80D2E4055800017508A /* AAPLEAGLLayer.h */,
|
||||
7938A80E2E4055800017508A /* AAPLEAGLLayer.m */,
|
||||
7938A80F2E4055800017508A /* g711.h */,
|
||||
7938A8102E4055800017508A /* g711.m */,
|
||||
7938A8112E4055800017508A /* g726.h */,
|
||||
7938A8122E4055800017508A /* g726.m */,
|
||||
7938A8132E4055800017508A /* H264DecodeTool.h */,
|
||||
7938A8142E4055800017508A /* H264DecodeTool.m */,
|
||||
7938A8152E4055800017508A /* PCMStreamPlayer.h */,
|
||||
7938A8162E4055800017508A /* PCMStreamPlayer.m */,
|
||||
7938A8172E4055800017508A /* SRWebSocket.h */,
|
||||
7938A8182E4055800017508A /* SRWebSocket.m */,
|
||||
);
|
||||
path = VideoTools;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A8212E4055800017508A /* YFTimerTool */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A81D2E4055800017508A /* YFTimer.h */,
|
||||
7938A81E2E4055800017508A /* YFTimer.m */,
|
||||
7938A81F2E4055800017508A /* YFTimerManager.h */,
|
||||
7938A8202E4055800017508A /* YFTimerManager.m */,
|
||||
);
|
||||
path = YFTimerTool;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A8222E4055800017508A /* Video */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A80C2E4055800017508A /* Video */,
|
||||
7938A8192E4055800017508A /* VideoTools */,
|
||||
7938A8212E4055800017508A /* YFTimerTool */,
|
||||
7938A83C2E4055D50017508A /* YFProgressHUD */,
|
||||
);
|
||||
path = Video;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7938A83C2E4055D50017508A /* YFProgressHUD */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7938A8362E4055D50017508A /* YFProgressHUD.h */,
|
||||
7938A8372E4055D50017508A /* YFProgressHUD.m */,
|
||||
7938A8382E4055D50017508A /* YLGIFImage.h */,
|
||||
7938A8392E4055D50017508A /* YLGIFImage.m */,
|
||||
7938A83A2E4055D50017508A /* YLImageView.h */,
|
||||
7938A83B2E4055D50017508A /* YLImageView.m */,
|
||||
);
|
||||
path = YFProgressHUD;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
7949FF122B51093F00B75A21 /* CustomMap */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
@@ -658,6 +769,7 @@
|
||||
children = (
|
||||
79B966372AB0651C00308A8D /* VehicleLogoutView.swift */,
|
||||
794027792B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift */,
|
||||
79EA0A902E3753D100320195 /* VerticalLoopScrollLabel.swift */,
|
||||
);
|
||||
path = View;
|
||||
sourceTree = "<group>";
|
||||
@@ -668,7 +780,8 @@
|
||||
79CECC212A8A2A2900B95D8B /* VehicleMonitoringController.swift */,
|
||||
79CECC232A8B16D400B95D8B /* VehicleMonitoringListController.swift */,
|
||||
79CECC252A8C749B00B95D8B /* VehicleMonitorVideoController.swift */,
|
||||
79CECC272A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift */,
|
||||
7938A64F2E3B51270017508A /* VehicleMonitorHistoryController.swift */,
|
||||
7938A8442E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift */,
|
||||
);
|
||||
path = ViewController;
|
||||
sourceTree = "<group>";
|
||||
@@ -754,6 +867,7 @@
|
||||
791887732A7CD633007EA0C1 /* Rescue */,
|
||||
7918873F2A7CCCCD007EA0C1 /* Main */,
|
||||
79DD0DAB2A94A0EE00768FE7 /* Source */,
|
||||
7938A8222E4055800017508A /* Video */,
|
||||
79EAD8132A7B86610036E093 /* Assets.xcassets */,
|
||||
79EAD8152A7B86610036E093 /* LaunchScreen.storyboard */,
|
||||
79EAD8182A7B86610036E093 /* Info.plist */,
|
||||
@@ -1012,6 +1126,15 @@
|
||||
791887952A80C361007EA0C1 /* WebViewController.swift in Sources */,
|
||||
79CECC192A89EE6A00B95D8B /* ReviewFailedController.swift in Sources */,
|
||||
791887C62A84D9DF007EA0C1 /* DispatchOrderController.swift in Sources */,
|
||||
7938A8252E4055800017508A /* YFTimer.m in Sources */,
|
||||
7938A8292E4055800017508A /* VideoPlayView.m in Sources */,
|
||||
7938A82B2E4055800017508A /* YFTimerManager.m in Sources */,
|
||||
7938A82C2E4055800017508A /* AAPLEAGLLayer.m in Sources */,
|
||||
7938A82E2E4055800017508A /* PCMStreamPlayer.m in Sources */,
|
||||
7938A82F2E4055800017508A /* SRWebSocket.m in Sources */,
|
||||
7938A8312E4055800017508A /* g711.m in Sources */,
|
||||
7938A8332E4055800017508A /* g726.m in Sources */,
|
||||
7938A8352E4055800017508A /* H264DecodeTool.m in Sources */,
|
||||
7918877B2A7CDD1A007EA0C1 /* Initial.swift in Sources */,
|
||||
7940277A2B3BD46B00EC52D4 /* VehicleMonitoringConfigView.swift in Sources */,
|
||||
791887C42A84BFDB007EA0C1 /* Tool.swift in Sources */,
|
||||
@@ -1020,6 +1143,7 @@
|
||||
7918878F2A809E37007EA0C1 /* TimerStrings.swift in Sources */,
|
||||
7918878B2A7CE9E0007EA0C1 /* main.swift in Sources */,
|
||||
791887A02A80CA10007EA0C1 /* RequestList.swift in Sources */,
|
||||
7938A6502E3B51270017508A /* VehicleMonitorHistoryController.swift in Sources */,
|
||||
79FB761C2A9EEC3700DB00A4 /* GroupData.swift in Sources */,
|
||||
79FB76172A9DFC9600DB00A4 /* NotificationSetUpController.swift in Sources */,
|
||||
79CECC262A8C749B00B95D8B /* VehicleMonitorVideoController.swift in Sources */,
|
||||
@@ -1045,9 +1169,13 @@
|
||||
79FB76222A9EEED900DB00A4 /* CommonKeyStrings.swift in Sources */,
|
||||
79DD0DBB2A971EB300768FE7 /* ZDViewController.swift in Sources */,
|
||||
791887BF2A839716007EA0C1 /* EntryStrings.swift in Sources */,
|
||||
7938A83D2E4055D50017508A /* YFProgressHUD.m in Sources */,
|
||||
7938A83E2E4055D50017508A /* YLGIFImage.m in Sources */,
|
||||
7938A83F2E4055D50017508A /* YLImageView.m in Sources */,
|
||||
794FBB192A8F4AF000D57BB8 /* MessageCount.swift in Sources */,
|
||||
791887892A7CE79E007EA0C1 /* LoginController.swift in Sources */,
|
||||
791887A42A80CA30007EA0C1 /* ResponseModel.swift in Sources */,
|
||||
7938A8452E40A5F10017508A /* VehicleMonitoringVideoDetailController.swift in Sources */,
|
||||
794FBB0D2A8F040D00D57BB8 /* HistoryController.swift in Sources */,
|
||||
7918878D2A8081D4007EA0C1 /* ActionStrings.swift in Sources */,
|
||||
791887972A80C6CD007EA0C1 /* LocalizedStrings.swift in Sources */,
|
||||
@@ -1055,12 +1183,12 @@
|
||||
79CECC222A8A2A2900B95D8B /* VehicleMonitoringController.swift in Sources */,
|
||||
791887452A7CD05B007EA0C1 /* MainTabBarController.swift in Sources */,
|
||||
791887792A7CD64C007EA0C1 /* RescueController.swift in Sources */,
|
||||
79EA0A912E3753D100320195 /* VerticalLoopScrollLabel.swift in Sources */,
|
||||
79CECC122A89BD1A00B95D8B /* MessageCenterController.swift in Sources */,
|
||||
794FBB1F2A92F7C300D57BB8 /* WebViewTool.swift in Sources */,
|
||||
791887822A7CE71D007EA0C1 /* AppKeyStrings.swift in Sources */,
|
||||
7940277C2B3E9ECB00EC52D4 /* ConditionalSearchView.swift in Sources */,
|
||||
79CB07CC2AA8465A00154B61 /* UserPermission.swift in Sources */,
|
||||
79CECC282A8CADEA00B95D8B /* VehicleMonitoringVideoDetailController.swift in Sources */,
|
||||
792EE0972AA74E5800A212AB /* PushNotiCommonTool.swift in Sources */,
|
||||
79FB75F02A98A26C00DB00A4 /* AcceptOrderTool.swift in Sources */,
|
||||
791887A12A80CA10007EA0C1 /* ApiList.swift in Sources */,
|
||||
|
||||
BIN
OrderScheduling/.DS_Store
vendored
23
OrderScheduling/Assets.xcassets/VehicleMonitoring/vehicleMonitoring_alarm.imageset/Contents.json
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 2.1 KiB |
|
After Width: | Height: | Size: 3.8 KiB |
|
After Width: | Height: | Size: 6.2 KiB |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level1.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level1@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level1@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 1.9 KiB |
|
After Width: | Height: | Size: 3.6 KiB |
|
After Width: | Height: | Size: 5.8 KiB |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level2.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level2@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level2@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
|
After Width: | Height: | Size: 3.6 KiB |
|
After Width: | Height: | Size: 6.0 KiB |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level3.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level3@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_alarm_level3@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 2.1 KiB |
|
After Width: | Height: | Size: 3.8 KiB |
|
After Width: | Height: | Size: 6.2 KiB |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_channel_alarm_icon.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_channel_alarm_icon@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_channel_alarm_icon@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 347 B |
|
After Width: | Height: | Size: 617 B |
|
After Width: | Height: | Size: 1.1 KiB |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_history_icon.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_history_icon@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_history_icon@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 251 B |
|
After Width: | Height: | Size: 322 B |
|
After Width: | Height: | Size: 457 B |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_offline_icon.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_offline_icon@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_offline_icon@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 316 B |
|
After Width: | Height: | Size: 484 B |
|
After Width: | Height: | Size: 772 B |
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "vehicleMonitoring_video_icon.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_video_icon@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "vehicleMonitoring_video_icon@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 343 B |
|
After Width: | Height: | Size: 514 B |
|
After Width: | Height: | Size: 789 B |
22
OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/Contents.json
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"idiom" : "universal",
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "ww_video_paly@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "ww_video_paly@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
BIN
OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@2x.png
vendored
Normal file
|
After Width: | Height: | Size: 2.8 KiB |
BIN
OrderScheduling/Assets.xcassets/VehicleMonitoring/ww_video_paly.imageset/ww_video_paly@3x.png
vendored
Normal file
|
After Width: | Height: | Size: 5.2 KiB |
@@ -115,6 +115,14 @@ class WebViewController : ZDViewController {
|
||||
}
|
||||
webView.configuration.userContentController.removeScriptMessageHandler(forName: "nativeObject")
|
||||
}
|
||||
|
||||
override func dd_backActionPop(_ isAnimated: Bool) {
|
||||
if webView.canGoBack == true {
|
||||
webView.goBack()
|
||||
}else{
|
||||
super.dd_backActionPop(isAnimated)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension WebViewController : WKScriptMessageHandler {
|
||||
@@ -141,6 +149,11 @@ extension WebViewController : WKScriptMessageHandler {
|
||||
let vc = AdditionalPhotoController(userOrderId: Int(userOrderId) ?? 0, orderCode: orderCode, taskOrderId: Int(taskOrderId) ?? 0,canModify: canModify)
|
||||
navigationController?.pushViewController(vc, animated: true)
|
||||
}
|
||||
}else if action == "goMonitoring" {
|
||||
let params = dict?["params"] as? [String:Any]
|
||||
let code = params?["code"] as? String
|
||||
let vc = VehicleMonitorHistoryController(code: code)
|
||||
navigationController?.pushViewController(vc, animated: true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,6 +40,8 @@ open class WebViewTool : NSObject {
|
||||
case invoiceListInfo = "开票信息"
|
||||
case indexList = "二手车信息"
|
||||
case reportIndex = "报备"
|
||||
case vehicleAlarmDetail = "报警详情"
|
||||
case vehicleAlarmList = "车辆报警"
|
||||
}
|
||||
|
||||
public override init() {
|
||||
@@ -144,6 +146,12 @@ open class WebViewTool : NSObject {
|
||||
case .reportIndex:
|
||||
vc = WebViewController(showNavBar:true, title: WebViewNameEnum.reportIndex.rawValue, url: "\((h5Models?.reportIndex)!)?token=\((USER.token)!)"+(appending ?? ""))
|
||||
break
|
||||
case .vehicleAlarmList:
|
||||
vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmList.rawValue, url: "\((h5Models?.vehicleAlarmList)!)?token=\((USER.token)!)"+(appending ?? ""))
|
||||
break
|
||||
case .vehicleAlarmDetail:
|
||||
vc = WebViewController(showNavBar:true, title: WebViewNameEnum.vehicleAlarmDetail.rawValue, url: "\((h5Models?.vehicleAlarmDetail)!)?token=\((USER.token)!)"+(appending ?? ""))
|
||||
break
|
||||
}
|
||||
|
||||
if let vc {
|
||||
|
||||
@@ -34,7 +34,11 @@ open class ApiList {
|
||||
|
||||
public let vehicleMonitorList = "/supplierAppV2/dispatchApp/order/vehicleMonitorList"
|
||||
|
||||
public let getRtspChannel = "/gps/thirdparty-vehicle-position/getRtspUrl"
|
||||
public let getRealtimeUrl = "/gps/xq-video-monitor/getRealtimeUrl"
|
||||
|
||||
public let getReplayUrl = "/gps/xq-video-monitor/getReplayUrl"
|
||||
|
||||
public let closeHistoryControl = "/gps/xq-video-monitor/closeHistoryControl"
|
||||
|
||||
public let orderPhotoList = "/supplierAppV2/dispatchApp/order/orderPhotoList"
|
||||
|
||||
@@ -69,4 +73,8 @@ open class ApiList {
|
||||
public let getConfigByCode = "/base/baseConfig/getConfigByCode"
|
||||
|
||||
public let thisWeekNumber = "/toc-user/car-admin/thisWeekNumber"
|
||||
|
||||
public let alarmList = "/supplierAppV2/dispatchApp/alarm/alarmList"
|
||||
|
||||
public let getAlarmByCode = "/supplierAppV2/dispatchApp/alarm/getAlarmByCode"
|
||||
}
|
||||
|
||||
@@ -181,6 +181,14 @@ public struct RtspChannelParameters : Encodable {
|
||||
var external : Int = 1
|
||||
}
|
||||
|
||||
public struct GetVideoUrlParameters : Encodable {
|
||||
var vehicleId : Int?
|
||||
var simNumber : String?
|
||||
var channel : Int?
|
||||
var startDate : String?
|
||||
var endDate : String?
|
||||
}
|
||||
|
||||
public struct OrderPhotoListParameters : Encodable {
|
||||
var userOrderId : Int
|
||||
var orderCode : String
|
||||
@@ -234,3 +242,18 @@ public struct GiveUpUserOrderParameters : Encodable {
|
||||
public struct ConfigByCodeParameters : Encodable {
|
||||
var code : String
|
||||
}
|
||||
|
||||
public struct AlarmListParameters : Encodable {
|
||||
var pageNum : Int
|
||||
var pageSize : Int = 50
|
||||
var orderBy : String = "create_time"
|
||||
var supplierId : Int?
|
||||
var handStatus : Int
|
||||
public enum HandStatusEnum : Int {
|
||||
case pending = 0,dealWithByTechnical,dealWithByOperations
|
||||
}
|
||||
}
|
||||
|
||||
public struct GetAlarmByCodeParameters : Encodable {
|
||||
var code : String?
|
||||
}
|
||||
|
||||
@@ -75,8 +75,16 @@ open class RequestList {
|
||||
return DDAF.post(urlString: HOST+API.vehicleMonitorList,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<VehicleMonitorListDataModel>.self)
|
||||
}
|
||||
|
||||
func getRtspChannel<P:Encodable>(prameters:P) -> Single<ResponseModel<[String]>?> {
|
||||
return DDAF.post(urlString: HOST+API.getRtspChannel,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<[String]>.self)
|
||||
func getRealtimeUrl<P:Encodable>(prameters:P) -> Single<ResponseModel<GetVideoUrlDataModel>?> {
|
||||
return DDAF.post(urlString: HOST+API.getRealtimeUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<GetVideoUrlDataModel>.self)
|
||||
}
|
||||
|
||||
func getReplayUrl<P:Encodable>(prameters:P) -> Single<ResponseModel<GetVideoUrlDataModel>?> {
|
||||
return DDAF.post(urlString: HOST+API.getReplayUrl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<GetVideoUrlDataModel>.self)
|
||||
}
|
||||
|
||||
func closeHistoryControl<P:Encodable>(prameters:P) -> Single<ResponseModel<GetVideoUrlDataModel>?> {
|
||||
return DDAF.post(urlString: HOST+API.closeHistoryControl,parameters: prameters,encoding: URLEncodedFormParameterEncoder(destination: .httpBody),headers: [tokenHeader()],responseType: ResponseModel<GetVideoUrlDataModel>.self)
|
||||
}
|
||||
|
||||
func orderPhotoList<P:Encodable>(prameters:P) -> Single<ResponseModel<[OrderPhotoListDataModel]>?> {
|
||||
@@ -144,4 +152,12 @@ open class RequestList {
|
||||
func thisWeekNumber()-> Single<ResponseModel<Int>?> {
|
||||
return DDAF.get(urlString: HOST+API.thisWeekNumber,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel<Int>.self)
|
||||
}
|
||||
|
||||
func alarmList<P:Encodable>(parameters:P) -> Single<ResponseModel<[AlarmListDataModel]>?> {
|
||||
return DDAF.post(urlString: HOST+API.alarmList,parameters: parameters,encoding: JSONParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel<[AlarmListDataModel]>.self)
|
||||
}
|
||||
|
||||
func getAlarmByCode<P:Encodable>(parameters:P) -> Single<ResponseModel<GetAlarmByCodeDataModel>?> {
|
||||
return DDAF.post(urlString: HOST+API.getAlarmByCode,parameters: parameters,encoding: URLEncodedFormParameterEncoder.default,headers: [tokenHeader()],responseType: ResponseModel<GetAlarmByCodeDataModel>.self)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,10 @@ class ResponseModel<T:Decodable> : Decodable {
|
||||
var total : Int?
|
||||
}
|
||||
|
||||
struct CommonError : Error {
|
||||
|
||||
}
|
||||
|
||||
class LoginDataModel : Decodable {
|
||||
var accessToken : LoginDataAccessTokenModel
|
||||
var refreshToken : LoginDataRefreshTokenModel
|
||||
@@ -193,7 +197,7 @@ public class VehicleMonitorListDataModel : Decodable {
|
||||
var taskList : [TaskModel]?
|
||||
var isSelected : Bool? = false
|
||||
var zIndex : Int? = 0
|
||||
|
||||
var number : String?
|
||||
public enum TerminalTypeEnum : String,Decodable {
|
||||
case APP = "APP"
|
||||
case GPS = "GPS"
|
||||
@@ -267,6 +271,8 @@ public class DispatchAppH5UrlDataModel : Decodable {
|
||||
var invoiceListInfo : String
|
||||
var indexList : String
|
||||
var reportIndex : String
|
||||
var vehicleAlarmList : String
|
||||
var vehicleAlarmDetail : String
|
||||
}
|
||||
|
||||
public class VersionCheckDataModel : Decodable {
|
||||
@@ -370,3 +376,22 @@ public class JumpPageDataModel : Decodable {
|
||||
var url : String?
|
||||
var content : String?
|
||||
}
|
||||
|
||||
public class AlarmListDataModel : Decodable {
|
||||
var vehicleName : String?
|
||||
var alarmTypeString : String?
|
||||
var code : String?
|
||||
}
|
||||
|
||||
public class GetAlarmByCodeDataModel : Decodable {
|
||||
var imei : String?
|
||||
var channel : Int?
|
||||
var startTime : String?
|
||||
var endTime : String?
|
||||
var vehicleId : Int?
|
||||
}
|
||||
|
||||
public class GetVideoUrlDataModel : Decodable {
|
||||
var channelList : [String]?
|
||||
var realtimeList : [String]?
|
||||
}
|
||||
|
||||
@@ -9,3 +9,5 @@
|
||||
# ifdef NSFoundationVersionNumber_iOS_9_x_Max
|
||||
# import <UserNotifications/UserNotifications.h>
|
||||
# endif
|
||||
|
||||
# import "VideoPlayView.h"
|
||||
|
||||
@@ -30,11 +30,13 @@ open class AcceptOrderTool : NSObject {
|
||||
// 来到首页的救援中-待接单
|
||||
let tabBarVc = UIApplication.shared.dd_keyWindow.rootViewController as? MainTabBarController
|
||||
let currentNav = tabBarVc?.selectedViewController as? UINavigationController
|
||||
currentNav?.popToRootViewController(animated: false)
|
||||
tabBarVc?.selectedIndex = 0
|
||||
let nav = tabBarVc?.children.first as? UINavigationController
|
||||
let vc = nav?.children.first as? RescueController
|
||||
vc?.categoryView.selectItem(at: 0)
|
||||
currentNav?.popToRootViewController(animated: true)
|
||||
DispatchQueue.main.asyncAfter(deadline: .now()+0.25, execute: {
|
||||
tabBarVc?.selectedIndex = 0
|
||||
let nav = tabBarVc?.children.first as? UINavigationController
|
||||
let vc = nav?.children.first as? RescueController
|
||||
vc?.categoryView.selectItem(at: 0)
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
BIN
OrderScheduling/VehicleMonitoring/.DS_Store
vendored
@@ -0,0 +1,93 @@
|
||||
//
|
||||
// VerticalLoopScrollLabel.swift
|
||||
// OrderScheduling
|
||||
//
|
||||
// Created by 中道 on 2025/7/28.
|
||||
//
|
||||
|
||||
import UIKit
|
||||
|
||||
/// 可点击的纵向轮播标签控件,支持点击获取当前 index
|
||||
class VerticalLoopScrollLabel: UIView {
|
||||
private let scrollView = UIScrollView()
|
||||
private let label1 = UILabel()
|
||||
private let label2 = UILabel()
|
||||
private var timer: Timer?
|
||||
|
||||
/// 点击回调,返回当前索引
|
||||
var onTap: ((Int) -> Void)?
|
||||
|
||||
var items: [String] = [] {
|
||||
didSet {
|
||||
guard !items.isEmpty else { return }
|
||||
currentIdx = 0
|
||||
label1.text = items.first
|
||||
label2.text = items.count > 1 ? items[1] : items.first
|
||||
setNeedsLayout()
|
||||
startLoop()
|
||||
}
|
||||
}
|
||||
|
||||
var interval: TimeInterval = 2.5
|
||||
private var currentIdx = 0
|
||||
private var isLabel1OnTop = true
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
clipsToBounds = true
|
||||
scrollView.isScrollEnabled = false
|
||||
addSubview(scrollView)
|
||||
|
||||
let tap = UITapGestureRecognizer(target: self, action: #selector(handleTap))
|
||||
addGestureRecognizer(tap)
|
||||
|
||||
for label in [label1, label2] {
|
||||
label.font = .systemFont(ofSize: 16, weight: .semibold)
|
||||
label.textColor = .white
|
||||
label.textAlignment = .left
|
||||
label.numberOfLines = 1
|
||||
label.lineBreakMode = .byTruncatingTail
|
||||
scrollView.addSubview(label)
|
||||
}
|
||||
}
|
||||
required init?(coder: NSCoder) { fatalError() }
|
||||
|
||||
override func layoutSubviews() {
|
||||
super.layoutSubviews()
|
||||
scrollView.frame = bounds
|
||||
label1.frame = CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height)
|
||||
label2.frame = CGRect(x: 0, y: bounds.height, width: bounds.width, height: bounds.height)
|
||||
scrollView.contentSize = CGSize(width: bounds.width, height: bounds.height * 2)
|
||||
}
|
||||
|
||||
private func startLoop() {
|
||||
timer?.invalidate()
|
||||
guard items.count > 1 else { return }
|
||||
timer = Timer.scheduledTimer(withTimeInterval: interval, repeats: true, block: { [weak self] _ in
|
||||
self?.scrollNext()
|
||||
})
|
||||
}
|
||||
|
||||
private func scrollNext() {
|
||||
let fromLabel = isLabel1OnTop ? label1 : label2
|
||||
let toLabel = isLabel1OnTop ? label2 : label1
|
||||
let nextIdx = (currentIdx + 1) % items.count
|
||||
toLabel.text = items[nextIdx]
|
||||
// 动画向上滚动
|
||||
UIView.animate(withDuration: 0.35, animations: {
|
||||
self.scrollView.contentOffset = CGPoint(x: 0, y: self.bounds.height)
|
||||
}, completion: { _ in
|
||||
// 滚动完后交换内容和位置
|
||||
self.scrollView.contentOffset = .zero
|
||||
fromLabel.text = toLabel.text
|
||||
self.currentIdx = nextIdx
|
||||
self.isLabel1OnTop.toggle()
|
||||
})
|
||||
}
|
||||
|
||||
@objc private func handleTap() {
|
||||
onTap?(currentIdx)
|
||||
}
|
||||
|
||||
deinit { timer?.invalidate() }
|
||||
}
|
||||
@@ -0,0 +1,467 @@
|
||||
//
|
||||
// VehicleMonitorHistoryController.swift
|
||||
// OrderScheduling
|
||||
//
|
||||
// Created by 中道 on 2025/7/31.
|
||||
//
|
||||
|
||||
import UIKit
|
||||
import DDAutoUIKit_Private
|
||||
import SnapKit
|
||||
import RxSwift
|
||||
import RxCocoa
|
||||
import BRPickerView
|
||||
|
||||
class VehicleMonitorHistoryController : ZDViewController {
|
||||
var code : String?
|
||||
let timeline = TimelineView()
|
||||
let dateView = DateSwitcherView()
|
||||
let realtimeButton = UIButton()
|
||||
let disposeBag = DisposeBag()
|
||||
var fromDateString : String?
|
||||
var toDateString : String?
|
||||
var alarmResponse : ResponseModel<GetAlarmByCodeDataModel>?
|
||||
var refreshSub = ReplaySubject<String?>.create(bufferSize: 1)
|
||||
var replaySub = ReplaySubject<ResponseModel<GetAlarmByCodeDataModel>?>.create(bufferSize: 1)
|
||||
var videoView = VideoPlayView()
|
||||
var closeSub = ReplaySubject<ResponseModel<GetAlarmByCodeDataModel>?>.create(bufferSize: 1)
|
||||
init(code: String?) {
|
||||
self.code = code
|
||||
super.init(nibName: nil, bundle: nil)
|
||||
}
|
||||
|
||||
@MainActor required public init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
deinit {
|
||||
NotificationCenter.default.removeObserver(self)
|
||||
}
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
dd_navigationItemTitle = "监控回放"
|
||||
dd_navigationBarBackgroundColor = .hex("354683")
|
||||
dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)]
|
||||
dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white]
|
||||
|
||||
// 回调当前选中时间
|
||||
timeline.onTimeSelected = {[weak self] hour, minute in
|
||||
self?.fromDateString = self?.dateView.getDateString()?.appending(" \(hour):\(minute):00")
|
||||
if let fromDateString = self?.fromDateString,let dateFormatter = self?.timeline.dateFormatter {
|
||||
let toDate = Date(timeIntervalSince1970: ((NSDate.br_date(from: fromDateString, dateFormat: dateFormatter)?.timeIntervalSince1970 ?? 0) + 600))
|
||||
self?.toDateString = NSDate.br_string(from: toDate, dateFormat: dateFormatter)
|
||||
|
||||
}else{
|
||||
self?.toDateString = self?.fromDateString
|
||||
}
|
||||
|
||||
if let alarmResponse = self?.alarmResponse {
|
||||
self?.replaySub.onNext(alarmResponse)
|
||||
}
|
||||
}
|
||||
|
||||
realtimeButton.setTitleColor(.white, for: .normal)
|
||||
realtimeButton.titleLabel?.font = .dd_systemFont(ofSize: 16, weight: .semibold)
|
||||
realtimeButton.backgroundColor = .dd_hex(light: "0273EE", dark: "0273EE")
|
||||
realtimeButton.setImage(UIImage(named: "vehicleMonitoring_history_icon"), for: .normal)
|
||||
realtimeButton.layer.cornerRadius = 6
|
||||
realtimeButton.setTitle("实时监控", for: .normal)
|
||||
realtimeButton.dd_customize(with: .ImageLeftPaddingTitleRightWithWholeCenter, padding: 10)
|
||||
realtimeButton.rx.tap
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
self?.videoView.endShow()
|
||||
self?.closeSub.onNext(self?.alarmResponse)
|
||||
self?.navigationController?.pushViewController(VehicleMonitorVideoController(vehicleId: self?.alarmResponse?.data?.vehicleId,simNumber: self?.alarmResponse?.data?.imei), animated: true)
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
refreshSub
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.do(onNext: {[weak self] response in
|
||||
self?.view.dd_showHUD()
|
||||
})
|
||||
.flatMapLatest { code in
|
||||
return RQ.getAlarmByCode(parameters: GetAlarmByCodeParameters(code: code))
|
||||
.flatMap { response in
|
||||
return Single.create { single in
|
||||
if response?.success == true {
|
||||
single(.success(response))
|
||||
}else{
|
||||
single(.failure(CommonError()))
|
||||
}
|
||||
return Disposables.create()
|
||||
}
|
||||
}
|
||||
}
|
||||
.retry(when: { (rxError: Observable<Error>) -> Observable<Int> in
|
||||
return rxError.flatMap({ error in
|
||||
return Observable.timer(RxTimeInterval.seconds(5), scheduler: MainScheduler.asyncInstance)
|
||||
})
|
||||
})
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] response in
|
||||
self?.initUI(response: response)
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
replaySub
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.do(onNext: {[weak self] response in
|
||||
self?.view.dd_showHUD()
|
||||
})
|
||||
.flatMap({[weak self] response in
|
||||
return RQ.getReplayUrl(prameters: GetVideoUrlParameters(vehicleId: response?.data?.vehicleId, simNumber: response?.data?.imei, channel: response?.data?.channel, startDate: self?.fromDateString, endDate: self?.toDateString))
|
||||
})
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.do(onNext: {[weak self] response in
|
||||
self?.view.dd_hideHUD()
|
||||
})
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] response in
|
||||
if response?.success == true {
|
||||
if let first = response?.data?.realtimeList?.first {
|
||||
self?.videoView.wsUrl = first
|
||||
if self?.videoView.isPlaying == true {
|
||||
self?.videoView.beginShow()
|
||||
}else{
|
||||
}
|
||||
}
|
||||
}else{
|
||||
self?.view.dd_makeToast(response?.msg)
|
||||
}
|
||||
}).disposed(by: disposeBag)
|
||||
|
||||
closeSub
|
||||
.flatMapLatest { response in
|
||||
return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: response?.data?.imei,channel:response?.data?.channel))
|
||||
}
|
||||
.subscribe(onNext: { response in
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
|
||||
dateView.prevButton.rx.tap
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
if let date = self?.getPreDate(),let dateFormat = self?.dateView.dateFormatter {
|
||||
self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat))
|
||||
}
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
dateView.nextButton.rx.tap
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
if let date = self?.getNextDate(),let dateFormat = self?.dateView.dateFormatter {
|
||||
self?.dateView.setDate(dateString: NSDate.br_string(from: date, dateFormat: dateFormat))
|
||||
}
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
dateView.dateButton.rx.tap
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: { _ in
|
||||
let picker = BRDatePickerView(pickerMode: .YMD)
|
||||
picker.show()
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
NotificationCenter.default.rx
|
||||
.notification(UIApplication.didEnterBackgroundNotification)
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: { [weak self] _ in
|
||||
self?.videoView.endShow()
|
||||
self?.closeSub.onNext(self?.alarmResponse)
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
}
|
||||
|
||||
override func viewWillLayoutSubviews() {
|
||||
super.viewWillLayoutSubviews()
|
||||
if timeline.superview == nil {
|
||||
view.addSubview(videoView)
|
||||
videoView.snp.makeConstraints { make in
|
||||
make.top.equalTo(view.snp.top).offset(view.safeAreaInsets.top)
|
||||
make.width.equalTo(auto(375))
|
||||
make.height.equalTo(auto(300))
|
||||
make.centerX.equalToSuperview()
|
||||
}
|
||||
|
||||
view.addSubview(dateView)
|
||||
dateView.snp.makeConstraints { make in
|
||||
make.centerX.equalToSuperview()
|
||||
make.top.equalTo(videoView.snp.bottom).offset(20)
|
||||
make.height.equalTo(64)
|
||||
make.width.equalTo(200)
|
||||
}
|
||||
|
||||
view.addSubview(timeline)
|
||||
timeline.snp.makeConstraints { make in
|
||||
make.left.right.equalToSuperview()
|
||||
make.height.equalTo(100)
|
||||
make.top.equalTo(dateView.snp.bottom).offset(20)
|
||||
}
|
||||
|
||||
view.addSubview(realtimeButton)
|
||||
realtimeButton.snp.makeConstraints { make in
|
||||
make.left.right.equalToSuperview().inset(30)
|
||||
make.height.equalTo(48)
|
||||
make.top.equalTo(timeline.snp.bottom).offset(20)
|
||||
}
|
||||
|
||||
refreshSub.onNext(code)
|
||||
}
|
||||
}
|
||||
|
||||
func initUI(response: ResponseModel<GetAlarmByCodeDataModel>?) {
|
||||
alarmResponse = response
|
||||
if let startTime = response?.data?.startTime {
|
||||
dateView.setDate(dateString: startTime.components(separatedBy: " ").first)
|
||||
let currentDate = (NSDate.br_date(from: startTime, dateFormat: timeline.dateFormatter) as? NSDate) ?? NSDate()
|
||||
timeline.scrollToHour(hour: currentDate.br_hour,minute: currentDate.br_minute)
|
||||
}
|
||||
}
|
||||
|
||||
func getPreDate() -> Date? {
|
||||
if let dateString = dateView.getDateString() {
|
||||
let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? NSDate
|
||||
return date?.br_getNewDate(toDays: -1)
|
||||
}
|
||||
return Date()
|
||||
}
|
||||
|
||||
func getNextDate() -> Date? {
|
||||
if let dateString = dateView.getDateString(){
|
||||
let date = NSDate.br_date(from: dateString, dateFormat: dateView.dateFormatter) as? NSDate
|
||||
return date?.br_getNewDate(toDays: 1)
|
||||
}
|
||||
return Date()
|
||||
}
|
||||
|
||||
override func dd_backActionPop(_ isAnimated: Bool) {
|
||||
super.dd_backActionPop(isAnimated)
|
||||
closeSub.onNext(alarmResponse)
|
||||
}
|
||||
}
|
||||
|
||||
class DateSwitcherView: UIView {
|
||||
let dateFormatter = "yyyy-MM-dd"
|
||||
|
||||
// MARK: - UI
|
||||
let prevButton: UIButton = {
|
||||
let btn = UIButton(type: .system)
|
||||
btn.setImage(UIImage(systemName: "chevron.left"), for: .normal)
|
||||
btn.tintColor = .darkGray
|
||||
return btn
|
||||
}()
|
||||
|
||||
let nextButton: UIButton = {
|
||||
let btn = UIButton(type: .system)
|
||||
btn.setImage(UIImage(systemName: "chevron.right"), for: .normal)
|
||||
btn.tintColor = .darkGray
|
||||
return btn
|
||||
}()
|
||||
|
||||
let dateButton: UIButton = {
|
||||
let btn = UIButton(type: .system)
|
||||
btn.titleLabel?.font = .boldSystemFont(ofSize: 18)
|
||||
btn.setTitleColor(.black, for: .normal)
|
||||
btn.titleLabel?.textAlignment = .center
|
||||
btn.backgroundColor = .clear
|
||||
return btn
|
||||
}()
|
||||
|
||||
// MARK: - Init
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
setupUI()
|
||||
}
|
||||
required init?(coder: NSCoder) {
|
||||
super.init(coder: coder)
|
||||
setupUI()
|
||||
}
|
||||
|
||||
private func setupUI() {
|
||||
|
||||
addSubview(prevButton)
|
||||
addSubview(dateButton)
|
||||
addSubview(nextButton)
|
||||
|
||||
prevButton.snp.makeConstraints { make in
|
||||
make.left.equalToSuperview().offset(18)
|
||||
make.centerY.equalToSuperview()
|
||||
make.width.height.equalTo(28)
|
||||
}
|
||||
|
||||
nextButton.snp.makeConstraints { make in
|
||||
make.right.equalToSuperview().inset(18)
|
||||
make.centerY.equalToSuperview()
|
||||
make.width.height.equalTo(28)
|
||||
}
|
||||
|
||||
dateButton.snp.makeConstraints { make in
|
||||
make.center.equalToSuperview()
|
||||
}
|
||||
}
|
||||
|
||||
func setDate(dateString: String?) {
|
||||
dateButton.setTitle(dateString, for: .normal)
|
||||
}
|
||||
|
||||
func getDateString() -> String? {
|
||||
return dateButton.titleLabel?.text
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class TimelineView: UIView, UIScrollViewDelegate {
|
||||
var dateFormatter = "yyyy-MM-dd HH:mm:ss"
|
||||
var onTimeSelected: ((String, String) -> Void)?
|
||||
var selectHour : Int = 0
|
||||
var selectMinute : Int = 0
|
||||
private let scrollView = UIScrollView()
|
||||
private let contentView = UIView()
|
||||
private let indicatorView = UIView()
|
||||
private let quarterWidth: CGFloat = 36 // 每15分钟宽度(1小时=4*36=144)
|
||||
private let longTickHeight: CGFloat = 10 // 整点、半点高度
|
||||
private let shortTickHeight: CGFloat = 5 // 15/45分高度
|
||||
private let labelHeight: CGFloat = 18
|
||||
private let totalHours = 24
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
setupUI()
|
||||
}
|
||||
required init?(coder: NSCoder) {
|
||||
super.init(coder: coder)
|
||||
setupUI()
|
||||
}
|
||||
|
||||
private func setupUI() {
|
||||
addSubview(scrollView)
|
||||
scrollView.showsHorizontalScrollIndicator = false
|
||||
scrollView.delegate = self
|
||||
scrollView.snp.makeConstraints { $0.edges.equalToSuperview() }
|
||||
scrollView.addSubview(contentView)
|
||||
|
||||
indicatorView.backgroundColor = .gray
|
||||
addSubview(indicatorView)
|
||||
indicatorView.snp.makeConstraints {
|
||||
$0.centerX.equalToSuperview()
|
||||
$0.width.equalTo(2)
|
||||
$0.top.bottom.equalToSuperview()
|
||||
}
|
||||
}
|
||||
|
||||
override func layoutSubviews() {
|
||||
super.layoutSubviews()
|
||||
let contentWidth = CGFloat(totalHours) * 4 * quarterWidth
|
||||
contentView.frame = CGRect(x: 0, y: 0, width: contentWidth, height: bounds.height)
|
||||
scrollView.contentSize = contentView.bounds.size
|
||||
let inset = bounds.width / 2
|
||||
scrollView.contentInset = UIEdgeInsets(top: 0, left: inset, bottom: 0, right: inset)
|
||||
layoutTicks()
|
||||
}
|
||||
|
||||
private func layoutTicks() {
|
||||
contentView.subviews.forEach { $0.removeFromSuperview() }
|
||||
for hour in 0..<totalHours {
|
||||
for quarter in 0..<4 {
|
||||
let minute = quarter * 15
|
||||
let x = CGFloat(hour) * 4 * quarterWidth + CGFloat(quarter) * quarterWidth
|
||||
let isMajorTick = (minute == 0 || minute == 30)
|
||||
let tick = UIView()
|
||||
tick.backgroundColor = .gray
|
||||
contentView.addSubview(tick)
|
||||
tick.snp.makeConstraints {
|
||||
$0.left.equalTo(x)
|
||||
$0.width.equalTo(1)
|
||||
$0.top.equalToSuperview()
|
||||
$0.height.equalTo(isMajorTick ? longTickHeight : shortTickHeight)
|
||||
}
|
||||
if minute == 0 || minute == 30 {
|
||||
let label = UILabel()
|
||||
label.font = UIFont.systemFont(ofSize: 12)
|
||||
label.textColor = .darkGray
|
||||
// 显示 xx:00 或 xx:30
|
||||
label.text = String(format: "%02d:%02d", hour, minute)
|
||||
label.textAlignment = .center
|
||||
contentView.addSubview(label)
|
||||
label.snp.makeConstraints {
|
||||
$0.centerX.equalTo(tick)
|
||||
$0.top.equalTo(tick.snp.bottom).offset(2)
|
||||
$0.height.equalTo(labelHeight)
|
||||
$0.width.equalTo(44)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// 画末尾 24:00 的长刻度线
|
||||
let x = CGFloat(totalHours) * 4 * quarterWidth
|
||||
let tick = UIView()
|
||||
tick.backgroundColor = .gray
|
||||
contentView.addSubview(tick)
|
||||
tick.snp.makeConstraints {
|
||||
$0.left.equalTo(x)
|
||||
$0.width.equalTo(1)
|
||||
$0.top.equalToSuperview()
|
||||
$0.height.equalTo(longTickHeight)
|
||||
}
|
||||
let label = UILabel()
|
||||
label.font = UIFont.systemFont(ofSize: 12)
|
||||
label.textColor = .darkGray
|
||||
label.text = "24:00"
|
||||
label.textAlignment = .center
|
||||
contentView.addSubview(label)
|
||||
label.snp.makeConstraints {
|
||||
$0.centerX.equalTo(tick)
|
||||
$0.top.equalTo(tick.snp.bottom).offset(2)
|
||||
$0.height.equalTo(labelHeight)
|
||||
$0.width.equalTo(44)
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - 选中时间(分钟级)
|
||||
// 这个在用户停止拖拽时调用
|
||||
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
|
||||
fireTimeSelectedCallback(scrollView)
|
||||
}
|
||||
|
||||
// 这个在程序性滚动结束(比如setContentOffset动画)时调用
|
||||
func scrollViewDidEndScrollingAnimation(_ scrollView: UIScrollView) {
|
||||
fireTimeSelectedCallback(scrollView)
|
||||
}
|
||||
|
||||
// 这个在用户用力一甩但还没松手时调用(可选)
|
||||
func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) {
|
||||
if !decelerate {
|
||||
fireTimeSelectedCallback(scrollView)
|
||||
}
|
||||
}
|
||||
|
||||
// 封装一个实际的回调方法
|
||||
private func fireTimeSelectedCallback(_ scrollView: UIScrollView) {
|
||||
let centerX = scrollView.contentOffset.x + bounds.width / 2
|
||||
let totalTimelineWidth = CGFloat(totalHours) * 4 * quarterWidth
|
||||
let totalMinutes = CGFloat(totalHours * 60)
|
||||
let minutePerPt = totalMinutes / totalTimelineWidth
|
||||
let selectedMinute = Int((centerX * minutePerPt).rounded())
|
||||
let hour = min(max(selectedMinute / 60, 0), totalHours)
|
||||
let minute = min(max(selectedMinute % 60, 0), 59)
|
||||
let hourString = (hour < 10) ? ("0"+"\(hour)") : "\(hour)"
|
||||
let minString = (minute < 10) ? ("0"+"\(minute)") : "\(minute)"
|
||||
selectHour = hour
|
||||
self.selectMinute = minute
|
||||
onTimeSelected?(hourString, minString)
|
||||
}
|
||||
|
||||
func scrollToHour(hour: Int, minute: Int = 0, animated: Bool = true) {
|
||||
// 计算偏移,分钟级
|
||||
let totalMinutes = hour * 60 + minute
|
||||
let quarter = CGFloat(totalMinutes) / 15.0
|
||||
let offset = quarter * quarterWidth - bounds.width / 2
|
||||
scrollView.setContentOffset(CGPoint(x: offset, y: 0), animated: animated)
|
||||
}
|
||||
}
|
||||
@@ -16,18 +16,65 @@ import RxRelay
|
||||
import RxCocoa
|
||||
import DDProgressHUDKit_Private
|
||||
|
||||
extension VehicleMonitorVideoController {
|
||||
func addActions() {
|
||||
extension VehicleMonitorVideoController : JXCategoryViewDelegate {
|
||||
public func categoryView(_ categoryView: JXCategoryBaseView!, didSelectedItemAt index: Int) {
|
||||
videoView.endShow()
|
||||
RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: simNumber,channel: channel))
|
||||
.subscribe {[weak self] _ in
|
||||
if let wsUrl = self?.videos[index] {
|
||||
self?.videoView.wsUrl = wsUrl
|
||||
self?.channel = self?.channels[index]
|
||||
}
|
||||
}
|
||||
.disposed(by: disposeBag)
|
||||
}
|
||||
}
|
||||
|
||||
open class VehicleMonitorVideoController : ZDViewController {
|
||||
private var vehicleId : Int?
|
||||
private var simNumber : String?
|
||||
private let vehicleMonitorVideoView : VehicleMonitorVideoView
|
||||
private let reloadRelay = ReplayRelay<Any?>.create(bufferSize: 1)
|
||||
private let disposeBag = DisposeBag()
|
||||
private var videos : [String] = []
|
||||
var channels : [Int] = []
|
||||
var channel : Int?
|
||||
var closeSub = ReplaySubject<Any?>.create(bufferSize: 1)
|
||||
var videoView = VideoPlayView()
|
||||
public init(vehicleId:Int?,simNumber: String?) {
|
||||
self.vehicleId = vehicleId
|
||||
self.simNumber = simNumber
|
||||
self.vehicleMonitorVideoView = VehicleMonitorVideoView()
|
||||
super.init(nibName: nil, bundle: nil)
|
||||
}
|
||||
|
||||
public required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
deinit {
|
||||
NotificationCenter.default.removeObserver(self)
|
||||
}
|
||||
|
||||
open override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
dd_navigationItemTitle = "实时监控"
|
||||
dd_navigationBarBackgroundColor = .hex("354683")
|
||||
dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)]
|
||||
dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white]
|
||||
|
||||
vehicleMonitorVideoView.categoryView.delegate = self
|
||||
|
||||
reloadRelay
|
||||
.filter({[weak self] _ in
|
||||
return self?.vehicleId != nil
|
||||
return self?.vehicleId != nil && self?.simNumber != nil
|
||||
})
|
||||
.observe(on: MainScheduler.instance)
|
||||
.do(onNext: {[weak self] _ in
|
||||
self?.view.dd_showHUD()
|
||||
})
|
||||
.flatMapLatest {[weak self] _ in
|
||||
return RQ.getRtspChannel(prameters: RtspChannelParameters(vehicleId: (self?.vehicleId)!))
|
||||
return RQ.getRealtimeUrl(prameters: GetVideoUrlParameters(vehicleId: (self?.vehicleId)!,simNumber: (self?.simNumber)!))
|
||||
}
|
||||
.observe(on: MainScheduler.instance)
|
||||
.do(onNext: {[weak self] _ in
|
||||
@@ -37,12 +84,15 @@ extension VehicleMonitorVideoController {
|
||||
.subscribe(onNext: {[weak self] response in
|
||||
if response?.success == true {
|
||||
var channels : [String] = []
|
||||
for index in 0..<(response?.data?.count ?? 0) {
|
||||
channels.append("通道"+"\(index + 1)")
|
||||
self?.channels.removeAll()
|
||||
for index in 0..<(response?.data?.channelList?.count ?? 0) {
|
||||
let channelN = response?.data?.channelList?[index] ?? ""
|
||||
channels.append("通道"+"\(channelN)")
|
||||
self?.channels.append(Int(channelN) ?? 1)
|
||||
}
|
||||
if let data = response?.data {
|
||||
self?.videos.removeAll()
|
||||
self?.videos.append(contentsOf: data)
|
||||
self?.videos.append(contentsOf: data.realtimeList ?? [])
|
||||
}
|
||||
self?.vehicleMonitorVideoView.categoryView.titles = channels
|
||||
self?.vehicleMonitorVideoView.categoryView.reloadData()
|
||||
@@ -54,69 +104,52 @@ extension VehicleMonitorVideoController {
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
reloadRelay.accept(nil)
|
||||
}
|
||||
}
|
||||
|
||||
extension VehicleMonitorVideoController : JXCategoryViewDelegate {
|
||||
public func categoryView(_ categoryView: JXCategoryBaseView!, didSelectedItemAt index: Int) {
|
||||
closeSub
|
||||
.flatMapLatest {[weak self] response in
|
||||
return RQ.closeHistoryControl(prameters: GetVideoUrlParameters(simNumber: self?.simNumber,channel:self?.channel))
|
||||
}
|
||||
.subscribe(onNext: { response in
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
let vc = children.first as? VehicleMonitoringVideoDetailController
|
||||
vc?.playAssetURL(assetURL: URL(string: videos[index])!)
|
||||
NotificationCenter.default.rx
|
||||
.notification(UIApplication.didEnterBackgroundNotification)
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: { [weak self] _ in
|
||||
self?.videoView.endShow()
|
||||
self?.closeSub.onNext(nil)
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
}
|
||||
}
|
||||
|
||||
open class VehicleMonitorVideoController : ZDViewController {
|
||||
private let vehicleId : Int?
|
||||
private let vehicleMonitorVideoView : VehicleMonitorVideoView
|
||||
private let reloadRelay = ReplayRelay<Any?>.create(bufferSize: 1)
|
||||
private let disposeBag = DisposeBag()
|
||||
private var videos : [String] = []
|
||||
|
||||
public init(vehicleId:Int?) {
|
||||
self.vehicleId = vehicleId
|
||||
self.vehicleMonitorVideoView = VehicleMonitorVideoView()
|
||||
super.init(nibName: nil, bundle: nil)
|
||||
}
|
||||
|
||||
public required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
open override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
dd_navigationItemTitle = "视频监控"
|
||||
dd_navigationBarBackgroundColor = .hex("354683")
|
||||
dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.white(alpha: 0.7),.font:UIFont.mediumFont(17)]
|
||||
dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.white]
|
||||
dd_backBarButtonItem?.tintColor = .hex("000000")
|
||||
|
||||
vehicleMonitorVideoView.categoryView.delegate = self
|
||||
view.addSubview(vehicleMonitorVideoView)
|
||||
vehicleMonitorVideoView.snp.makeConstraints { make in
|
||||
make.top.equalToSuperview().offset(CGRectGetHeight(UIApplication.shared.dd_statusBarFrame)+CGRectGetHeight(navigationController?.navigationBar.frame ?? .zero))
|
||||
make.left.right.bottom.equalToSuperview()
|
||||
open override func viewWillLayoutSubviews() {
|
||||
super.viewWillLayoutSubviews()
|
||||
if vehicleMonitorVideoView.superview == nil {
|
||||
view.addSubview(vehicleMonitorVideoView)
|
||||
vehicleMonitorVideoView.snp.makeConstraints { make in
|
||||
make.top.equalToSuperview().offset(view.safeAreaInsets.top)
|
||||
make.left.right.bottom.equalToSuperview()
|
||||
}
|
||||
|
||||
vehicleMonitorVideoView.addSubview(videoView)
|
||||
videoView.snp.makeConstraints { make in
|
||||
make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10))
|
||||
make.width.equalTo(auto(375))
|
||||
make.height.equalTo(auto(300))
|
||||
make.centerX.equalToSuperview()
|
||||
}
|
||||
reloadRelay.accept(nil)
|
||||
}
|
||||
|
||||
let videoDetailVc = VehicleMonitoringVideoDetailController(assetURL: nil)
|
||||
videoDetailVc.dd_navigationBarBackgroundColor = .white
|
||||
videoDetailVc.dd_navigationBarTitleTextAttributes = [.foregroundColor : UIColor.hex("000000"),.font:UIFont.mediumFont(17)]
|
||||
videoDetailVc.dd_navigationBarBarButtonItemAttributes = [.foregroundColor : UIColor.hex("000000")]
|
||||
|
||||
addChild(videoDetailVc)
|
||||
vehicleMonitorVideoView.addSubview(videoDetailVc.view)
|
||||
videoDetailVc.view.snp.makeConstraints { make in
|
||||
make.top.equalTo(vehicleMonitorVideoView.categoryView.snp.bottom).offset(auto(10))
|
||||
make.width.equalTo(auto(375))
|
||||
make.height.equalTo(auto(300))
|
||||
make.centerX.equalToSuperview()
|
||||
}
|
||||
|
||||
addActions()
|
||||
}
|
||||
|
||||
|
||||
open override func dd_backActionPop(_ isAnimated: Bool) {
|
||||
super.dd_backActionPop(isAnimated)
|
||||
videoView.endShow()
|
||||
closeSub.onNext(nil)
|
||||
}
|
||||
|
||||
open override var preferredStatusBarStyle: UIStatusBarStyle {
|
||||
return .default
|
||||
return .lightContent
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -51,15 +51,22 @@ extension VehicleMonitoringController {
|
||||
.do(onNext: {[weak self] _ in
|
||||
self?.view.dd_showHUD()
|
||||
})
|
||||
.flatMapLatest { _ in
|
||||
return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo())
|
||||
.flatMapLatest {[weak self] _ in
|
||||
guard let self = self else {
|
||||
return Single.just((
|
||||
nil as ResponseModel<VehicleMonitorListDataModel>?,
|
||||
nil as ResponseModel<GeneralInfoDataModel>?,
|
||||
[] as [AlarmListDataModel]
|
||||
))
|
||||
}
|
||||
return Single.zip(RQ.vehicleMonitorList(),RQ.generalInfo(),self.getAllAlarmList(pageNum: 1, alarmList: []))
|
||||
}
|
||||
.observe(on: MainScheduler.instance)
|
||||
.do(onNext: {[weak self] _,_ in
|
||||
.do(onNext: {[weak self] _,_,_ in
|
||||
self?.view.dd_hideHUD()
|
||||
})
|
||||
.observe(on: MainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] response,generalInfo in
|
||||
.subscribe(onNext: {[weak self] response,generalInfo,alarmList in
|
||||
if generalInfo?.success == true {
|
||||
/// 如果list列表数量为0的话就显示当前位置
|
||||
if let lat = generalInfo?.data?.addressLat,let lon = generalInfo?.data?.addressLon {
|
||||
@@ -109,6 +116,22 @@ extension VehicleMonitoringController {
|
||||
self?.vehicleMonitoringView.vehicleMonitoringPannelView.categoryView.reloadData()
|
||||
}
|
||||
|
||||
if alarmList.count > 0 {
|
||||
self?.alarmList = alarmList
|
||||
|
||||
var items : [String] = []
|
||||
for i in 0..<alarmList.count {
|
||||
let item = alarmList[i]
|
||||
items.append((item.alarmTypeString ?? "")+":"+(item.vehicleName ?? ""))
|
||||
}
|
||||
self?.vehicleMonitoringView.offlineView.offLineLabel.items = items
|
||||
self?.vehicleMonitoringView.offlineView.isHidden = false
|
||||
self?.vehicleMonitoringView.alarmView.isHidden = false
|
||||
}else{
|
||||
self?.vehicleMonitoringView.offlineView.isHidden = true
|
||||
self?.vehicleMonitoringView.alarmView.isHidden = true
|
||||
}
|
||||
|
||||
/// 从详情页返回
|
||||
self?.vehicleMonitoringListDetailView.backButton.sendActions(for: .touchUpInside)
|
||||
|
||||
@@ -206,13 +229,7 @@ extension VehicleMonitoringController {
|
||||
.observe(on: MainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
if let view = self?.vehicleConfigView {
|
||||
var isShowMonitoring = false
|
||||
if USER.supplierType == 1 && self?.selectedModel?.terminalType == VehicleMonitorListDataModel.ItemModel.TerminalTypeEnum.GPS.rawValue {
|
||||
isShowMonitoring = true
|
||||
}else{
|
||||
isShowMonitoring = false
|
||||
}
|
||||
view.showMonitoring(isShowMonitoring)
|
||||
view.showMonitoring(false)
|
||||
ENTRY.showVehicleMonitoringConfigEntry(view: view, name: vehicleMonitoringConfigEntry)
|
||||
}
|
||||
})
|
||||
@@ -249,16 +266,24 @@ extension VehicleMonitoringController {
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
vehicleConfigView.videoGes.rx.event
|
||||
.observe(on: MainScheduler.instance)
|
||||
vehicleMonitoringListDetailView.videoButton.rx.tap
|
||||
.observe(on: ConcurrentMainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
ENTRY.dismiss(name: vehicleMonitoringConfigEntry) {[weak self] in
|
||||
let vc = VehicleMonitorVideoController.init(vehicleId: self?.selectedModel?.vehicleId)
|
||||
self?.navigationController?.pushViewController(vc, animated: true)
|
||||
}
|
||||
self?.navigationController?.pushViewController(VehicleMonitorVideoController(vehicleId: self?.selectedModel?.vehicleId,simNumber: self?.selectedModel?.number), animated: true)
|
||||
})
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
vehicleMonitoringView.alarmView.dd_addTapGesture {
|
||||
WEBTOOL.open(name: .vehicleAlarmList, appending: nil)
|
||||
}
|
||||
|
||||
vehicleMonitoringView.offlineView.offLineLabel.onTap = {[weak self] index in
|
||||
let model = self?.alarmList[index]
|
||||
if let code = model?.code {
|
||||
WEBTOOL.open(name: .vehicleAlarmDetail, appending: "&code=\(code)")
|
||||
}
|
||||
}
|
||||
|
||||
vehicleConfigView.logoutGes.rx.event
|
||||
.observe(on: MainScheduler.instance)
|
||||
.subscribe(onNext: {[weak self] _ in
|
||||
@@ -279,6 +304,24 @@ extension VehicleMonitoringController {
|
||||
.disposed(by: disposeBag)
|
||||
|
||||
}
|
||||
|
||||
func getAllAlarmList(pageNum: Int,alarmList: [AlarmListDataModel]) -> Single<[AlarmListDataModel]> {
|
||||
func recursive(pageNum: Int,alarmList: [AlarmListDataModel]) -> Single<[AlarmListDataModel]> {
|
||||
return RQ.alarmList(parameters: AlarmListParameters(pageNum: pageNum, supplierId: USER.supplierId,handStatus: AlarmListParameters.HandStatusEnum.pending.rawValue))
|
||||
.flatMap { response in
|
||||
if (response?.data?.count ?? 0) == 0 {
|
||||
return Single.create { single in
|
||||
single(.success(alarmList))
|
||||
return Disposables.create()
|
||||
}
|
||||
}
|
||||
let addAlarmList = alarmList + (response?.data ?? [])
|
||||
return recursive(pageNum: pageNum + 1, alarmList: addAlarmList)
|
||||
}
|
||||
}
|
||||
|
||||
return recursive(pageNum: 1, alarmList: [])
|
||||
}
|
||||
}
|
||||
|
||||
extension VehicleMonitoringController : DDMAMapViewDelegate {
|
||||
@@ -721,6 +764,13 @@ extension VehicleMonitoringController {
|
||||
|
||||
vehicleMonitoringListDetailView.updateData(taskModels: vehicleModel.taskList ?? [])
|
||||
|
||||
/// 视频按钮显示规则
|
||||
if USER.supplierType == 1 && vehicleModel.terminalType == VehicleMonitorListDataModel.ItemModel.TerminalTypeEnum.GPS.rawValue {
|
||||
vehicleMonitoringListDetailView.videoButton.isHidden = false
|
||||
}else{
|
||||
vehicleMonitoringListDetailView.videoButton.isHidden = true
|
||||
}
|
||||
|
||||
/// 当为max时收回pannelView
|
||||
if pannelPanGes.panGesValue.expandLevel == .max {
|
||||
previousStateOfPannelView = .max
|
||||
@@ -833,6 +883,7 @@ open class VehicleMonitoringController : ZDViewController {
|
||||
private var vehicleLogoutModel : VehicleMonitorListDataModel.ItemModel?
|
||||
private var vehicleLogoutRelay = ReplayRelay<VehicleMonitorListDataModel.ItemModel?>.create(bufferSize: 1)
|
||||
|
||||
private var alarmList : [AlarmListDataModel] = []
|
||||
private let disposeBag = DisposeBag()
|
||||
|
||||
open override func viewDidLoad() {
|
||||
@@ -990,16 +1041,29 @@ open class VehicleMonitoringView : DDView {
|
||||
public let coverView : DDView
|
||||
public let tapGes : UITapGestureRecognizer
|
||||
public let panGes : UIPanGestureRecognizer
|
||||
public let offlineView : VehicleDeviceOffLineView
|
||||
public let alarmView : VehicleAlarmView
|
||||
public init(titles: [String]) {
|
||||
vehicleMonitoringPannelView = VehicleMonitoringPannelView(titles:titles)
|
||||
maMapView = DDMAMapView()
|
||||
coverView = DDView()
|
||||
tapGes = UITapGestureRecognizer()
|
||||
panGes = UIPanGestureRecognizer()
|
||||
offlineView = VehicleDeviceOffLineView()
|
||||
alarmView = VehicleAlarmView()
|
||||
super.init(frame: .zero)
|
||||
|
||||
maMapView.maMapView.isRotateCameraEnabled = false
|
||||
addSubview(maMapView)
|
||||
|
||||
offlineView.offlineIconImageView.image = UIImage(named: "vehicleMonitoring_offline_icon")
|
||||
offlineView.backgroundColor = .dd_hex(light: "FB8958", dark: "FB8958")
|
||||
offlineView.layer.cornerRadius = 6
|
||||
offlineView.isHidden = true
|
||||
addSubview(offlineView)
|
||||
alarmView.imageView.image = UIImage(named: "vehicleMonitoring_alarm")
|
||||
alarmView.isHidden = true
|
||||
addSubview(alarmView)
|
||||
coverView.addGestureRecognizer(tapGes)
|
||||
coverView.addGestureRecognizer(panGes)
|
||||
coverView.isHidden = true
|
||||
@@ -1009,6 +1073,19 @@ open class VehicleMonitoringView : DDView {
|
||||
coverView.snp.makeConstraints { make in
|
||||
make.edges.equalToSuperview()
|
||||
}
|
||||
|
||||
alarmView.snp.makeConstraints { make in
|
||||
make.top.equalToSuperview().offset(10)
|
||||
make.right.equalToSuperview().offset(-10)
|
||||
make.width.height.equalTo(50)
|
||||
}
|
||||
|
||||
offlineView.snp.makeConstraints { make in
|
||||
make.centerY.equalTo(alarmView)
|
||||
make.centerX.equalToSuperview()
|
||||
make.height.equalTo(35)
|
||||
make.width.equalTo(200)
|
||||
}
|
||||
}
|
||||
|
||||
required public init?(coder: NSCoder) {
|
||||
@@ -1016,6 +1093,70 @@ open class VehicleMonitoringView : DDView {
|
||||
}
|
||||
}
|
||||
|
||||
open class VehicleDeviceOffLineView : DDView {
|
||||
let offlineIconImageView : DDImageView
|
||||
let offLineLabel : VerticalLoopScrollLabel
|
||||
public override init(frame: CGRect) {
|
||||
offlineIconImageView = DDImageView()
|
||||
offLineLabel = VerticalLoopScrollLabel()
|
||||
super.init(frame: frame)
|
||||
addSubview(offlineIconImageView)
|
||||
addSubview(offLineLabel)
|
||||
|
||||
offlineIconImageView.snp.makeConstraints { make in
|
||||
make.left.equalTo(15)
|
||||
make.centerY.equalToSuperview()
|
||||
}
|
||||
|
||||
offLineLabel.snp.makeConstraints { make in
|
||||
make.left.equalTo(offlineIconImageView.snp.right).offset(10)
|
||||
make.right.equalToSuperview().offset(-10)
|
||||
make.top.bottom.equalToSuperview().inset(8)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@MainActor required public init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
}
|
||||
|
||||
open class VehicleAlarmView : DDView {
|
||||
public let imageView : DDImageView
|
||||
public let count : DDLabel
|
||||
|
||||
public override init(frame: CGRect) {
|
||||
self.imageView = DDImageView()
|
||||
self.count = DDLabel()
|
||||
super.init(frame: frame)
|
||||
|
||||
addSubview(imageView)
|
||||
count.layer.cornerRadius = 1
|
||||
count.layer.borderColor = UIColor.dd_hex(light: "FFFFFF", dark: "FFFFFF").cgColor
|
||||
count.layer.borderWidth = 0.8
|
||||
count.layer.masksToBounds = true
|
||||
count.backgroundColor = .dd_hex(light: "F93D3D", dark: "F93D3D")
|
||||
count.textColor = .dd_hex(light: "FFFFFF", dark: "FFFFFF")
|
||||
count.font = .dd_systemFont(ofSize: 12, weight: .semibold)
|
||||
addSubview(count)
|
||||
|
||||
imageView.snp.makeConstraints { make in
|
||||
make.centerX.centerY.equalToSuperview()
|
||||
make.width.height.lessThanOrEqualToSuperview()
|
||||
}
|
||||
|
||||
count.snp.makeConstraints { make in
|
||||
make.right.equalTo(imageView.snp.right)
|
||||
make.top.equalTo(imageView.snp.top)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@MainActor required public init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
}
|
||||
|
||||
open class VehicleMonitoringPannelView : DDView {
|
||||
public let radiusView : DDView
|
||||
public let categoryView : JXCategoryNumberView
|
||||
@@ -1212,6 +1353,7 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg
|
||||
public let stateLabel : DDLabel
|
||||
public let vehicleLabel : DDLabel
|
||||
public let settingButton : DDButton
|
||||
public let videoButton : DDButton
|
||||
public let nameLabel : DDLabel
|
||||
public let callButton : DDButton
|
||||
public let containerView : DDView
|
||||
@@ -1235,6 +1377,8 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg
|
||||
vehicleLabel = DDLabel.dd_init(withText: "", font: .mediumFont(auto(14)), textColor: .hex("11142F"))
|
||||
settingButton = DDButton.dd_initCustom()
|
||||
settingButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_setting"), for: .normal)
|
||||
videoButton = DDButton.dd_initCustom()
|
||||
videoButton.setBackgroundImage(UIImage(named: "vehicleMonitoring_video_icon"), for: .normal)
|
||||
nameLabel = DDLabel.dd_init(withText: "", font: .regularFont(auto(14)), textColor: .hex("11142F"))
|
||||
callButton = DDButton.dd_initCustom()
|
||||
callButton.setBackgroundImage(UIImage(named: "vehicleMonitor_call_cell"), for: .normal)
|
||||
@@ -1258,6 +1402,7 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg
|
||||
icon.addSubview(stateLabel)
|
||||
addSubview(vehicleLabel)
|
||||
addSubview(settingButton)
|
||||
addSubview(videoButton)
|
||||
addSubview(nameLabel)
|
||||
addSubview(callButton)
|
||||
|
||||
@@ -1339,12 +1484,19 @@ class VehicleMonitoringListDetailView : DDView, JXCategoryListContainerViewDeleg
|
||||
}
|
||||
|
||||
settingButton.snp.makeConstraints { make in
|
||||
make.left.equalTo(vehicleLabel.snp.right).offset(auto(2.5))
|
||||
make.left.equalTo(vehicleLabel.snp.right).offset(auto(5))
|
||||
make.centerY.equalTo(icon)
|
||||
make.width.equalTo(auto(16))
|
||||
make.height.equalTo(auto(14))
|
||||
}
|
||||
|
||||
videoButton.snp.makeConstraints { make in
|
||||
make.left.equalTo(settingButton.snp.right).offset(auto(10))
|
||||
make.centerY.equalTo(icon)
|
||||
make.width.equalTo(auto(23))
|
||||
make.height.equalTo(auto(13))
|
||||
}
|
||||
|
||||
callButton.snp.makeConstraints { make in
|
||||
make.right.equalTo(-auto(20))
|
||||
make.centerY.equalTo(backButton)
|
||||
|
||||
31
OrderScheduling/Video/Video/view/VideoPlayView.h
Normal file
@@ -0,0 +1,31 @@
|
||||
//
|
||||
// WWVideoReplayView.h
|
||||
// wanwayInternet
|
||||
//made in zhongdao Copyright © 2020 liuchao. All rights reserved.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
#import "AAPLEAGLLayer.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
UIKIT_EXTERN NSString *WWCarVideoReplayLastChannel;
|
||||
|
||||
@protocol WWVideoReplayViewDelegate <NSObject>
|
||||
|
||||
- (void)viedoReplayView:(UIView *)videoView fullScreenAction:(BOOL)fullScreen;
|
||||
|
||||
@end
|
||||
|
||||
@interface VideoPlayView : UIView
|
||||
|
||||
@property (nonatomic,strong)AAPLEAGLLayer *playLayer;
|
||||
@property (nonatomic, weak) id<WWVideoReplayViewDelegate> repalyDelegate;
|
||||
@property (nonatomic, strong) NSString *wsUrl;
|
||||
@property (nonatomic, assign) BOOL isPlaying;
|
||||
|
||||
- (void)beginShow;
|
||||
- (void)endShow;
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
314
OrderScheduling/Video/Video/view/VideoPlayView.m
Normal file
@@ -0,0 +1,314 @@
|
||||
//
|
||||
// WWVideoReplayView.m
|
||||
// wanwayInternet
|
||||
//made in zhongdao Copyright © 2020 liuchao. All rights reserved.
|
||||
//
|
||||
|
||||
#import "VideoPlayView.h"
|
||||
#import "SRWebSocket.h"
|
||||
#import "H264DecodeTool.h"
|
||||
#import "g726.h"
|
||||
#import "g711.h"
|
||||
#import "PCMStreamPlayer.h"
|
||||
#import "YFTimerManager.h"
|
||||
#import "YFProgressHUD.h"
|
||||
|
||||
@interface VideoPlayView ()<SRWebSocketDelegate, H264DecodeFrameCallbackDelegate, NSStreamDelegate,YFTimerDelegate>
|
||||
|
||||
@property (nonatomic,strong) SRWebSocket *websocket;
|
||||
@property (nonatomic,strong) NSMutableData *receivedVideoData;
|
||||
@property (nonatomic,strong) H264DecodeTool *h264Decoder;
|
||||
@property (nonatomic, strong) PCMStreamPlayer *pcmPlayer;
|
||||
@property (nonatomic, assign,getter=isStopPlayBuffer) BOOL stopPlayBuffer;
|
||||
@property (nonatomic, strong) UIButton *playBtn;
|
||||
@property (nonatomic, strong) YFProgressHUD *hud;
|
||||
@end
|
||||
|
||||
@implementation VideoPlayView {
|
||||
g726_state_t *m_state726;
|
||||
}
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame {
|
||||
if (self = [super initWithFrame:frame]) {
|
||||
self.backgroundColor = [UIColor blackColor];
|
||||
|
||||
//g726 to pcm
|
||||
m_state726 = (g726_state_t *)malloc(sizeof(g726_state_t));
|
||||
m_state726 = g726_init(m_state726, 8000*5);//2-16kBits 3-24kBits 4-32kBits 5-40kBits
|
||||
_receivedVideoData = [NSMutableData data];
|
||||
|
||||
//操作按钮
|
||||
[self addSubview:self.playBtn];
|
||||
|
||||
[self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(endShow)]];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)layoutSubviews {
|
||||
[super layoutSubviews];
|
||||
self.playBtn.center = CGPointMake(CGRectGetWidth(self.bounds)/2, CGRectGetHeight(self.bounds)/2);
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
NSLog(@"***** video replay view dealloc");
|
||||
[self stopPlay];
|
||||
_pcmPlayer = nil;
|
||||
_playLayer = nil;
|
||||
[self.websocket close];
|
||||
}
|
||||
|
||||
|
||||
//全屏播放
|
||||
- (void)repalyFullScreenBtnTouchAction:(UIButton *)btn {
|
||||
btn.selected = !btn.selected;
|
||||
if (self.repalyDelegate && [self.repalyDelegate respondsToSelector:@selector(viedoReplayView:fullScreenAction:)]) {
|
||||
[self.repalyDelegate viedoReplayView:self fullScreenAction:btn.isSelected];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)endShow {
|
||||
if (self.hud) {
|
||||
[YFProgressHUD hiddenProgressHUDforView:self];
|
||||
self.hud = nil;
|
||||
}
|
||||
|
||||
self.playBtn.hidden = NO;
|
||||
[self stopPlay];
|
||||
|
||||
self.isPlaying = NO;
|
||||
}
|
||||
|
||||
- (void)stopPlay {
|
||||
|
||||
[YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0];
|
||||
|
||||
if (_websocket != nil && _websocket.readyState == SR_OPEN) {
|
||||
[_websocket close];
|
||||
|
||||
}
|
||||
if (_websocket != nil) {
|
||||
_websocket = nil;
|
||||
}
|
||||
|
||||
if (self.pcmPlayer) { [self.pcmPlayer resetPlay]; }
|
||||
|
||||
if (_playLayer) {
|
||||
[_playLayer resetRenderBuffer];
|
||||
[_playLayer cleanUpTextures];
|
||||
self.stopPlayBuffer = YES;
|
||||
}
|
||||
|
||||
if (_receivedVideoData.length > 0) {
|
||||
[_receivedVideoData resetBytesInRange:NSMakeRange(0, self.receivedVideoData.length)];
|
||||
_receivedVideoData.length = 0;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)beginShow {
|
||||
[self replayBtnTouchWith:self.wsUrl];
|
||||
}
|
||||
|
||||
- (void)replayBtnTouchWith:(NSString *)wsUrl {
|
||||
|
||||
if ([wsUrl isKindOfClass:[NSString class]] && wsUrl.length > 0) {
|
||||
self.stopPlayBuffer = NO;
|
||||
self.playBtn.hidden = YES;
|
||||
[self videoShowWithUrl:wsUrl];
|
||||
self.isPlaying = YES;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
- (void)videoShowWithUrl:(NSString *)url {
|
||||
NSMutableURLRequest *req = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:url]];
|
||||
_websocket = [[SRWebSocket alloc] initWithURLRequest:req];
|
||||
_websocket.delegate = self;
|
||||
|
||||
if (self.pcmPlayer) {
|
||||
[self.pcmPlayer resetPlay];
|
||||
}
|
||||
|
||||
[self.playLayer resetRenderBuffer];
|
||||
[_websocket open];
|
||||
self.hud = [YFProgressHUD showProgressHUDinView:self title:@"努力加载视频中!"];
|
||||
|
||||
}
|
||||
|
||||
//发送心跳包
|
||||
-(void)toDoThingsWhenTimeCome:(NSTimeInterval)interval{
|
||||
|
||||
if (interval == 5.0) {
|
||||
if (self.websocket != nil && self.websocket.readyState == SR_OPEN) {
|
||||
NSLog(@"********* websocket send state %ld", self.websocket.readyState);
|
||||
[self.websocket send:@"0"];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#pragma mark ================ SRWebSocketDelegate =======================
|
||||
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
|
||||
NSLog(@"收到数据了******* %ld %@ %@",webSocket.readyState, [message class], message);
|
||||
|
||||
if ([message isKindOfClass:[NSData class]] == NO) { return; }
|
||||
|
||||
NSData *data = [NSData dataWithData:message];
|
||||
// NSString *aString = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
|
||||
// NSLog(@"\n\n%@\n\n",aString)
|
||||
|
||||
|
||||
if (data.length < 24) { return; }
|
||||
//NSLog(@" --- begin --- %@", data);
|
||||
|
||||
if (self.hud) {
|
||||
[YFProgressHUD hiddenProgressHUDforView:self];
|
||||
self.hud = nil;
|
||||
}
|
||||
|
||||
//sim 卡号在收到的包头里。对讲用到
|
||||
// self.speakSimCardData = [data subdataWithRange:NSMakeRange(8, 6)];
|
||||
|
||||
__weak typeof(self) weakself = self;
|
||||
// g726_state_t *weak_m_state726 = m_state726;
|
||||
|
||||
NSInteger dataLength = data.length;
|
||||
NSInteger dataOffset = 0;
|
||||
|
||||
while (dataOffset < dataLength) {
|
||||
|
||||
//跳过5个字节没用
|
||||
NSData *typeData = [data subdataWithRange:NSMakeRange(dataOffset + 5, 1)]; //62 -> 0110 0010 -> 第1位,和后7位
|
||||
const Byte *byteData = (Byte *)[typeData bytes];
|
||||
|
||||
Byte ptByte = byteData[0];
|
||||
int isComplete = ptByte >> 7; //标志位,是否是完整数据帧边界,根据这个来拼接
|
||||
int loadType = ptByte & 0x7f; //98位视频数据,否则为音频数据
|
||||
|
||||
NSLog(@"%@ -- %d %d", typeData, isComplete, loadType);
|
||||
|
||||
if (loadType == 98) {
|
||||
NSData *videoSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 28, 2)];
|
||||
const Byte *sizeBytes = (Byte *)[videoSizeData bytes];
|
||||
int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff);
|
||||
NSData *videoPerData = [data subdataWithRange:NSMakeRange(dataOffset + 30, size)];
|
||||
|
||||
//NSLog(@"---- one data %@", videoPerData);
|
||||
[weakself.receivedVideoData appendData:videoPerData]; //61 0110 0001
|
||||
|
||||
dataOffset += 30 + size;
|
||||
|
||||
//NSLog(@"size -- %@ %d %ld ",videoSizeData, size, dataOffset);
|
||||
if (isComplete > 0) {
|
||||
//NSLog(@"---- all data %@", self.receivedVideoData);
|
||||
[weakself.h264Decoder decodeNalu:(uint8_t *)[weakself.receivedVideoData bytes] size:(uint32_t)weakself.receivedVideoData.length alive:NO];
|
||||
[weakself.receivedVideoData resetBytesInRange:NSMakeRange(0, weakself.receivedVideoData.length)];
|
||||
weakself.receivedVideoData.length = 0;
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
NSData *audioSizeData = [data subdataWithRange:NSMakeRange(dataOffset + 24, 2)];
|
||||
const Byte *sizeBytes = (Byte *)[audioSizeData bytes];
|
||||
int size = (sizeBytes[0] & 0xff) * 16 * 16 + (sizeBytes[1] & 0xff);//[self intFromData:videoSizeData];
|
||||
NSData *audioData = [data subdataWithRange:NSMakeRange(dataOffset + 26, size)];
|
||||
NSLog(@"---- audio = %d %ld-%d", loadType, dataLength,size);
|
||||
dataOffset += 26 + size;
|
||||
//[self.fileHandle writeData:audioData];
|
||||
|
||||
//G726 40k码率 8000采样频率 5bit采样位数
|
||||
//86 -- 1000 0110 -- loadType=6-G711A 8-G726
|
||||
|
||||
//G726 转码 pcm
|
||||
//ffplay -f s16le -ac 1 -ar 8000 a.pcm
|
||||
//ffplay -f g726le -ar 8000 -ac 1 -code_size 5 -i test.g726
|
||||
int outLen = size*6, iRet = 0;
|
||||
short *outBuffer = (short *)malloc(outLen);
|
||||
unsigned char *audioDataBuffer = (unsigned char *)audioData.bytes;
|
||||
|
||||
if (loadType == 6) {
|
||||
// audio = 6 186-160
|
||||
//iRet = 0, out = 320, {length = 320, bytes = 0x008a008e 00a600d9 ... 0801002d 005a007a }
|
||||
outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ALAW);
|
||||
} else if (loadType == 7) {
|
||||
outLen = g711_decode(outBuffer, &outLen, audioDataBuffer, size, TP_ULAW);
|
||||
} else {
|
||||
//audio = 8 126-100
|
||||
//iRet = 160, out = 320, {length = 320, bytes = 0x0000fcff 1c00f4ff ... 4816c457 0c12a8da }
|
||||
iRet = g726_decode(self->m_state726, outBuffer, audioDataBuffer, size);
|
||||
outLen = iRet*2;
|
||||
}
|
||||
|
||||
//不播放声音
|
||||
if (weakself.pcmPlayer) {
|
||||
[weakself.pcmPlayer playWithData:(Byte *)outBuffer size:outLen];
|
||||
}
|
||||
free(outBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
//NSLog(@" --- end --- ");
|
||||
}
|
||||
|
||||
- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
|
||||
[YFTimerManager addTimerDelegate:self forTimeInterval:5.0];
|
||||
}
|
||||
|
||||
- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
|
||||
if (self.hud) {
|
||||
[YFProgressHUD hiddenProgressHUDforView:self];
|
||||
self.hud = nil;
|
||||
}
|
||||
[YFTimerManager deleteTimerDelegate:self forTimeInterval:5.0];
|
||||
NSLog(@"*********** websocket error %@", error);
|
||||
[YFProgressHUD showToastTitle:(@"视频播放失败,请重试!")];
|
||||
[self endShow];
|
||||
|
||||
}
|
||||
|
||||
#pragma mark ================ H264DecodeFrameCallbackDelegate =======================
|
||||
- (void)gotDecodedFrame:(CVImageBufferRef)imageBuffer {
|
||||
if(imageBuffer) {
|
||||
//解码回来的数据绘制播放
|
||||
if (!self.isStopPlayBuffer) {
|
||||
self.playLayer.pixelBuffer = imageBuffer;
|
||||
CVPixelBufferRelease(imageBuffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - getter
|
||||
|
||||
- (AAPLEAGLLayer *)playLayer {
|
||||
if (_playLayer == nil) {
|
||||
_playLayer = [[AAPLEAGLLayer alloc] initWithFrame:self.bounds];
|
||||
[self.layer insertSublayer:_playLayer atIndex:0];
|
||||
}
|
||||
return _playLayer;;
|
||||
}
|
||||
|
||||
|
||||
- (H264DecodeTool *)h264Decoder {
|
||||
if (_h264Decoder == nil) {
|
||||
_h264Decoder = [[H264DecodeTool alloc] init];
|
||||
_h264Decoder.delegate = self;
|
||||
}
|
||||
return _h264Decoder;
|
||||
}
|
||||
|
||||
- (PCMStreamPlayer *)pcmPlayer {
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (UIButton *)playBtn{
|
||||
if (!_playBtn) {
|
||||
_playBtn = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 80, 80)];
|
||||
_playBtn.center = self.center;
|
||||
[_playBtn setImage:[UIImage imageNamed:@"ww_video_paly"] forState:UIControlStateNormal];
|
||||
[_playBtn addTarget:self action:@selector(beginShow) forControlEvents:UIControlEventTouchUpInside];
|
||||
}
|
||||
return _playBtn;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
20
OrderScheduling/Video/VideoTools/AAPLEAGLLayer.h
Executable file
@@ -0,0 +1,20 @@
|
||||
/*
|
||||
Copyright (C) 2014 Apple Inc. All Rights Reserved.
|
||||
See LICENSE.txt for this sample’s licensing information
|
||||
|
||||
Abstract:
|
||||
|
||||
This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
|
||||
|
||||
*/
|
||||
|
||||
//@import QuartzCore;
|
||||
#include <QuartzCore/QuartzCore.h>
|
||||
#include <CoreVideo/CoreVideo.h>
|
||||
|
||||
@interface AAPLEAGLLayer : CAEAGLLayer
|
||||
@property CVPixelBufferRef pixelBuffer;
|
||||
- (id)initWithFrame:(CGRect)frame;
|
||||
- (void)resetRenderBuffer;
|
||||
- (void) cleanUpTextures;
|
||||
@end
|
||||
595
OrderScheduling/Video/VideoTools/AAPLEAGLLayer.m
Executable file
@@ -0,0 +1,595 @@
|
||||
/*
|
||||
Copyright (C) 2014 Apple Inc. All Rights Reserved.
|
||||
See LICENSE.txt for this sample’s licensing information
|
||||
|
||||
Abstract:
|
||||
|
||||
This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner.
|
||||
|
||||
*/
|
||||
|
||||
#import "AAPLEAGLLayer.h"
|
||||
|
||||
#import <AVFoundation/AVUtilities.h>
|
||||
#import <mach/mach_time.h>
|
||||
#include <AVFoundation/AVFoundation.h>
|
||||
#import <UIKit/UIScreen.h>
|
||||
#include <OpenGLES/EAGL.h>
|
||||
#include <OpenGLES/ES2/gl.h>
|
||||
#include <OpenGLES/ES2/glext.h>
|
||||
#include <CoreFoundation/CoreFoundation.h>
|
||||
#include <CoreFoundation/CFString.h>
|
||||
|
||||
// Uniform index.
|
||||
enum
|
||||
{
|
||||
UNIFORM_Y,
|
||||
UNIFORM_UV,
|
||||
UNIFORM_ROTATION_ANGLE,
|
||||
UNIFORM_COLOR_CONVERSION_MATRIX,
|
||||
NUM_UNIFORMS
|
||||
};
|
||||
GLint uniforms[NUM_UNIFORMS];
|
||||
|
||||
// Attribute index.
|
||||
enum
|
||||
{
|
||||
ATTRIB_VERTEX,
|
||||
ATTRIB_TEXCOORD,
|
||||
NUM_ATTRIBUTES
|
||||
};
|
||||
|
||||
// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)
|
||||
|
||||
// BT.601, which is the standard for SDTV.
|
||||
static const GLfloat kColorConversion601[] = {
|
||||
1.164, 1.164, 1.164,
|
||||
0.0, -0.392, 2.017,
|
||||
1.596, -0.813, 0.0,
|
||||
};
|
||||
|
||||
// BT.709, which is the standard for HDTV.
|
||||
static const GLfloat kColorConversion709[] = {
|
||||
1.164, 1.164, 1.164,
|
||||
0.0, -0.213, 2.112,
|
||||
1.793, -0.533, 0.0,
|
||||
};
|
||||
|
||||
|
||||
|
||||
@interface AAPLEAGLLayer ()
|
||||
{
|
||||
// The pixel dimensions of the CAEAGLLayer.
|
||||
GLint _backingWidth;
|
||||
GLint _backingHeight;
|
||||
|
||||
EAGLContext *_context;
|
||||
CVOpenGLESTextureRef _lumaTexture;
|
||||
CVOpenGLESTextureRef _chromaTexture;
|
||||
|
||||
GLuint _frameBufferHandle;
|
||||
GLuint _colorBufferHandle;
|
||||
|
||||
const GLfloat *_preferredConversion;
|
||||
}
|
||||
@property GLuint program;
|
||||
|
||||
@end
|
||||
@implementation AAPLEAGLLayer
|
||||
@synthesize pixelBuffer = _pixelBuffer;
|
||||
|
||||
-(CVPixelBufferRef) pixelBuffer
|
||||
{
|
||||
return _pixelBuffer;
|
||||
}
|
||||
|
||||
- (void)setPixelBuffer:(CVPixelBufferRef)pb
|
||||
{
|
||||
if(_pixelBuffer) {
|
||||
CVPixelBufferRelease(_pixelBuffer);
|
||||
}
|
||||
_pixelBuffer = CVPixelBufferRetain(pb);
|
||||
|
||||
int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
|
||||
int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
|
||||
[self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
|
||||
}
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame
|
||||
{
|
||||
self = [super init];
|
||||
if (self) {
|
||||
CGFloat scale = [[UIScreen mainScreen] scale];
|
||||
self.contentsScale = scale;
|
||||
|
||||
self.opaque = TRUE;
|
||||
self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]};
|
||||
|
||||
[self setFrame:frame];
|
||||
|
||||
// Set the context into which the frames will be drawn.
|
||||
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
|
||||
|
||||
if (!_context) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
// Set the default conversion to BT.709, which is the standard for HDTV.
|
||||
_preferredConversion = kColorConversion709;
|
||||
|
||||
[self setupGL];
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
// Uploads one decoded frame as GL textures (Y plane, plus UV plane when the
// buffer is bi-planar NV12) and draws it aspect-fit into the layer's
// renderbuffer, then presents it.
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight
{
    // Bail out when the GL context cannot be made current on this thread.
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }

    if(pixelBuffer == NULL) {
        NSLog(@"Pixel buffer is null");
        return;
    }

    CVReturn err;

    size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);

    /*
     Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix.
     */
    CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
    if ( CFStringCompare((CFStringRef)colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
        _preferredConversion = kColorConversion601;
    }
    else {
        _preferredConversion = kColorConversion709;
    }

    /*
     CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef.
     */

    /*
     Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane.
     */
    // NOTE(review): a fresh texture cache is created and destroyed on every
    // frame; caching it in an ivar would avoid the per-frame setup cost.
    CVOpenGLESTextureCacheRef _videoTextureCache;

    // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
    err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
    if (err != noErr) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }

    // Luma (Y) plane goes on texture unit 0 as a single-channel texture.
    glActiveTexture(GL_TEXTURE0);

    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                       _videoTextureCache,
                                                       pixelBuffer,
                                                       NULL,
                                                       GL_TEXTURE_2D,
                                                       GL_RED_EXT,
                                                       frameWidth,
                                                       frameHeight,
                                                       GL_RED_EXT,
                                                       GL_UNSIGNED_BYTE,
                                                       0,
                                                       &_lumaTexture);
    if (err) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }

    glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    if(planeCount == 2) {
        // UV-plane: half resolution in both dimensions (4:2:0), two channels,
        // on texture unit 1.
        glActiveTexture(GL_TEXTURE1);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _videoTextureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_RG_EXT,
                                                           frameWidth / 2,
                                                           frameHeight / 2,
                                                           GL_RG_EXT,
                                                           GL_UNSIGNED_BYTE,
                                                           1,
                                                           &_chromaTexture);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }

    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);

    // Set the view port to the entire view.
    glViewport(0, 0, _backingWidth, _backingHeight);

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    // Use shader program.
    glUseProgram(self.program);
    // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1);
    // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1);
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);

    // Set up the quad vertices with respect to the orientation and aspect ratio of the video.
    CGRect viewBounds = self.bounds;
    CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
    CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);

    // Compute normalized quad coordinates to draw the frame into.
    CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
    CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width,
                                        vertexSamplingRect.size.height/viewBounds.size.height);

    // Normalize the quad vertices.
    if (cropScaleAmount.width > cropScaleAmount.height) {
        normalizedSamplingSize.width = 1.0;
        normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
    }
    else {
        normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
        normalizedSamplingSize.height = 1.0;;
    }

    /*
     The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers.
     Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen.
     */
    GLfloat quadVertexData [] = {
        (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)(-1 * normalizedSamplingSize.height),
        (GLfloat)normalizedSamplingSize.width, (GLfloat)(-1 * normalizedSamplingSize.height),
        (GLfloat)(-1 * normalizedSamplingSize.width), (GLfloat)normalizedSamplingSize.height,
        (GLfloat)normalizedSamplingSize.width, (GLfloat)normalizedSamplingSize.height,
    };

    // Update attribute values.
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
    glEnableVertexAttribArray(ATTRIB_VERTEX);

    /*
     The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system.
     */
    CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
    GLfloat quadTextureData[] = {
        (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect),
        (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMaxY(textureSamplingRect),
        (GLfloat)CGRectGetMinX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect),
        (GLfloat)CGRectGetMaxX(textureSamplingRect), (GLfloat)CGRectGetMinY(textureSamplingRect)
    };

    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
    [_context presentRenderbuffer:GL_RENDERBUFFER];

    [self cleanUpTextures];
    // Periodic texture cache flush every frame
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);

    if(_videoTextureCache) {
        CFRelease(_videoTextureCache);
    }
}
|
||||
|
||||
# pragma mark - OpenGL setup
|
||||
|
||||
// One-time GL setup: creates framebuffers, compiles/links the shader
// program, and loads the uniforms that never change per frame.
- (void)setupGL
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }

    [self setupBuffers];
    [self loadShaders];

    glUseProgram(self.program);

    // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively.
    glUniform1i(uniforms[UNIFORM_Y], 0);
    glUniform1i(uniforms[UNIFORM_UV], 1);
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
}
|
||||
|
||||
#pragma mark - Utilities
|
||||
|
||||
// Configures fixed vertex-attribute state (no depth testing for 2D video)
// and then creates the FBO/renderbuffer pair.
- (void)setupBuffers
{
    glDisable(GL_DEPTH_TEST);

    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);

    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);

    [self createBuffers];
}
|
||||
|
||||
// Creates the framebuffer and its color renderbuffer, binds the layer as
// the renderbuffer's storage, and records the backing pixel dimensions.
- (void) createBuffers
{
    glGenFramebuffers(1, &_frameBufferHandle);
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);

    glGenRenderbuffers(1, &_colorBufferHandle);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);

    // Storage comes from this CAEAGLLayer itself.
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}
|
||||
|
||||
// Deletes the FBO and its color renderbuffer, zeroing the handles so a
// later createBuffers/releaseBuffers cycle starts clean.
- (void) releaseBuffers
{
    if (_frameBufferHandle != 0) {
        glDeleteFramebuffers(1, &_frameBufferHandle);
        _frameBufferHandle = 0;
    }

    if (_colorBufferHandle != 0) {
        glDeleteRenderbuffers(1, &_colorBufferHandle);
        _colorBufferHandle = 0;
    }
}
|
||||
|
||||
// Recreates the render buffers, e.g. after the layer's size changes.
- (void) resetRenderBuffer
{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }

    [self releaseBuffers];
    [self createBuffers];
}
|
||||
|
||||
// Releases the per-frame luma/chroma CVOpenGLESTexture objects and clears
// the ivars so a stale texture is never released twice.
- (void) cleanUpTextures
{
    if (_lumaTexture != NULL) {
        CFRelease(_lumaTexture);
        _lumaTexture = NULL;
    }

    if (_chromaTexture != NULL) {
        CFRelease(_chromaTexture);
        _chromaTexture = NULL;
    }
}
|
||||
|
||||
#pragma mark - OpenGL ES 2 shader compilation
|
||||
|
||||
// Fragment shader: samples the Y and UV planes, maps video-range luma to
// full range, recenters chroma around 0, and converts YUV -> RGB using the
// uniform color-conversion matrix (BT.601 or BT.709).
const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;"
"precision mediump float;"
"uniform sampler2D SamplerY;"
"uniform sampler2D SamplerUV;"
"uniform mat3 colorConversionMatrix;"
"void main()"
"{"
"    mediump vec3 yuv;"
"    lowp vec3 rgb;"
// Subtract constants to map the video range start at 0
"    yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
"    yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
"    rgb = colorConversionMatrix * yuv;"
"    gl_FragColor = vec4(rgb, 1);"
"}";
|
||||
|
||||
// Vertex shader: rotates the quad by `preferredRotation` (radians) around Z
// and passes texture coordinates through to the fragment shader.
const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;"
"attribute vec2 texCoord;"
"uniform float preferredRotation;"
"varying vec2 texCoordVarying;"
"void main()"
"{"
"    mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
"                               sin(preferredRotation), cos(preferredRotation), 0.0, 0.0,"
"                               0.0, 0.0, 1.0, 0.0,"
"                               0.0, 0.0, 0.0, 1.0);"
"    gl_Position = position * rotationMatrix;"
"    texCoordVarying = texCoord;"
"}";
|
||||
|
||||
// Compiles both shaders, links them into self.program, and resolves the
// uniform locations. Returns NO (with all GL objects cleaned up) on any
// failure.
//
// Fix: the original leaked the freshly created program object when either
// shader failed to compile — those early returns never deleted it.
- (BOOL)loadShaders
{
    GLuint vertShader = 0, fragShader = 0;

    // Create the shader program.
    self.program = glCreateProgram();

    if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
        NSLog(@"Failed to compile vertex shader");
        // compileShaderString already deleted the shader object; the program
        // must be deleted here or it leaks.
        glDeleteProgram(self.program);
        self.program = 0;
        return NO;
    }

    if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
        NSLog(@"Failed to compile fragment shader");
        glDeleteShader(vertShader);
        glDeleteProgram(self.program);
        self.program = 0;
        return NO;
    }

    // Attach vertex shader to program.
    glAttachShader(self.program, vertShader);

    // Attach fragment shader to program.
    glAttachShader(self.program, fragShader);

    // Bind attribute locations. This needs to be done prior to linking.
    glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
    glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");

    // Link the program.
    if (![self linkProgram:self.program]) {
        NSLog(@"Failed to link program: %d", self.program);

        if (vertShader) {
            glDeleteShader(vertShader);
            vertShader = 0;
        }
        if (fragShader) {
            glDeleteShader(fragShader);
            fragShader = 0;
        }
        if (self.program) {
            glDeleteProgram(self.program);
            self.program = 0;
        }

        return NO;
    }

    // Get uniform locations.
    uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
    uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
    uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
    uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");

    // Release vertex and fragment shaders — the linked program keeps the
    // binaries; the shader objects themselves are no longer needed.
    if (vertShader) {
        glDetachShader(self.program, vertShader);
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDetachShader(self.program, fragShader);
        glDeleteShader(fragShader);
    }

    return YES;
}
|
||||
|
||||
// Compiles a single shader from source. On success *shader holds the new
// shader object; on failure the shader object is deleted and NO is
// returned (note: *shader is left holding the stale, deleted name).
- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString
{
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &shaderString, NULL);
    glCompileShader(*shader);

#if defined(DEBUG)
    // In debug builds, dump the compiler log even on success (warnings).
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        NSLog(@"Shader compile log:\n%s", log);
        free(log);
    }
#endif

    GLint status = 0;
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        glDeleteShader(*shader);
        return NO;
    }

    return YES;
}
|
||||
|
||||
// Loads shader source from a file URL and compiles it via
// compileShaderString:type:shaderString:.
//
// Fix: the failure log claimed "vertex shader" regardless of `type`; it now
// reports the URL instead of asserting a wrong shader kind.
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
{
    NSError *error;
    NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
    if (sourceString == nil) {
        NSLog(@"Failed to load shader source from %@: %@", URL, [error localizedDescription]);
        return NO;
    }

    const GLchar *source = (GLchar *)[sourceString UTF8String];

    return [self compileShaderString:shader type:type shaderString:source];
}
|
||||
|
||||
// Links the given program object; returns NO if linking failed. In debug
// builds the linker log is printed even on success.
- (BOOL)linkProgram:(GLuint)prog
{
    GLint status;
    glLinkProgram(prog);

#if defined(DEBUG)
    GLint logLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program link log:\n%s", log);
        free(log);
    }
#endif

    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == 0) {
        return NO;
    }

    return YES;
}
|
||||
|
||||
// Asks GL whether `prog` could execute given current GL state, logging the
// validation report when one is produced. Returns NO on validation failure.
- (BOOL)validateProgram:(GLuint)prog
{
    glValidateProgram(prog);

    GLint logLength = 0;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program validate log:\n%s", log);
        free(log);
    }

    GLint status = 0;
    glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
    return status != 0;
}
|
||||
|
||||
// Releases GL objects and the retained pixel buffer.
//
// Fix: the original returned early when the GL context could not be made
// current, which also skipped CVPixelBufferRelease(_pixelBuffer) — a leak,
// since releasing a CVPixelBuffer does not require a current GL context.
- (void)dealloc
{
    // Non-GL cleanup happens unconditionally.
    if(_pixelBuffer) {
        CVPixelBufferRelease(_pixelBuffer);
        _pixelBuffer = NULL;
    }

    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }

    [self cleanUpTextures];

    if (self.program) {
        glDeleteProgram(self.program);
        self.program = 0;
    }
    _context = nil;
}
|
||||
|
||||
@end
|
||||
29
OrderScheduling/Video/VideoTools/H264DecodeTool.h
Normal file
@@ -0,0 +1,29 @@
|
||||
//
|
||||
// H264DecodeTool.h
|
||||
// VideoToolBoxDecodeH264
|
||||
//made in zhongdao Copyright © 2018年 AnDong. All rights reserved.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import <VideoToolbox/VideoToolbox.h>
|
||||
|
||||
/// Receives decoded video frames from H264DecodeTool.
@protocol H264DecodeFrameCallbackDelegate <NSObject>

/// Called with each decoded frame's image buffer.
/// NOTE(review): the original (Chinese) comment said "callback for SPS and
/// PPS data", which does not match the method — confirm intended semantics.
- (void)gotDecodedFrame:(CVImageBufferRef )imageBuffer;

@end
|
||||
|
||||
/// Hardware H.264 decoder built on VideoToolbox. Feed Annex-B NAL units to
/// -decodeNalu:size:alive:; decoded frames are delivered via the delegate.
@interface H264DecodeTool : NSObject

/// Creates the decompression session from the cached SPS/PPS (no-op when a
/// session already exists). Returns YES on success.
-(BOOL)initH264Decoder;

/// Decodes a buffer of Annex-B NAL units (start code 00 00 00 01).
/// `isAlive` selects the start-code matching rule for live vs. playback streams.
-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive;

/// Tears down the session and frees cached parameter sets.
- (void)endDecode;

/// Frame consumer; held weakly.
@property (weak, nonatomic) id<H264DecodeFrameCallbackDelegate> delegate;

@end
|
||||
298
OrderScheduling/Video/VideoTools/H264DecodeTool.m
Normal file
@@ -0,0 +1,298 @@
|
||||
//
|
||||
// H264DecodeTool.m
|
||||
// VideoToolBoxDecodeH264
|
||||
//made in zhongdao Copyright © 2018年 AnDong. All rights reserved.
|
||||
//
|
||||
|
||||
#import "H264DecodeTool.h"
|
||||
|
||||
// Annex-B NAL start code (0x00 0x00 0x00 0x01).
const uint8_t lyStartCode[4] = {0, 0, 0, 1};

@interface H264DecodeTool(){

    // VideoToolbox decompression session.
    VTDecompressionSessionRef _decoderSession;

    // Format description wrapping the SPS and PPS parameter sets.
    CMVideoFormatDescriptionRef _decoderFormatDescription;

    // Cached SPS & PPS (malloc'd copies, without start codes).
    uint8_t *_sps;
    NSInteger _spsSize;
    uint8_t *_pps;
    NSInteger _ppsSize;

}
// Set when new parameter sets arrive (currently unused — see commented-out
// code in -oneDecodeNalu:size:).
@property(nonatomic,assign)BOOL isNewValue;

@end
|
||||
|
||||
@implementation H264DecodeTool
|
||||
|
||||
// Lazily creates the VideoToolbox decompression session from the cached
// SPS/PPS. Returns YES when a usable session exists.
- (BOOL)initH264Decoder{

    // Session already exists — nothing to do.
    if(_decoderSession){
        return YES;
    }

    const uint8_t* const parameterSetPointers[2] = { _sps, _pps };
    const size_t parameterSetSizes[2] = { _spsSize, _ppsSize };

    // Build the video format description from SPS and PPS.
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2, // parameter set count
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4, // NAL start-code length
                                                                          &_decoderFormatDescription);

    if(status == noErr) {
        NSDictionary* destinationPixelBufferAttributes = @{
                                                           (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
                                                           // Hardware decode requires kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                                                           // (or kCVPixelFormatType_420YpCbCr8Planar); iOS produces NV12.
                                                           (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:1280],
                                                           (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:960],
                                                           // NOTE(review): original comment says width/height are swapped
                                                           // vs. the encoder with a factor-of-two relation — confirm
                                                           // against the capture side.
                                                           (id)kCVPixelBufferOpenGLCompatibilityKey : [NSNumber numberWithBool:YES]
                                                           };

        // Route decoded frames to the static didDecompress callback with
        // self as the refcon.
        VTDecompressionOutputCallbackRecord callBackRecord;
        callBackRecord.decompressionOutputCallback = didDecompress;
        callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
        status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                              _decoderFormatDescription,
                                              NULL,
                                              (__bridge CFDictionaryRef)destinationPixelBufferAttributes,
                                              &callBackRecord,
                                              &_decoderSession);
        VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)[NSNumber numberWithInt:1]);
        VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
    } else {
        NSLog(@"IOS8VT: reset decoder session failed status=%d", status);
        return NO;
    }

    return YES;
}
|
||||
|
||||
//解码回调
|
||||
// VTDecompressionSession output callback: retains the decoded pixel buffer
// into the caller-provided CVPixelBufferRef* (sourceFrameRefCon) and
// forwards the frame to the decoder's delegate.
static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;

    // Retain so the buffer survives after this callback returns; the caller
    // of -decode:size: owns (and must release) the +1 reference.
    *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    H264DecodeTool *decoder = (__bridge H264DecodeTool *)decompressionOutputRefCon;
    if (decoder.delegate)
    {
        [decoder.delegate gotDecodedFrame:pixelBuffer];
    }
}
|
||||
|
||||
|
||||
//解码nalu裸数据
|
||||
// Splits a buffer that may contain several Annex-B NAL units on their
// 0x00 0x00 0x00 0x01 start codes and decodes each unit separately.
-(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize alive:(BOOL)isAlive
{

    // flag stays 1 when no interior start code was found (single NAL unit).
    int flag = 1;
    uint8_t * packetBuffer = NULL;
    long packetSize = 0;

    long count = 0, location = 0;
    for (long i = 0 ; i < frameSize; i++) {
        if (frame[i] == 0) {
            count++;
        } else if (frame[i] == 1 && ((isAlive && count == 3) || ( !isAlive && count >= 3)) && i > 3) {
            // Original note: with runs of extra 0x00 bytes a strict count == 3
            // misfires, so playback uses count >= 3; live mode (DQ001 device)
            // uses count == 3.
            if (packetBuffer) {
                free(packetBuffer);
                packetBuffer = NULL;
            }
            // Copy the unit that ended just before this start code.
            packetSize = i - location - 3;

            packetBuffer = (uint8_t *)malloc(packetSize);
            memcpy(packetBuffer, frame+location, packetSize);

            location = i - 3;
            count = 0;

            flag = 0;
            [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize];
        } else {
            count = 0;
        }
    }


    if (flag) {
        // No interior start code: decode the whole buffer as one unit.
        [self oneDecodeNalu:frame size:frameSize];
    } else {
        // Decode the trailing unit after the last start code.
        free(packetBuffer);
        packetBuffer = NULL;
        packetSize = frameSize - location;

        packetBuffer = (uint8_t *)malloc(packetSize);
        memcpy(packetBuffer, frame+location, packetSize);

        // NOTE(review): this final packetBuffer is never freed after the
        // decode call below — looks like a leak; confirm.
        [self oneDecodeNalu:packetBuffer size:(uint32_t)packetSize];
    }
}
|
||||
|
||||
|
||||
// Decodes a single NAL unit: caches SPS/PPS, and hands IDR/P/B slices to
// the VideoToolbox session.
//
// Fixes: (1) the pixel buffer returned by -decode:size: carries a +1 retain
// (taken in didDecompress) that was never released — one CVPixelBuffer
// leaked per decoded frame; (2) repeated SPS/PPS units leaked the
// previously malloc'd parameter-set copies.
-(void)oneDecodeNalu:(uint8_t *)frame size:(uint32_t)frameSize {

    // NAL unit type is the low 5 bits of the byte after the 4-byte start code.
    int nalu_type = (frame[4] & 0x1F);
    CVPixelBufferRef pixelBuffer = NULL;

    // Replace the Annex-B start code with a big-endian NAL length (AVCC
    // framing, as CMBlockBuffer/VideoToolbox expect).
    uint32_t nalSize = (uint32_t)(frameSize - 4);
    uint8_t *pNalSize = (uint8_t*)(&nalSize);
    frame[0] = *(pNalSize + 3);
    frame[1] = *(pNalSize + 2);
    frame[2] = *(pNalSize + 1);
    frame[3] = *(pNalSize);

    switch (nalu_type)
    {
        case 0x05:
            // IDR (key) frame.
            if([self initH264Decoder])
            {
                pixelBuffer = [self decode:frame size:frameSize];
            }
            break;
        case 0x07:
            // SPS — cache a copy without the start code.
            _spsSize = frameSize - 4;
            if (_sps) {
                free(_sps); // fix: previous copy leaked
            }
            _sps = (uint8_t *)malloc(_spsSize);
            memcpy(_sps, &frame[4], _spsSize);
            break;
        case 0x08:
        {
            // PPS — cache a copy without the start code.
            _ppsSize = frameSize - 4;
            if (_pps) {
                free(_pps); // fix: previous copy leaked
            }
            _pps = (uint8_t *)malloc(_ppsSize);
            memcpy(_pps, &frame[4], _ppsSize);
            break;
        }
        default:
        {
            // B/P frame
            if([self initH264Decoder])
            {
                pixelBuffer = [self decode:frame size:frameSize];
            }
            break;
        }
    }

    // -decode:size: returns a +1 retained buffer (retained in the
    // didDecompress callback); balance it here or every frame leaks.
    if (pixelBuffer) {
        CVPixelBufferRelease(pixelBuffer);
    }
}
|
||||
|
||||
|
||||
//解码帧数据
|
||||
// Wraps one AVCC-framed NAL unit in a CMSampleBuffer and decodes it
// synchronously. Returns the decoded pixel buffer with a +1 retain count
// (taken in didDecompress) — the caller must release it — or NULL on failure.
- (CVPixelBufferRef)decode:(uint8_t *)frame size:(uint32_t)frameSize{
    CVPixelBufferRef outputPixelBuffer = NULL;

    CMBlockBufferRef blockBuffer = NULL;

    // Wrap the frame bytes without copying (kCFAllocatorNull: VT must finish
    // with the data before `frame` is freed by the caller).
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL,
                                                         (void *)frame,
                                                         frameSize,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         frameSize,
                                                         FALSE,
                                                         &blockBuffer);
    if (status == kCMBlockBufferNoErr) {

        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {frameSize};

        // Build the sample buffer against the SPS/PPS format description.
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           _decoderFormatDescription ,
                                           1, 0, NULL, 1, sampleSizeArray,
                                           &sampleBuffer);

        if (status == kCMBlockBufferNoErr && sampleBuffer) {
            VTDecodeFrameFlags flags = 0;
            VTDecodeInfoFlags flagOut = 0;
            // Synchronous decode; outputPixelBuffer is filled in by the
            // didDecompress callback via sourceFrameRefCon.
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession,
                                                                      sampleBuffer,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);

            if(decodeStatus == kVTInvalidSessionErr) {
                NSLog(@"IOS8VT: Invalid session, reset decoder session");
            } else if(decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"IOS8VT: decode failed status=%d(Bad data)", decodeStatus);
            } else if(decodeStatus != noErr) {
                NSLog(@"IOS8VT: decode failed status=%d", decodeStatus);
            }
            CFRelease(sampleBuffer);
        }
        CFRelease(blockBuffer);
    }
    // Returns the (caller-owned) decoded pixel buffer.
    return outputPixelBuffer;
}
|
||||
|
||||
// Tears down the decompression session and frees the cached parameter sets.
//
// Fix: _sps/_pps were freed but not NULLed, so a second -endDecode would
// double-free them and a later -initH264Decoder would read dangling
// pointers.
- (void)endDecode{

    if(_decoderSession) {
        VTDecompressionSessionInvalidate(_decoderSession);
        CFRelease(_decoderSession);
        _decoderSession = NULL;
    }

    if(_decoderFormatDescription) {
        CFRelease(_decoderFormatDescription);
        _decoderFormatDescription = NULL;
    }

    if (_sps) {
        free(_sps);
        _sps = NULL; // fix: avoid double free / dangling pointer
    }

    if (_pps) {
        free(_pps);
        _pps = NULL; // fix: avoid double free / dangling pointer
    }

    _ppsSize = _spsSize = 0;
}
|
||||
|
||||
|
||||
|
||||
@end
|
||||
18
OrderScheduling/Video/VideoTools/PCMStreamPlayer.h
Normal file
@@ -0,0 +1,18 @@
|
||||
//
|
||||
// PCMStreamPlayer.h
|
||||
// LinePlayer
|
||||
//made in zhongdao Copyright © 2020 myz. All rights reserved.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/// Streams raw 8 kHz mono 16-bit PCM to the speaker via Audio Queue
/// Services.
@interface PCMStreamPlayer : NSObject

/// Enqueues one PCM frame for playback (starts the queue on first use).
-(void)playWithData:(Byte *)pcmData size:(int)length;
/// Stops playback; the queue is recreated on the next playWithData call.
- (void)resetPlay;
//-(void)stop;
@end

NS_ASSUME_NONNULL_END
|
||||
131
OrderScheduling/Video/VideoTools/PCMStreamPlayer.m
Normal file
@@ -0,0 +1,131 @@
|
||||
//
|
||||
// PCMStreamPlayer.m
|
||||
// LinePlayer
|
||||
//made in zhongdao Copyright © 2020 myz. All rights reserved.
|
||||
//
|
||||
|
||||
#import "PCMStreamPlayer.h"
|
||||
#import <AudioToolbox/AudioToolbox.h>
|
||||
|
||||
|
||||
#define QUEUE_BUFFER_SIZE 6 //队列缓冲个数
|
||||
#define MIN_SIZE_PER_FRAME 600 //每帧最小数据长度
|
||||
|
||||
@interface PCMStreamPlayer() {
|
||||
NSLock *synlock ;//同步控制
|
||||
AudioQueueRef audioQueue;//音频播放队列
|
||||
BOOL audioQueueUsed[QUEUE_BUFFER_SIZE]; //音频缓存是否在使用中
|
||||
AudioStreamBasicDescription audioDescription;//音频参数
|
||||
AudioQueueBufferRef audioQueueBuffers[QUEUE_BUFFER_SIZE];//音频缓冲
|
||||
|
||||
int bufferSizeCount;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation PCMStreamPlayer
|
||||
|
||||
// Designated initializer: sets up the lock and builds the audio queue.
- (instancetype)init {
    self = [super init];
    if (self) {
        bufferSizeCount = 1;
        synlock = [[NSLock alloc] init];
        [self reset];
    }
    return self;
}
|
||||
|
||||
// Tears down any existing queue and rebuilds it for 8 kHz, mono, 16-bit
// signed linear PCM, allocating QUEUE_BUFFER_SIZE playback buffers.
- (void)reset {
    [self stop];

    // Audio format description.
    audioDescription.mSampleRate = 8000; // sample rate
    audioDescription.mFormatID = kAudioFormatLinearPCM;
    audioDescription.mFormatFlags = (kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked);
    audioDescription.mChannelsPerFrame = 1; // mono
    audioDescription.mFramesPerPacket = 1; // one frame per packet
    audioDescription.mBitsPerChannel = 16; // 16-bit samples
    audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel / 8) * audioDescription.mChannelsPerFrame;
    audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame;
    // Play on the audio queue's internal thread.
    AudioQueueNewOutput(&audioDescription, audioPlayerAQInputCallback, (__bridge void*)self, nil, nil, 0, &audioQueue);
    //AudioQueueSetParameter(audioQueue, kAudioQueueParam_Volume, 1.0);
    // Allocate the playback buffers.
    for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) {
        AudioQueueAllocateBuffer(audioQueue, MIN_SIZE_PER_FRAME, &audioQueueBuffers[i]);
    }

}
|
||||
|
||||
// Fix: the queue was never stopped when the player was deallocated; also
// keep the trace log out of release builds.
- (void)dealloc {
    [self stop];
#if defined(DEBUG)
    NSLog(@"***** pcmstream player dealloc");
#endif
}
|
||||
|
||||
// Stops playback and disposes of the queue.
//
// Fix: the original only did AudioQueueStop/Reset and then dropped the
// reference — AudioQueueDispose was never called, so every reset leaked the
// queue and its QUEUE_BUFFER_SIZE allocated buffers. Dispose (which also
// frees the buffers) and clear the in-use flags so a rebuilt queue starts
// with all buffers available.
- (void)stop {
    if (audioQueue) {
        AudioQueueStop(audioQueue, true);
        AudioQueueReset(audioQueue);
        AudioQueueDispose(audioQueue, true); // frees the queue and its buffers
        audioQueue = nil;
        for (int i = 0; i < QUEUE_BUFFER_SIZE; i++) {
            audioQueueUsed[i] = NO;
        }
    }
}
|
||||
|
||||
// Public reset: stops the queue; playWithData will lazily rebuild it.
- (void)resetPlay {
    [self stop];
}
|
||||
|
||||
// Copies one PCM frame into a free queue buffer and enqueues it, lazily
// (re)creating and starting the queue on first use.
//
// Fix: the original memcpy'd `length` bytes into a buffer allocated with
// only MIN_SIZE_PER_FRAME bytes without any bound check — a heap overflow
// for oversized frames — and had no NULL/zero-length guard.
-(void)playWithData:(Byte *)pcmData size:(int)length {
    if (pcmData == NULL || length <= 0) {
        return;
    }
    if (length > MIN_SIZE_PER_FRAME) {
        // Each queue buffer holds at most MIN_SIZE_PER_FRAME bytes; copying
        // more would corrupt the heap. Drop the frame and report it.
        NSLog(@"PCMStreamPlayer: frame of %d bytes exceeds buffer size %d, dropping", length, MIN_SIZE_PER_FRAME);
        return;
    }

    if (audioQueue == nil) { //|| ![self checkBufferHasUsed]
        // First use (or after stop): rebuild and start the queue.
        [self reset];
        AudioQueueStart(audioQueue, NULL);
    }

    [synlock lock];
    AudioQueueBufferRef audioQueueBuffer = NULL;
    // Busy-waits until the playback callback marks a buffer free.
    // NOTE(review): this spins at full speed if the callback stalls —
    // consider a condition variable.
    while (true) {
        audioQueueBuffer = [self getNotUsedBuffer];
        if (audioQueueBuffer != NULL) {
            break;
        }
    }

    audioQueueBuffer->mAudioDataByteSize = length;
    memcpy(audioQueueBuffer->mAudioData, pcmData, length);
    AudioQueueEnqueueBuffer(audioQueue, audioQueueBuffer, 0, NULL);
    [synlock unlock];
}
|
||||
// Audio queue playback callback (runs on the queue's internal thread):
// forwards the finished buffer to the player so it can be marked free.
static void audioPlayerAQInputCallback(void *input, AudioQueueRef audioQueue, AudioQueueBufferRef audioQueueBuffers) {
    PCMStreamPlayer *player = (__bridge PCMStreamPlayer*)input;
    [player playerCallback:audioQueueBuffers];
}
|
||||
|
||||
// 是不是有缓冲在使用中
|
||||
// Returns YES when any queue buffer is currently in flight.
- (BOOL)checkBufferHasUsed
{
    for (int idx = 0; idx < QUEUE_BUFFER_SIZE; idx++) {
        if (audioQueueUsed[idx]) {
            return YES;
        }
    }
    return NO;
}
|
||||
// 获取没有在使用的缓冲
|
||||
// Claims and returns the first free playback buffer (marking it in use),
// or NULL when all buffers are busy.
- (AudioQueueBufferRef)getNotUsedBuffer
{
    for (int idx = 0; idx < QUEUE_BUFFER_SIZE; idx++) {
        if (!audioQueueUsed[idx]) {
            audioQueueUsed[idx] = YES;
            return audioQueueBuffers[idx];
        }
    }
    return NULL;
}
|
||||
|
||||
// 标志缓冲空闲中
|
||||
// Marks the just-played buffer free so playWithData:size: can reuse it.
- (void)playerCallback:(AudioQueueBufferRef)outQB {
    for (int idx = 0; idx < QUEUE_BUFFER_SIZE; idx++) {
        if (audioQueueBuffers[idx] == outQB) {
            audioQueueUsed[idx] = NO;
        }
    }
}
|
||||
|
||||
@end
|
||||
154
OrderScheduling/Video/VideoTools/SRWebSocket.h
Normal file
@@ -0,0 +1,154 @@
|
||||
//
|
||||
// Copyright 2012 Square Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <Security/SecCertificate.h>
|
||||
|
||||
/// Connection lifecycle states, mirroring the readyState values of the
/// W3C WebSocket API.
typedef NS_ENUM(NSInteger, SRReadyState) {
    SR_CONNECTING = 0,
    SR_OPEN = 1,
    SR_CLOSING = 2,
    SR_CLOSED = 3,
};
||||
|
||||
/// WebSocket close status codes (RFC 6455 §7.4.1).
/// Converted from a bare `typedef enum` to NS_ENUM for consistency with
/// SRReadyState above (source-compatible; improves warnings and Swift import).
typedef NS_ENUM(NSInteger, SRStatusCode) {
    // 0–999: Reserved and not used.
    SRStatusCodeNormal = 1000,
    SRStatusCodeGoingAway = 1001,
    SRStatusCodeProtocolError = 1002,
    SRStatusCodeUnhandledType = 1003,
    // 1004 reserved.
    SRStatusNoStatusReceived = 1005,
    SRStatusCodeAbnormal = 1006,
    SRStatusCodeInvalidUTF8 = 1007,
    SRStatusCodePolicyViolated = 1008,
    SRStatusCodeMessageTooBig = 1009,
    SRStatusCodeMissingExtension = 1010,
    SRStatusCodeInternalError = 1011,
    SRStatusCodeServiceRestart = 1012,
    SRStatusCodeTryAgainLater = 1013,
    // 1014: Reserved for future use by the WebSocket standard.
    SRStatusCodeTLSHandshake = 1015,
    // 1016–1999: Reserved for future use by the WebSocket standard.
    // 2000–2999: Reserved for use by WebSocket extensions.
    // 3000–3999: Available for use by libraries and frameworks. May not be used by applications. Available for registration at the IANA via first-come, first-serve.
    // 4000–4999: Available for use by applications.
};
|
||||
|
||||
@class SRWebSocket;
|
||||
|
||||
extern NSString *const SRWebSocketErrorDomain;
|
||||
extern NSString *const SRHTTPResponseErrorKey;
|
||||
|
||||
#pragma mark - SRWebSocketDelegate
|
||||
|
||||
@protocol SRWebSocketDelegate;
|
||||
|
||||
#pragma mark - SRWebSocket
|
||||
|
||||
/// A single-use WebSocket client (SocketRocket).  Create it with one of the
/// initializers below, call -open exactly once, send/receive until closed.
@interface SRWebSocket : NSObject <NSStreamDelegate>

/// Receives lifecycle and message callbacks; see SRWebSocketDelegate below.
@property (nonatomic, weak) id <SRWebSocketDelegate> delegate;

/// Current connection state (see SRReadyState).
@property (nonatomic, readonly) SRReadyState readyState;
@property (nonatomic, readonly, retain) NSURL *url;

/// Raw HTTP headers from the server's handshake response.
@property (nonatomic, readonly) CFHTTPMessageRef receivedHTTPHeaders;

// Optional array of cookies (NSHTTPCookie objects) to apply to the connections
@property (nonatomic, readwrite) NSArray * requestCookies;

// This returns the negotiated protocol.
// It will be nil until after the handshake completes.
@property (nonatomic, readonly, copy) NSString *protocol;

// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol.
- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates;
- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
- (id)initWithURLRequest:(NSURLRequest *)request;

// Some helper constructors.
- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols allowsUntrustedSSLCertificates:(BOOL)allowsUntrustedSSLCertificates;
- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
- (id)initWithURL:(NSURL *)url;

// Delegate queue will be dispatch_main_queue by default.
// You cannot set both OperationQueue and dispatch_queue.
- (void)setDelegateOperationQueue:(NSOperationQueue*) queue;
- (void)setDelegateDispatchQueue:(dispatch_queue_t) queue;

// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes.
- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;

// SRWebSockets are intended for one-time-use only. Open should be called once and only once.
- (void)open;

// Close gracefully; -closeWithCode:reason: lets the caller pick an
// SRStatusCode and a reason string.
- (void)close;
- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;

// Send a UTF8 String or Data.
- (void)send:(id)data;

// Send Data (can be nil) in a ping message.
- (void)sendPing:(NSData *)data;

@end
|
||||
|
||||
#pragma mark - SRWebSocketDelegate
|
||||
|
||||
/// Callbacks delivered by SRWebSocket; the sending socket is always the
/// first argument.
@protocol SRWebSocketDelegate <NSObject>

// message will either be an NSString if the server is using text
// or NSData if the server is using binary.
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message;

@optional

/// Opening handshake completed successfully.
- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
/// The socket failed with an error (see SRWebSocketErrorDomain).
- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error;
/// The socket closed; code is an SRStatusCode value.
- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean;
/// A pong frame arrived in reply to -sendPing:.
- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;

// Return YES to convert messages sent as Text to an NSString. Return NO to skip NSData -> NSString conversion for Text messages. Defaults to YES.
- (BOOL)webSocketShouldConvertTextFrameToString:(SRWebSocket *)webSocket;

@end
|
||||
|
||||
#pragma mark - NSURLRequest (SRCertificateAdditions)
|
||||
|
||||
/// Read-only access to the SSL certificates pinned for a request used to
/// open an SRWebSocket connection.
@interface NSURLRequest (SRCertificateAdditions)

@property (nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates;

@end
|
||||
|
||||
#pragma mark - NSMutableURLRequest (SRCertificateAdditions)
|
||||
|
||||
/// Mutable counterpart: set the certificates to pin before handing the
/// request to -[SRWebSocket initWithURLRequest:...].
@interface NSMutableURLRequest (SRCertificateAdditions)

@property (nonatomic, retain) NSArray *SR_SSLPinnedCertificates;

@end
|
||||
|
||||
#pragma mark - NSRunLoop (SRWebSocket)
|
||||
|
||||
/// The shared run loop SRWebSocket schedules its network streams on by
/// default (per -scheduleInRunLoop:forMode: documentation).
@interface NSRunLoop (SRWebSocket)

+ (NSRunLoop *)SR_networkRunLoop;

@end
|
||||
1921
OrderScheduling/Video/VideoTools/SRWebSocket.m
Normal file
30
OrderScheduling/Video/VideoTools/g711.h
Normal file
@@ -0,0 +1,30 @@
|
||||
/*
|
||||
Copyright (c) 2013-2016 EasyDarwin.ORG. All rights reserved.
|
||||
Github: https://github.com/EasyDarwin
|
||||
WEChat: EasyDarwin
|
||||
Website: http://www.easydarwin.org
|
||||
*/
|
||||
|
||||
#ifndef __G_711_H_
#define __G_711_H_

#include <stdint.h>

/* G.711 companding variant selector for g711_decode(). */
enum _e_g711_tp
{
    TP_ALAW, //G711A
    TP_ULAW //G711U
};

/* One-sample conversions between 16-bit linear PCM and 8-bit companded codes. */
unsigned char linear2alaw(int pcm_val); /* 2's complement (16-bit range) */
int alaw2linear(unsigned char a_val);

unsigned char linear2ulaw(int pcm_val); /* 2's complement (16-bit range) */
int ulaw2linear(unsigned char u_val);

/* Direct transcoding between the two companded formats. */
unsigned char alaw2ulaw(unsigned char aval);
unsigned char ulaw2alaw(unsigned char uval);

/* Decode a whole buffer of G.711 bytes into 16-bit PCM; returns the number
 of output bytes (2 * in_len) or a negative error code. */
int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type);

#endif
|
||||
306
OrderScheduling/Video/VideoTools/g711.m
Normal file
@@ -0,0 +1,306 @@
|
||||
/*
|
||||
* g711.c
|
||||
*
|
||||
* u-law, A-law and linear PCM conversions.
|
||||
*/
|
||||
|
||||
//#include "stdafx.h"
|
||||
#include <stdint.h>
|
||||
#include <stdio.h>
|
||||
#include "g711.h"
|
||||
|
||||
#define SIGN_BIT (0x80) /* Sign bit for a A-law byte. */
|
||||
#define QUANT_MASK (0xf) /* Quantization field mask. */
|
||||
#define NSEGS (8) /* Number of A-law segments. */
|
||||
#define SEG_SHIFT (4) /* Left shift for segment number. */
|
||||
#define SEG_MASK (0x70) /* Segment field mask. */
|
||||
|
||||
static short seg_end[8] = {0xFF, 0x1FF, 0x3FF, 0x7FF,
|
||||
0xFFF, 0x1FFF, 0x3FFF, 0x7FFF};
|
||||
|
||||
/* copy from CCITT G.711 specifications */
|
||||
unsigned char _u2a[128] = { /* u- to A-law conversions */
|
||||
1, 1, 2, 2, 3, 3, 4, 4,
|
||||
5, 5, 6, 6, 7, 7, 8, 8,
|
||||
9, 10, 11, 12, 13, 14, 15, 16,
|
||||
17, 18, 19, 20, 21, 22, 23, 24,
|
||||
25, 27, 29, 31, 33, 34, 35, 36,
|
||||
37, 38, 39, 40, 41, 42, 43, 44,
|
||||
46, 48, 49, 50, 51, 52, 53, 54,
|
||||
55, 56, 57, 58, 59, 60, 61, 62,
|
||||
64, 65, 66, 67, 68, 69, 70, 71,
|
||||
72, 73, 74, 75, 76, 77, 78, 79,
|
||||
81, 82, 83, 84, 85, 86, 87, 88,
|
||||
89, 90, 91, 92, 93, 94, 95, 96,
|
||||
97, 98, 99, 100, 101, 102, 103, 104,
|
||||
105, 106, 107, 108, 109, 110, 111, 112,
|
||||
113, 114, 115, 116, 117, 118, 119, 120,
|
||||
121, 122, 123, 124, 125, 126, 127, 128};
|
||||
|
||||
unsigned char _a2u[128] = { /* A- to u-law conversions */
|
||||
1, 3, 5, 7, 9, 11, 13, 15,
|
||||
16, 17, 18, 19, 20, 21, 22, 23,
|
||||
24, 25, 26, 27, 28, 29, 30, 31,
|
||||
32, 32, 33, 33, 34, 34, 35, 35,
|
||||
36, 37, 38, 39, 40, 41, 42, 43,
|
||||
44, 45, 46, 47, 48, 48, 49, 49,
|
||||
50, 51, 52, 53, 54, 55, 56, 57,
|
||||
58, 59, 60, 61, 62, 63, 64, 64,
|
||||
65, 66, 67, 68, 69, 70, 71, 72,
|
||||
73, 74, 75, 76, 77, 78, 79, 79,
|
||||
80, 81, 82, 83, 84, 85, 86, 87,
|
||||
88, 89, 90, 91, 92, 93, 94, 95,
|
||||
96, 97, 98, 99, 100, 101, 102, 103,
|
||||
104, 105, 106, 107, 108, 109, 110, 111,
|
||||
112, 113, 114, 115, 116, 117, 118, 119,
|
||||
120, 121, 122, 123, 124, 125, 126, 127};
|
||||
|
||||
/*
 * Return the index of the first table entry that is >= val, scanning the
 * 'size' entries in order; returns 'size' when val exceeds them all.
 * Used to map a sample magnitude onto its companding segment.
 */
static int search(int val, short *table, int size)
{
    for (int idx = 0; idx < size; idx++) {
        if (val <= table[idx]) {
            return idx;
        }
    }
    return size;
}
|
||||
|
||||
/*
|
||||
* linear2alaw() - Convert a 16-bit linear PCM value to 8-bit A-law
|
||||
*
|
||||
* linear2alaw() accepts an 16-bit integer and encodes it as A-law data.
|
||||
*
|
||||
* Linear Input Code Compressed Code
|
||||
* ------------------------ ---------------
|
||||
* 0000000wxyza 000wxyz
|
||||
* 0000001wxyza 001wxyz
|
||||
* 000001wxyzab 010wxyz
|
||||
* 00001wxyzabc 011wxyz
|
||||
* 0001wxyzabcd 100wxyz
|
||||
* 001wxyzabcde 101wxyz
|
||||
* 01wxyzabcdef 110wxyz
|
||||
* 1wxyzabcdefg 111wxyz
|
||||
*
|
||||
* For further information see John C. Bellamy's Digital Telephony, 1982,
|
||||
* John Wiley & Sons, pps 98-111 and 472-476.
|
||||
*/
|
||||
/* Encode one 16-bit linear PCM sample (2's complement) as an 8-bit A-law
 byte; see the segment table in the block comment above. */
unsigned char
linear2alaw(
            int pcm_val) /* 2's complement (16-bit range) */
{
    int mask;
    int seg;
    unsigned char aval;

    if (pcm_val >= 0) {
        mask = 0xD5; /* sign (7th) bit = 1 */
    } else {
        mask = 0x55; /* sign bit = 0 */
        pcm_val = -pcm_val - 8; /* magnitude; -8 bias per the CCITT reference code */
    }

    /* Convert the scaled magnitude to segment number. */
    seg = search(pcm_val, seg_end, 8);

    /* Combine the sign, segment, and quantization bits. */

    if (seg >= 8) /* out of range, return maximum value. */
        return (0x7F ^ mask);
    else {
        aval = seg << SEG_SHIFT;
        if (seg < 2)
            aval |= (pcm_val >> 4) & QUANT_MASK;
        else
            aval |= (pcm_val >> (seg + 3)) & QUANT_MASK;
        /* XOR with mask applies the even-bit inversion the A-law line format uses. */
        return (aval ^ mask);
    }
}
|
||||
|
||||
/*
|
||||
* alaw2linear() - Convert an A-law value to 16-bit linear PCM
|
||||
*
|
||||
*/
|
||||
/* Decode one 8-bit A-law byte back to a 16-bit linear PCM sample. */
int
alaw2linear(
            unsigned char a_val)
{
    int t;
    int seg;

    a_val ^= 0x55; /* undo the even-bit inversion applied on encode */

    /* Rebuild the magnitude from quantization bits and segment number. */
    t = (a_val & QUANT_MASK) << 4;
    seg = ((unsigned)a_val & SEG_MASK) >> SEG_SHIFT;
    switch (seg) {
        case 0:
            t += 8;
            break;
        case 1:
            t += 0x108;
            break;
        default:
            t += 0x108;
            t <<= seg - 1;
    }
    /* Sign bit set means a non-negative sample in A-law. */
    return ((a_val & SIGN_BIT) ? t : -t);
}
|
||||
|
||||
#define BIAS (0x84) /* Bias for linear code. */
|
||||
|
||||
/*
|
||||
* linear2ulaw() - Convert a linear PCM value to u-law
|
||||
*
|
||||
* In order to simplify the encoding process, the original linear magnitude
|
||||
* is biased by adding 33 which shifts the encoding range from (0 - 8158) to
|
||||
* (33 - 8191). The result can be seen in the following encoding table:
|
||||
*
|
||||
* Biased Linear Input Code Compressed Code
|
||||
* ------------------------ ---------------
|
||||
* 00000001wxyza 000wxyz
|
||||
* 0000001wxyzab 001wxyz
|
||||
* 000001wxyzabc 010wxyz
|
||||
* 00001wxyzabcd 011wxyz
|
||||
* 0001wxyzabcde 100wxyz
|
||||
* 001wxyzabcdef 101wxyz
|
||||
* 01wxyzabcdefg 110wxyz
|
||||
* 1wxyzabcdefgh 111wxyz
|
||||
*
|
||||
* Each biased linear code has a leading 1 which identifies the segment
|
||||
* number. The value of the segment number is equal to 7 minus the number
|
||||
* of leading 0's. The quantization interval is directly available as the
|
||||
* four bits wxyz. * The trailing bits (a - h) are ignored.
|
||||
*
|
||||
* Ordinarily the complement of the resulting code word is used for
|
||||
* transmission, and so the code word is complemented before it is returned.
|
||||
*
|
||||
* For further information see John C. Bellamy's Digital Telephony, 1982,
|
||||
* John Wiley & Sons, pps 98-111 and 472-476.
|
||||
*/
|
||||
/* Encode one 16-bit linear PCM sample (2's complement) as an 8-bit u-law
 byte; the code word is complemented before return per ISDN convention
 (see the block comment above). */
unsigned char
linear2ulaw(
            int pcm_val) /* 2's complement (16-bit range) */
{
    int mask;
    int seg;
    unsigned char uval;

    /* Get the sign and the magnitude of the value. */
    if (pcm_val < 0) {
        pcm_val = BIAS - pcm_val; /* magnitude, biased by 33 (BIAS >> 2) */
        mask = 0x7F;
    } else {
        pcm_val += BIAS;
        mask = 0xFF;
    }

    /* Convert the scaled magnitude to segment number. */
    seg = search(pcm_val, seg_end, 8);

    /*
     * Combine the sign, segment, quantization bits;
     * and complement the code word.
     */
    if (seg >= 8) /* out of range, return maximum value. */
        return (0x7F ^ mask);
    else {
        uval = (seg << 4) | ((pcm_val >> (seg + 3)) & 0xF);
        return (uval ^ mask);
    }

}
|
||||
|
||||
/*
|
||||
* ulaw2linear() - Convert a u-law value to 16-bit linear PCM
|
||||
*
|
||||
* First, a biased linear code is derived from the code word. An unbiased
|
||||
* output can then be obtained by subtracting 33 from the biased code.
|
||||
*
|
||||
* Note that this function expects to be passed the complement of the
|
||||
* original code word. This is in keeping with ISDN conventions.
|
||||
*/
|
||||
/* Decode one 8-bit u-law byte back to a 16-bit linear PCM sample.
 Expects the complement of the original code word (ISDN convention). */
int
ulaw2linear(
            unsigned char u_val)
{
    int t;

    /* Complement to obtain normal u-law value. */
    u_val = ~u_val;

    /*
     * Extract and bias the quantization bits. Then
     * shift up by the segment number and subtract out the bias.
     */
    t = ((u_val & QUANT_MASK) << 3) + BIAS;
    t <<= ((unsigned)u_val & SEG_MASK) >> SEG_SHIFT;

    return ((u_val & SIGN_BIT) ? (BIAS - t) : (t - BIAS));
}
|
||||
|
||||
/* A-law to u-law conversion */
|
||||
/* Transcode one A-law byte directly to u-law via the _a2u lookup table,
 handling the two sign halves separately. */
unsigned char
alaw2ulaw(
          unsigned char aval)
{
    aval &= 0xff;
    /* XOR with 0xD5/0x55 strips the A-law even-bit inversion before lookup. */
    return ((aval & 0x80) ? (0xFF ^ _a2u[aval ^ 0xD5]) :
            (0x7F ^ _a2u[aval ^ 0x55]));
}
|
||||
|
||||
/* u-law to A-law conversion */
|
||||
/* Transcode one u-law byte directly to A-law via the _u2a lookup table,
 handling the two sign halves separately. */
unsigned char
ulaw2alaw(
          unsigned char uval)
{
    uval &= 0xff;
    /* The table is 1-based (see _u2a), hence the -1 after lookup. */
    return ((uval & 0x80) ? (0xD5 ^ (_u2a[0xFF ^ uval] - 1)) :
            (0x55 ^ (_u2a[0x7F ^ uval] - 1)));
}
|
||||
|
||||
int g711_decode(void *pout_buf, int *pout_len, const void *pin_buf, const int in_len , int type)
|
||||
{
|
||||
int16_t *dst = (int16_t *) pout_buf;
|
||||
uint8_t *src = (uint8_t *) pin_buf;
|
||||
uint32_t i = 0;
|
||||
int Ret = 0;
|
||||
|
||||
if ((NULL == pout_buf) || \
|
||||
(NULL == pout_len) || \
|
||||
(NULL == pin_buf) || \
|
||||
(0 == in_len))
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (*pout_len < 2 * in_len)
|
||||
{
|
||||
return -2;
|
||||
}
|
||||
//---{{{
|
||||
if (TP_ALAW == type)
|
||||
{
|
||||
for (i = 0; i < in_len; i++)
|
||||
{
|
||||
//*(dst++) = alawtos16[*(src++)];
|
||||
*(dst++) = (int16_t)alaw2linear(*(src++));
|
||||
}
|
||||
}else
|
||||
{
|
||||
for (i = 0; i < in_len; i++)
|
||||
{
|
||||
//*(dst++) = alawtos16[*(src++)];
|
||||
*(dst++) = (int16_t)ulaw2linear(*(src++));
|
||||
}
|
||||
}
|
||||
|
||||
//---}}}
|
||||
*pout_len = 2 * in_len;
|
||||
|
||||
Ret = 2 * in_len;
|
||||
return Ret;
|
||||
}
|
||||
188
OrderScheduling/Video/VideoTools/g726.h
Normal file
@@ -0,0 +1,188 @@
|
||||
|
||||
/*! Bitstream handler state */
typedef struct bitstream_state_s
{
    /*! The bit stream. */
    unsigned int bitstream;
    /*! The residual bits in bitstream. */
    int residue;
}bitstream_state_t;

/* Opaque forward name for the codec state defined below. */
typedef struct g726_state_s g726_state_t;
/*! Decode one codeword into a linear PCM sample. */
typedef short (*g726_decoder_func_t)(g726_state_t *s, unsigned char code);
/*! Encode one linear PCM sample into a codeword. */
typedef unsigned char (*g726_encoder_func_t)(g726_state_t *s, short amp);
|
||||
|
||||
|
||||
/*!
|
||||
* The following is the definition of the state structure
|
||||
* used by the G.726 encoder and decoder to preserve their internal
|
||||
* state between successive calls. The meanings of the majority
|
||||
* of the state structure fields are explained in detail in the
|
||||
* CCITT Recommendation G.726. The field names are essentially indentical
|
||||
* to variable names in the bit level description of the coding algorithm
|
||||
* included in this recommendation.
|
||||
*/
|
||||
/*
 * Persistent G.726 codec state, preserved between successive encode/decode
 * calls.  Field names follow the variable names in the bit-level algorithm
 * description in CCITT Recommendation G.726 (see the header comment above).
 */
struct g726_state_s
{
    /*! The bit rate */
    int rate;
    /*! The external coding, for tandem operation */
    //int ext_coding;
    /*! The number of bits per sample */
    int bits_per_sample;
    /*! One of the G.726_PACKING_xxx options */
    //int packing;

    /*! Locked or steady state step size multiplier. */
    int yl;
    /*! Unlocked or non-steady state step size multiplier. */
    short yu;
    /*! short term energy estimate. */
    short dms;
    /*! Long term energy estimate. */
    short dml;
    /*! Linear weighting coefficient of 'yl' and 'yu'. */
    short ap;

    /*! Coefficients of pole portion of prediction filter. */
    short a[2];
    /*! Coefficients of zero portion of prediction filter. */
    short b[6];
    /*! Signs of previous two samples of a partially reconstructed signal. */
    short pk[2];
    /*! Previous 6 samples of the quantized difference signal represented in
     an internal floating point format. */
    short dq[6];
    /*! Previous 2 samples of the quantized difference signal represented in an
     internal floating point format. */
    short sr[2];
    /*! Delayed tone detect */
    int td;

    /*! \brief The bit stream processing context. */
    bitstream_state_t bs;

    /*! \brief The current encoder function. */
    g726_encoder_func_t enc_func;
    /*! \brief The current decoder function. */
    g726_decoder_func_t dec_func;
};
|
||||
|
||||
/*
|
||||
* Maps G.726_16 code word to reconstructed scale factor normalized log
|
||||
* magnitude values.
|
||||
*/
|
||||
static const int g726_16_dqlntab[4] =
|
||||
{
|
||||
116, 365, 365, 116
|
||||
};
|
||||
|
||||
/* Maps G.726_16 code word to log of scale factor multiplier. */
|
||||
static const int g726_16_witab[4] =
|
||||
{
|
||||
-704, 14048, 14048, -704
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_16 code words to a set of values whose long and short
|
||||
* term averages are computed and then compared to give an indication
|
||||
* how stationary (steady state) the signal is.
|
||||
*/
|
||||
static const int g726_16_fitab[4] =
|
||||
{
|
||||
0x000, 0xE00, 0xE00, 0x000
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_24 code word to reconstructed scale factor normalized log
|
||||
* magnitude values.
|
||||
*/
|
||||
static const int g726_24_dqlntab[8] =
|
||||
{
|
||||
-2048, 135, 273, 373, 373, 273, 135, -2048
|
||||
};
|
||||
|
||||
/* Maps G.726_24 code word to log of scale factor multiplier. */
|
||||
static const int g726_24_witab[8] =
|
||||
{
|
||||
-128, 960, 4384, 18624, 18624, 4384, 960, -128
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_24 code words to a set of values whose long and short
|
||||
* term averages are computed and then compared to give an indication
|
||||
* how stationary (steady state) the signal is.
|
||||
*/
|
||||
static const int g726_24_fitab[8] =
|
||||
{
|
||||
0x000, 0x200, 0x400, 0xE00, 0xE00, 0x400, 0x200, 0x000
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_32 code word to reconstructed scale factor normalized log
|
||||
* magnitude values.
|
||||
*/
|
||||
static const int g726_32_dqlntab[16] =
|
||||
{
|
||||
-2048, 4, 135, 213, 273, 323, 373, 425,
|
||||
425, 373, 323, 273, 213, 135, 4, -2048
|
||||
};
|
||||
|
||||
/* Maps G.726_32 code word to log of scale factor multiplier. */
|
||||
static const int g726_32_witab[16] =
|
||||
{
|
||||
-384, 576, 1312, 2048, 3584, 6336, 11360, 35904,
|
||||
35904, 11360, 6336, 3584, 2048, 1312, 576, -384
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_32 code words to a set of values whose long and short
|
||||
* term averages are computed and then compared to give an indication
|
||||
* how stationary (steady state) the signal is.
|
||||
*/
|
||||
static const int g726_32_fitab[16] =
|
||||
{
|
||||
0x000, 0x000, 0x000, 0x200, 0x200, 0x200, 0x600, 0xE00,
|
||||
0xE00, 0x600, 0x200, 0x200, 0x200, 0x000, 0x000, 0x000
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_40 code word to ructeconstructed scale factor normalized log
|
||||
* magnitude values.
|
||||
*/
|
||||
static const int g726_40_dqlntab[32] =
|
||||
{
|
||||
-2048, -66, 28, 104, 169, 224, 274, 318,
|
||||
358, 395, 429, 459, 488, 514, 539, 566,
|
||||
566, 539, 514, 488, 459, 429, 395, 358,
|
||||
318, 274, 224, 169, 104, 28, -66, -2048
|
||||
};
|
||||
|
||||
/* Maps G.726_40 code word to log of scale factor multiplier. */
|
||||
static const int g726_40_witab[32] =
|
||||
{
|
||||
448, 448, 768, 1248, 1280, 1312, 1856, 3200,
|
||||
4512, 5728, 7008, 8960, 11456, 14080, 16928, 22272,
|
||||
22272, 16928, 14080, 11456, 8960, 7008, 5728, 4512,
|
||||
3200, 1856, 1312, 1280, 1248, 768, 448, 448
|
||||
};
|
||||
|
||||
/*
|
||||
* Maps G.726_40 code words to a set of values whose long and short
|
||||
* term averages are computed and then compared to give an indication
|
||||
* how stationary (steady state) the signal is.
|
||||
*/
|
||||
static const int g726_40_fitab[32] =
|
||||
{
|
||||
0x000, 0x000, 0x000, 0x000, 0x000, 0x200, 0x200, 0x200,
|
||||
0x200, 0x200, 0x400, 0x600, 0x800, 0xA00, 0xC00, 0xC00,
|
||||
0xC00, 0xC00, 0xA00, 0x800, 0x600, 0x400, 0x200, 0x200,
|
||||
0x200, 0x200, 0x200, 0x000, 0x000, 0x000, 0x000, 0x000
|
||||
};
|
||||
|
||||
|
||||
g726_state_t *g726_init(g726_state_t *s, int bit_rate);
|
||||
|
||||
int g726_decode(g726_state_t *s, short amp[], const unsigned char g726_data[], int g726_bytes);
|
||||
|
||||
int g726_encode(g726_state_t *s, unsigned char g726_data[], const short amp[], int len);
|
||||
889
OrderScheduling/Video/VideoTools/g726.m
Normal file
@@ -0,0 +1,889 @@
|
||||
/*
|
||||
Copyright (c) 2013-2016 EasyDarwin.ORG. All rights reserved.
|
||||
Github: https://github.com/EasyDarwin
|
||||
WEChat: EasyDarwin
|
||||
Website: http://www.easydarwin.org
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <math.h>
|
||||
#include <stdlib.h>
|
||||
#include "g726.h"
|
||||
|
||||
static const int qtab_726_16[1] =
|
||||
{
|
||||
261
|
||||
};
|
||||
|
||||
static const int qtab_726_24[3] =
|
||||
{
|
||||
8, 218, 331
|
||||
};
|
||||
|
||||
static const int qtab_726_32[7] =
|
||||
{
|
||||
-124, 80, 178, 246, 300, 349, 400
|
||||
};
|
||||
|
||||
static const int qtab_726_40[15] =
|
||||
{
|
||||
-122, -16, 68, 139, 198, 250, 298, 339,
|
||||
378, 413, 445, 475, 502, 528, 553
|
||||
};
|
||||
|
||||
|
||||
/*
 * Return the bit index (0 = LSB) of the most significant set bit of 'bits';
 * the portable fallback returns -1 for bits == 0.  The x86 asm branch
 * pre-loads -1 before BSR to get the same zero-input result
 * (NOTE(review): relies on BSR leaving the destination untouched on zero
 * input, which is common but not architecturally guaranteed).
 */
static __inline int top_bit(unsigned int bits)
{
#if defined(__i386__) || defined(__x86_64__)
    int res;

    __asm__ (" xorl %[res],%[res];\n"
             " decl %[res];\n"
             " bsrl %[bits],%[res]\n"
             : [res] "=&r" (res)
             : [bits] "rm" (bits));
    return res;
#elif defined(__ppc__) || defined(__powerpc__)
    int res;

    __asm__ ("cntlzw %[res],%[bits];\n"
             : [res] "=&r" (res)
             : [bits] "r" (bits));
    return 31 - res;
#elif defined(_M_IX86) // Visual Studio x86
    /* MSVC inline asm: the function result is whatever is left in EAX. */
    __asm
    {
        xor eax, eax
        dec eax
        bsr eax, bits
    }
#else
    /* Portable fallback: binary search over progressively finer bit masks. */
    int res;

    if (bits == 0)
        return -1;
    res = 0;
    if (bits & 0xFFFF0000)
    {
        bits &= 0xFFFF0000;
        res += 16;
    }
    if (bits & 0xFF00FF00)
    {
        bits &= 0xFF00FF00;
        res += 8;
    }
    if (bits & 0xF0F0F0F0)
    {
        bits &= 0xF0F0F0F0;
        res += 4;
    }
    if (bits & 0xCCCCCCCC)
    {
        bits &= 0xCCCCCCCC;
        res += 2;
    }
    if (bits & 0xAAAAAAAA)
    {
        bits &= 0xAAAAAAAA;
        res += 1;
    }
    return res;
#endif
}
|
||||
|
||||
|
||||
/*
 * Reset a bitstream handler to the empty state (no buffered bits).
 * Returns s, or NULL when s is NULL.
 */
static bitstream_state_t *bitstream_init(bitstream_state_t *s)
{
    if (!s) {
        return NULL;
    }
    s->bitstream = 0;
    s->residue = 0;
    return s;
}
|
||||
|
||||
/*
|
||||
* Given a raw sample, 'd', of the difference signal and a
|
||||
* quantization step size scale factor, 'y', this routine returns the
|
||||
* ADPCM codeword to which that sample gets quantized. The step
|
||||
* size scale factor division operation is done in the log base 2 domain
|
||||
* as a subtraction.
|
||||
*/
|
||||
/*
 * Map a raw difference-signal sample 'd' to its ADPCM codeword given the
 * step size scale factor 'y'.  The division by the step size is done in
 * the log base 2 domain as a subtraction (SUBTB), then the table search
 * (QUAN) picks the codeword.
 */
static short quantize(int d, /* Raw difference signal sample */
                      int y, /* Step size multiplier */
                      const int table[], /* quantization table */
                      int quantizer_states) /* table size of short integers */
{
    short dqm; /* Magnitude of 'd' */
    short exp; /* Integer part of base 2 log of 'd' */
    short mant; /* Fractional part of base 2 log */
    short dl; /* Log of magnitude of 'd' */
    short dln; /* Step size scale factor normalized log */
    int i;
    int size;

    /*
     * LOG
     *
     * Compute base 2 log of 'd', and store in 'dl'.
     */
    dqm = (short) abs(d);
    exp = (short) (top_bit(dqm >> 1) + 1);
    /* Fractional portion. */
    mant = ((dqm << 7) >> exp) & 0x7F;
    dl = (exp << 7) + mant;

    /*
     * SUBTB
     *
     * "Divide" by step size multiplier.
     */
    dln = dl - (short) (y >> 2);

    /*
     * QUAN
     *
     * Search for codword i for 'dln'.
     */
    size = (quantizer_states - 1) >> 1;
    for (i = 0; i < size; i++)
    {
        if (dln < table[i])
            break;
    }
    if (d < 0)
    {
        /* Take 1's complement of i */
        return (short) ((size << 1) + 1 - i);
    }
    if (i == 0 && (quantizer_states & 1))
    {
        /* Zero is only valid if there are an even number of states, so
         take the 1's complement if the code is zero. */
        return (short) quantizer_states;
    }
    return (short) i;
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
|
||||
/*
|
||||
* returns the integer product of the 14-bit integer "an" and
|
||||
* "floating point" representation (4-bit exponent, 6-bit mantessa) "srn".
|
||||
*/
|
||||
/*
 * Integer product of the 14-bit integer 'an' and the "floating point"
 * representation (4-bit exponent, 6-bit mantissa) 'srn'.
 */
static short fmult(short an, short srn)
{
    short anmag;
    short anexp;
    short anmant;
    short wanexp;
    short wanmant;
    short retval;

    /* Split 'an' into magnitude, then normalize to exponent + 6-bit mantissa. */
    anmag = (an > 0) ? an : ((-an) & 0x1FFF);
    anexp = (short) (top_bit(anmag) - 5);
    anmant = (anmag == 0) ? 32 : (anexp >= 0) ? (anmag >> anexp) : (anmag << -anexp);
    /* Add exponents (log-domain multiply); -13 renormalizes the result. */
    wanexp = anexp + ((srn >> 6) & 0xF) - 13;

    /* Multiply mantissas with a 0x30 rounding bias. */
    wanmant = (anmant*(srn & 0x3F) + 0x30) >> 4;
    retval = (wanexp >= 0) ? ((wanmant << wanexp) & 0x7FFF) : (wanmant >> -wanexp);

    /* Result sign is the XOR of the operand signs. */
    return (((an ^ srn) < 0) ? -retval : retval);
}
|
||||
|
||||
/*
|
||||
* Compute the estimated signal from the 6-zero predictor.
|
||||
*/
|
||||
/* Signal estimate from the six-tap zero (FIR) predictor section: the sum of
 fmult(b[i]/4, dq[i]) over the six stored quantized differences. */
static __inline short predictor_zero(g726_state_t *s)
{
    int i;
    int sezi;

    sezi = fmult(s->b[0] >> 2, s->dq[0]);
    /* ACCUM */
    for (i = 1; i < 6; i++)
        sezi += fmult(s->b[i] >> 2, s->dq[i]);
    return (short) sezi;
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
/*
|
||||
* Computes the estimated signal from the 2-pole predictor.
|
||||
*/
|
||||
/* Signal estimate from the two-pole predictor section: the sum of
 fmult(a[i]/4, sr[i]) over the two stored reconstructed samples. */
static __inline short predictor_pole(g726_state_t *s)
{
    return (fmult(s->a[1] >> 2, s->sr[1]) + fmult(s->a[0] >> 2, s->sr[0]));
}
|
||||
|
||||
/*
|
||||
* Computes the quantization step size of the adaptive quantizer.
|
||||
*/
|
||||
/*
 * Current quantization step size of the adaptive quantizer: a blend of the
 * unlocked (yu) and locked (yl) scale factors weighted by the adaptation
 * speed coefficient ap.
 */
static int step_size(g726_state_t *s)
{
    int y;
    int dif;
    int al;

    /* ap saturated high: fully in fast-adaptation mode, use yu directly. */
    if (s->ap >= 256)
        return s->yu;
    y = s->yl >> 6;
    dif = s->yu - y;
    al = s->ap >> 2;
    if (dif > 0)
        y += (dif*al) >> 6;
    else if (dif < 0)
        y += (dif*al + 0x3F) >> 6; /* +0x3F rounds the negative term toward zero */
    return y;
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
/*
|
||||
* Returns reconstructed difference signal 'dq' obtained from
|
||||
* codeword 'i' and quantization step size scale factor 'y'.
|
||||
* Multiplication is performed in log base 2 domain as addition.
|
||||
*/
|
||||
/*
 * Rebuild the quantized difference signal sample from codeword log
 * magnitude 'dqln' and step size scale factor 'y'; the log-domain
 * multiplication is performed as an addition.  'sign' is 0 for a
 * non-negative value.
 */
static short reconstruct(int sign, int dqln, int y)
{
    /* ADDA: multiply magnitude by step size in the log base 2 domain. */
    short log_mag = (short) (dqln + (y >> 2));

    if (log_mag < 0) {
        return sign ? -0x8000 : 0;
    }

    /* ANTILOG: split into a 4-bit integer exponent and 7-bit mantissa. */
    short exponent = (log_mag >> 7) & 15;
    short mantissa = 128 + (log_mag & 127);
    short sample = (mantissa << 7) >> (14 - exponent);

    return sign ? (short) (sample - 0x8000) : sample;
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
/*
|
||||
* updates the state variables for each output code
|
||||
*/
|
||||
/*
 * Update every piece of codec state after one encode or decode step:
 * quantizer scale factors (yu/yl), adaptive predictor coefficients (a/b),
 * the delayed difference/reconstruction history (dq/sr/pk), tone detect
 * (td), and the adaptation speed control (dms/dml/ap).  Statement order is
 * significant; block labels (TRANS, LIMB, UPA1, ...) refer to the blocks
 * of the G.726 reference description.
 */
static void update(g726_state_t *s,
                   int y, /* quantizer step size */
                   int wi, /* scale factor multiplier */
                   int fi, /* for long/short term energies */
                   int dq, /* quantized prediction difference */
                   int sr, /* reconstructed signal */
                   int dqsez) /* difference from 2-pole predictor */
{
    short mag;
    short exp;
    short a2p; /* LIMC */
    short a1ul; /* UPA1 */
    short pks1; /* UPA2 */
    short fa1;
    short ylint;
    short dqthr;
    short ylfrac;
    short thr;
    short pk0;
    int i;
    int tr;

    a2p = 0;
    /* Needed in updating predictor poles */
    pk0 = (dqsez < 0) ? 1 : 0;

    /* prediction difference magnitude */
    mag = (short) (dq & 0x7FFF);
    /* TRANS */
    ylint = (short) (s->yl >> 15); /* exponent part of yl */
    ylfrac = (short) ((s->yl >> 10) & 0x1F); /* fractional part of yl */
    /* Limit threshold to 31 << 10 */
    thr = (ylint > 9) ? (31 << 10) : ((32 + ylfrac) << ylint);
    dqthr = (thr + (thr >> 1)) >> 1; /* dqthr = 0.75 * thr */
    if (!s->td) /* signal supposed voice */
        tr = 0;
    else if (mag <= dqthr) /* supposed data, but small mag */
        tr = 0; /* treated as voice */
    else /* signal is data (modem) */
        tr = 1;

    /*
     * Quantizer scale factor adaptation.
     */

    /* FUNCTW & FILTD & DELAY */
    /* update non-steady state step size multiplier */
    s->yu = (short) (y + ((wi - y) >> 5));

    /* LIMB */
    if (s->yu < 544)
        s->yu = 544;
    else if (s->yu > 5120)
        s->yu = 5120;

    /* FILTE & DELAY */
    /* update steady state step size multiplier */
    s->yl += s->yu + ((-s->yl) >> 6);

    /*
     * Adaptive predictor coefficients.
     */
    if (tr)
    {
        /* Reset the a's and b's for a modem signal */
        s->a[0] = 0;
        s->a[1] = 0;
        s->b[0] = 0;
        s->b[1] = 0;
        s->b[2] = 0;
        s->b[3] = 0;
        s->b[4] = 0;
        s->b[5] = 0;
    }
    else
    {
        /* Update the a's and b's */
        /* UPA2 */
        pks1 = pk0 ^ s->pk[0];

        /* Update predictor pole a[1] */
        a2p = s->a[1] - (s->a[1] >> 7);
        if (dqsez != 0)
        {
            fa1 = (pks1) ? s->a[0] : -s->a[0];
            /* a2p = function of fa1 */
            if (fa1 < -8191)
                a2p -= 0x100;
            else if (fa1 > 8191)
                a2p += 0xFF;
            else
                a2p += fa1 >> 5;

            if (pk0 ^ s->pk[1])
            {
                /* LIMC */
                if (a2p <= -12160)
                    a2p = -12288;
                else if (a2p >= 12416)
                    a2p = 12288;
                else
                    a2p -= 0x80;
            }
            else if (a2p <= -12416)
                a2p = -12288;
            else if (a2p >= 12160)
                a2p = 12288;
            else
                a2p += 0x80;
        }

        /* TRIGB & DELAY */
        s->a[1] = a2p;

        /* UPA1 */
        /* Update predictor pole a[0] */
        s->a[0] -= s->a[0] >> 8;
        if (dqsez != 0)
        {
            if (pks1 == 0)
                s->a[0] += 192;
            else
                s->a[0] -= 192;
        }
        /* LIMD */
        a1ul = 15360 - a2p;
        if (s->a[0] < -a1ul)
            s->a[0] = -a1ul;
        else if (s->a[0] > a1ul)
            s->a[0] = a1ul;

        /* UPB : update predictor zeros b[6] */
        for (i = 0; i < 6; i++)
        {
            /* Distinguish 40Kbps mode from the others */
            s->b[i] -= s->b[i] >> ((s->bits_per_sample == 5) ? 9 : 8);
            if (dq & 0x7FFF)
            {
                /* XOR */
                if ((dq ^ s->dq[i]) >= 0)
                    s->b[i] += 128;
                else
                    s->b[i] -= 128;
            }
        }
    }

    /* Shift the quantized-difference history down by one sample. */
    for (i = 5; i > 0; i--)
        s->dq[i] = s->dq[i - 1];
    /* FLOAT A : convert dq[0] to 4-bit exp, 6-bit mantissa f.p. */
    if (mag == 0)
    {
        s->dq[0] = (dq >= 0) ? 0x20 : 0xFC20;
    }
    else
    {
        exp = (short) (top_bit(mag) + 1);
        s->dq[0] = (dq >= 0)
            ? ((exp << 6) + ((mag << 6) >> exp))
            : ((exp << 6) + ((mag << 6) >> exp) - 0x400);
    }

    s->sr[1] = s->sr[0];
    /* FLOAT B : convert sr to 4-bit exp., 6-bit mantissa f.p. */
    if (sr == 0)
    {
        s->sr[0] = 0x20;
    }
    else if (sr > 0)
    {
        exp = (short) (top_bit(sr) + 1);
        s->sr[0] = (short) ((exp << 6) + ((sr << 6) >> exp));
    }
    else if (sr > -32768)
    {
        mag = (short) -sr;
        exp = (short) (top_bit(mag) + 1);
        s->sr[0] = (exp << 6) + ((mag << 6) >> exp) - 0x400;
    }
    else
    {
        /* sr == -32768 cannot be negated in 16 bits; use the fixed code. */
        s->sr[0] = (short) 0xFC20;
    }

    /* DELAY A */
    s->pk[1] = s->pk[0];
    s->pk[0] = pk0;

    /* TONE */
    if (tr) /* this sample has been treated as data */
        s->td = 0; /* next one will be treated as voice */
    else if (a2p < -11776) /* small sample-to-sample correlation */
        s->td = 1; /* signal may be data */
    else /* signal is voice */
        s->td = 0;

    /* Adaptation speed control. */
    /* FILTA */
    s->dms += ((short) fi - s->dms) >> 5;
    /* FILTB */
    s->dml += (((short) (fi << 2) - s->dml) >> 7);

    if (tr)
        s->ap = 256;
    else if (y < 1536) /* SUBTC */
        s->ap += (0x200 - s->ap) >> 4;
    else if (s->td)
        s->ap += (0x200 - s->ap) >> 4;
    else if (abs((s->dms << 2) - s->dml) >= (s->dml >> 3))
        s->ap += (0x200 - s->ap) >> 4;
    else
        s->ap += (-s->ap) >> 4;
}
|
||||
|
||||
/*
 * Decodes a 2-bit CCITT G.726_16 ADPCM code and returns
 * the resulting 16-bit linear PCM, A-law or u-law sample value.
 */
static short g726_16_decoder(g726_state_t *s, unsigned char code)
{
    short sezi;     /* zero-predictor output (scaled) */
    short sei;      /* full signal estimate (zero + pole predictors) */
    short se;       /* signal estimate, halved */
    short sr;       /* reconstructed signal */
    short dq;       /* quantized difference signal */
    short dqsez;    /* pole-predictor difference input */
    int y;          /* quantizer step size */

    /* Mask to get proper bits: only 2 bits are valid at 16 kbps */
    code &= 0x03;
    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);

    y = step_size(s);
    /* Bit 1 of the code is the sign bit at this rate */
    dq = reconstruct(code & 2, g726_16_dqlntab[code], y);

    /* Reconstruct the signal: add/subtract the magnitude of dq */
    se = sei >> 1;
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    /* Update predictor coefficients and adaptation state */
    update(s, y, g726_16_witab[code], g726_16_fitab[code], dq, sr, dqsez);

    /* sr is kept at 14-bit precision internally; scale up to 16 bits */
    return (sr << 2);
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
|
||||
/*
 * Encodes a linear PCM, A-law or u-law input sample and returns its 2-bit code.
 * (16 kbps mode uses 2 bits per sample.)
 */
static unsigned char g726_16_encoder(g726_state_t *s, short amp)
{
    int y;          /* quantizer step size */
    short sei;      /* full signal estimate */
    short sezi;     /* zero-predictor output */
    short se;       /* signal estimate, halved */
    short d;        /* difference between input and estimate */
    short sr;       /* reconstructed signal (encoder mirrors the decoder) */
    short dqsez;    /* pole-predictor difference input */
    short dq;       /* quantized difference signal */
    short i;        /* resulting ADPCM code */

    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);
    se = sei >> 1;
    /* NOTE(review): amp is assumed to already be a 14-bit linear sample
       (callers shift >> 2 before calling) — confirm against g726_encode. */
    d = amp - se;

    /* Quantize prediction difference */
    y = step_size(s);
    i = quantize(d, y, qtab_726_16, 4);
    dq = reconstruct(i & 2, g726_16_dqlntab[i], y);

    /* Reconstruct the signal */
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    /* Update predictor state so encoder and decoder stay in lockstep */
    update(s, y, g726_16_witab[i], g726_16_fitab[i], dq, sr, dqsez);
    return (unsigned char) i;
}
|
||||
|
||||
/*
 * Decodes a 3-bit CCITT G.726_24 ADPCM code and returns
 * the resulting 16-bit linear PCM, A-law or u-law sample value.
 */
static short g726_24_decoder(g726_state_t *s, unsigned char code)
{
    short sezi;     /* zero-predictor output */
    short sei;      /* full signal estimate */
    short se;      /* signal estimate, halved */
    short sr;       /* reconstructed signal */
    short dq;       /* quantized difference signal */
    short dqsez;    /* pole-predictor difference input */
    int y;          /* quantizer step size */

    /* Mask to get proper bits: 3 bits per sample at 24 kbps */
    code &= 0x07;
    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);

    y = step_size(s);
    /* Bit 2 of the code is the sign bit at this rate */
    dq = reconstruct(code & 4, g726_24_dqlntab[code], y);

    /* Reconstruct the signal */
    se = sei >> 1;
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    update(s, y, g726_24_witab[code], g726_24_fitab[code], dq, sr, dqsez);

    /* Scale 14-bit internal precision back to 16 bits */
    return (sr << 2);
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
|
||||
/*
 * Encodes a linear PCM, A-law or u-law input sample and returns its 3-bit code.
 */
static unsigned char g726_24_encoder(g726_state_t *s, short amp)
{
    short sei;      /* full signal estimate */
    short sezi;     /* zero-predictor output */
    short se;       /* signal estimate, halved */
    short d;        /* difference between input and estimate */
    short sr;       /* reconstructed signal */
    short dqsez;    /* pole-predictor difference input */
    short dq;       /* quantized difference signal */
    short i;        /* resulting ADPCM code */
    int y;          /* quantizer step size */

    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);
    se = sei >> 1;
    d = amp - se;

    /* Quantize prediction difference (7 quantizer decision levels) */
    y = step_size(s);
    i = quantize(d, y, qtab_726_24, 7);
    dq = reconstruct(i & 4, g726_24_dqlntab[i], y);

    /* Reconstruct the signal */
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    /* Keep encoder state in lockstep with the decoder */
    update(s, y, g726_24_witab[i], g726_24_fitab[i], dq, sr, dqsez);
    return (unsigned char) i;
}
|
||||
|
||||
|
||||
/*
 * Decodes a 4-bit CCITT G.726_32 ADPCM code and returns
 * the resulting 16-bit linear PCM, A-law or u-law sample value.
 */
static short g726_32_decoder(g726_state_t *s, unsigned char code)
{
    short sezi;     /* zero-predictor output */
    short sei;      /* full signal estimate */
    short se;       /* signal estimate, halved */
    short sr;       /* reconstructed signal */
    short dq;       /* quantized difference signal */
    short dqsez;    /* pole-predictor difference input */
    int y;          /* quantizer step size */

    /* Mask to get proper bits: 4 bits per sample at 32 kbps */
    code &= 0x0F;
    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);

    y = step_size(s);
    /* Bit 3 of the code is the sign bit at this rate */
    dq = reconstruct(code & 8, g726_32_dqlntab[code], y);

    /* Reconstruct the signal */
    se = sei >> 1;
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    update(s, y, g726_32_witab[code], g726_32_fitab[code], dq, sr, dqsez);

    /* Scale 14-bit internal precision back to 16 bits */
    return (sr << 2);
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
/*
 * Encodes a linear input sample and returns its 4-bit code.
 */
static unsigned char g726_32_encoder(g726_state_t *s, short amp)
{
    short sei;      /* full signal estimate */
    short sezi;     /* zero-predictor output */
    short se;       /* signal estimate, halved */
    short d;        /* difference between input and estimate */
    short sr;       /* reconstructed signal */
    short dqsez;    /* pole-predictor difference input */
    short dq;       /* quantized difference signal */
    short i;        /* resulting ADPCM code */
    int y;          /* quantizer step size */

    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);
    se = sei >> 1;
    d = amp - se;

    /* Quantize the prediction difference (15 quantizer decision levels) */
    y = step_size(s);
    i = quantize(d, y, qtab_726_32, 15);
    dq = reconstruct(i & 8, g726_32_dqlntab[i], y);

    /* Reconstruct the signal */
    sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    /* Keep encoder state in lockstep with the decoder */
    update(s, y, g726_32_witab[i], g726_32_fitab[i], dq, sr, dqsez);
    return (unsigned char) i;
}
|
||||
|
||||
/*
 * Decodes a 5-bit CCITT G.726 40Kbps code and returns
 * the resulting 16-bit linear PCM, A-law or u-law sample value.
 */
static short g726_40_decoder(g726_state_t *s, unsigned char code)
{
    short sezi;     /* zero-predictor output */
    short sei;      /* full signal estimate */
    short se;       /* signal estimate, halved */
    short sr;       /* reconstructed signal */
    short dq;       /* quantized difference signal */
    short dqsez;    /* pole-predictor difference input */
    int y;          /* quantizer step size */

    /* Mask to get proper bits: 5 bits per sample at 40 kbps */
    code &= 0x1F;
    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);

    y = step_size(s);
    /* Bit 4 of the code is the sign bit at this rate */
    dq = reconstruct(code & 0x10, g726_40_dqlntab[code], y);

    /* Reconstruct the signal.
       Note: 40 kbps uses a 15-bit magnitude mask (0x7FFF), unlike the
       0x3FFF mask of the lower rates, matching its wider dq range. */
    se = sei >> 1;
    sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    update(s, y, g726_40_witab[code], g726_40_fitab[code], dq, sr, dqsez);

    /* Scale internal precision back to 16 bits */
    return (sr << 2);
}
|
||||
/*- End of function --------------------------------------------------------*/
|
||||
|
||||
|
||||
/*
 * Encodes a 16-bit linear PCM, A-law or u-law input sample and returns
 * the resulting 5-bit CCITT G.726 40Kbps code.
 */
static unsigned char g726_40_encoder(g726_state_t *s, short amp)
{
    short sei;      /* full signal estimate */
    short sezi;     /* zero-predictor output */
    short se;       /* signal estimate, halved */
    short d;        /* difference between input and estimate */
    short sr;       /* reconstructed signal */
    short dqsez;    /* pole-predictor difference input */
    short dq;       /* quantized difference signal */
    short i;        /* resulting ADPCM code */
    int y;          /* quantizer step size */

    sezi = predictor_zero(s);
    sei = sezi + predictor_pole(s);
    se = sei >> 1;
    d = amp - se;

    /* Quantize prediction difference (31 quantizer decision levels) */
    y = step_size(s);
    i = quantize(d, y, qtab_726_40, 31);
    dq = reconstruct(i & 0x10, g726_40_dqlntab[i], y);

    /* Reconstruct the signal (15-bit magnitude mask at this rate) */
    sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq);

    /* Pole prediction difference */
    dqsez = sr + (sezi >> 1) - se;

    /* Keep encoder state in lockstep with the decoder */
    update(s, y, g726_40_witab[i], g726_40_fitab[i], dq, sr, dqsez);
    return (unsigned char) i;
}
|
||||
|
||||
/*
 * Initializes a G.726 codec state for the given bit rate.
 *
 * s        - caller-allocated state structure to initialize
 * bit_rate - one of 16000, 24000, 32000, 40000 (bits/second)
 *
 * Returns s on success, or NULL if bit_rate is not a supported rate.
 * Selects the per-rate encode/decode functions and bits-per-sample,
 * and resets the predictor/adaptation state to the standard initial
 * values (yl=34816, yu=544, sr/dq mantissas = 32, i.e. 1.0 in the
 * 6-bit-mantissa floating format used internally).
 */
g726_state_t *g726_init(g726_state_t *s, int bit_rate)
{
    int i;

    if (bit_rate != 16000 && bit_rate != 24000 && bit_rate != 32000 && bit_rate != 40000)
        return NULL;

    s->yl = 34816;
    s->yu = 544;
    s->dms = 0;
    s->dml = 0;
    s->ap = 0;
    s->rate = bit_rate;

    /* Pole predictor (2nd order) initial state */
    for (i = 0; i < 2; i++)
    {
        s->a[i] = 0;
        s->pk[i] = 0;
        s->sr[i] = 32;
    }
    /* Zero predictor (6th order) initial state */
    for (i = 0; i < 6; i++)
    {
        s->b[i] = 0;
        s->dq[i] = 32;
    }
    s->td = 0;
    switch (bit_rate)
    {
    case 16000:
        s->enc_func = g726_16_encoder;
        s->dec_func = g726_16_decoder;
        s->bits_per_sample = 2;
        break;
    case 24000:
        s->enc_func = g726_24_encoder;
        s->dec_func = g726_24_decoder;
        s->bits_per_sample = 3;
        break;
    case 32000:
    default:
        /* 32 kbps doubles as the fallback, although the guard above
           already rejects unknown rates */
        s->enc_func = g726_32_encoder;
        s->dec_func = g726_32_decoder;
        s->bits_per_sample = 4;
        break;
    case 40000:
        s->enc_func = g726_40_encoder;
        s->dec_func = g726_40_decoder;
        s->bits_per_sample = 5;
        break;
    }
    /* Reset the bit-packing accumulator */
    bitstream_init(&s->bs);
    return s;
}
|
||||
|
||||
/*
 * Decodes a buffer of packed G.726 codes into 16-bit linear PCM samples.
 *
 * s          - initialized codec state (see g726_init)
 * amp        - output buffer; must hold (g726_bytes*8)/bits_per_sample samples
 * g726_data  - packed input codes
 * g726_bytes - number of input bytes
 *
 * Returns the number of samples written to amp.
 *
 * NOTE: this variant consumes the input bytes in REVERSE order
 * (g726_data[g726_bytes-i-1]); the matching g726_encode below reverses
 * its output bytes, so the pair is self-consistent but NOT wire-compatible
 * with a standard front-to-back G.726 byte stream.
 */
int g726_decode(g726_state_t *s,
                short amp[],
                const unsigned char g726_data[],
                int g726_bytes)
{
    int i;              /* input byte index */
    int samples;        /* output sample count */
    unsigned char code; /* current ADPCM code */
    int sl;             /* decoded linear sample */

    for (samples = i = 0; ; )
    {
        /* Refill the bit accumulator one byte at a time */
        if (s->bs.residue < s->bits_per_sample)
        {
            if (i >= g726_bytes)
                break;
            s->bs.bitstream = (s->bs.bitstream << 8) | g726_data[g726_bytes-i-1]; i++;
            // s->bs.bitstream = (s->bs.bitstream << 8) | g726_data[i++];
            s->bs.residue += 8;
        }
        /* Peel off the top bits_per_sample bits as one code */
        code = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - s->bits_per_sample)) & ((1 << s->bits_per_sample) - 1));

        s->bs.residue -= s->bits_per_sample;

        sl = s->dec_func(s, code);

        amp[samples++] = (short) sl;
    }
    return samples;
}
|
||||
|
||||
|
||||
/*
 * Encodes a buffer of 16-bit linear PCM samples into packed G.726 codes.
 *
 * s         - initialized codec state (see g726_init)
 * g726_data - output buffer; must hold (len*bits_per_sample+7)/8 bytes
 * amp       - input samples
 * len       - number of input samples
 *
 * Returns the number of bytes written to g726_data.
 *
 * NOTE: the finished byte buffer is reversed in place before returning,
 * pairing with the reversed read order in g726_decode above. Trailing
 * bits that do not fill a whole byte remain in s->bs for the next call.
 */
int g726_encode(g726_state_t *s,
                unsigned char g726_data[],
                const short amp[],
                int len)
{
    int i;              /* input sample index */
    int g726_bytes;     /* output byte count */
    short sl;           /* 14-bit linear input sample */
    unsigned char code; /* ADPCM code for one sample */

    for (g726_bytes = i = 0; i < len; i++)
    {
        /* Reduce 16-bit input to the codec's 14-bit internal precision */
        sl = amp[i] >> 2;

        code = s->enc_func(s, sl);

        /* Pack the code into the bit accumulator, flushing full bytes */
        s->bs.bitstream = (s->bs.bitstream << s->bits_per_sample) | code;
        s->bs.residue += s->bits_per_sample;
        if (s->bs.residue >= 8)
        {
            g726_data[g726_bytes++] = (unsigned char) ((s->bs.bitstream >> (s->bs.residue - 8)) & 0xFF);
            s->bs.residue -= 8;
        }

    }

    /* Reverse the output bytes in place (see NOTE above) */
    int j = 0, k = g726_bytes - 1;
    unsigned char temp = 0;
    while (j < k) {
        temp = g726_data[j]; g726_data[j] = g726_data[k]; g726_data[k] = temp;
        j++; k--;
    }

    return g726_bytes;
}
|
||||
|
||||
85
OrderScheduling/Video/YFProgressHUD/YFProgressHUD.h
Normal file
@@ -0,0 +1,85 @@
|
||||
//
|
||||
// YFProgressHUD.h
|
||||
// LoadingViewAnimation
|
||||
//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
/// Full-screen overlay HUD supporting four presentation styles:
/// a text toast, an activity-indicator spinner, a falling/rotating
/// image animation, and a GIF animation. Class methods either attach
/// the HUD to the key window or to a caller-supplied view.
@interface YFProgressHUD : UIView

#pragma mark ====== 添加在window上 =======

/// Shows a transient text toast on the key window.
/// The toast is removed automatically after about 2 seconds.
/// @param titleString text to display
+(YFProgressHUD *) showToastTitle:(NSString *)titleString;

/**
 Shows a transient text toast on the key window.

 @param titleString text to display while loading (optional)
 @param block invoked on the main queue after the toast is dismissed
 */
+(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block;

/**
 Shows a UIActivityIndicatorView with an optional title on the key window.

 @param titleString text to display while loading (optional)
 */
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString;

/**
 Shows a HUD with a falling/rotating image animation on the key window.

 @param titleString text to display while loading (optional)
 @param arr image names used for the animation frames
 */
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr;


/**
 Shows a GIF-animated HUD on the key window.

 @param titleString text to display while loading (optional)
 @param gifName name of the GIF image resource
 */
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName;

/**
 Removes any HUD currently shown on the key window.
 */
+(void) hiddenProgressHUD;

#pragma mark ====== 添加在view上 =======
/**
 Shows a UIActivityIndicatorView with an optional title in the given view.

 @param titleString text to display while loading (optional)
 */
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString;

/**
 Shows a HUD with a falling/rotating image animation in the given view.

 @param view the view to host the HUD
 @param titleString text to display while loading (optional)
 @param arr image names used for the animation frames
 */
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr;

/**
 Shows a GIF-animated HUD in the given view.
 @param view the view to host the HUD
 @param titleString text to display while loading (optional)
 @param gifName name of the GIF image resource
 */
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName;
/**
 Removes any HUD currently shown in the given view.
 */
+(void) hiddenProgressHUDforView:(UIView *)view;

/**
 Updates the title text of the HUD currently shown in the given view.
 */
+ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view;
@end
|
||||
564
OrderScheduling/Video/YFProgressHUD/YFProgressHUD.m
Normal file
@@ -0,0 +1,564 @@
|
||||
//
|
||||
// YFProgressHUD.m
|
||||
// LoadingViewAnimation
|
||||
//made in zhongdao Copyright © 2017年 tracy wang. All rights reserved.
|
||||
//
|
||||
|
||||
#import "YFProgressHUD.h"
|
||||
|
||||
#import "YLImageView.h"
|
||||
#import "YLGIFImage.h"
|
||||
|
||||
|
||||
|
||||
#define ANIMATION_DURATION_SECS 0.5f
|
||||
|
||||
#define KW 120
|
||||
#define KH 120
|
||||
|
||||
/// Presentation style of the HUD.
typedef NS_ENUM(NSUInteger, YFProgressHUDType) {
    YFProgressHUDTypeGif, // GIF-animated HUD
    YFProgressHUDTypeRotAni,// falling/rotating image HUD
    YFProgressHUDTypeNormal,// plain activity-indicator HUD
    YFProgressHUDTypeToast,// text toast
};

@interface YFProgressHUD ()
// Timer driving the falling/rotating animation steps
@property (nonatomic, strong) NSTimer *timer;
// View carrying the animated shape image
@property(nonatomic,strong)UIImageView * shapView;
// View carrying the shadow image under the falling shape
@property(nonatomic,strong)UIImageView * shadowView;
// Name of the GIF resource (gif style only)
@property (nonatomic, copy) NSString *gifName;
// Text shown while loading
@property (nonatomic, copy) NSString *titleString;
// Label showing titleString
@property (nonatomic, strong) UILabel *titleLabel;
// Image names for the falling/rotating animation frames
@property(nonatomic,strong)NSArray *imagesArr;
// Current animation frame index (used to alternate fall/rise)
@property (nonatomic, assign) int stepNumber;
// Whether an animation is currently running
@property (nonatomic, assign) BOOL isAnimating;
// Start y-position of the falling animation
@property(nonatomic,assign)float fromValue;
// End y-position of the falling animation
@property(nonatomic,assign)float toValue;
// Start scale of the shadow animation
@property(nonatomic,assign)float scalefromValue;
// End scale of the shadow animation
@property(nonatomic,assign)float scaletoValue;
// Presentation style of this HUD
@property(nonatomic,assign)YFProgressHUDType hudType;
// NOTE(review): declared but never assigned in the visible code — the
// class methods use +getWindow instead; possibly dead state.
@property(nonatomic,strong)UIWindow *window;
@end
|
||||
|
||||
@implementation YFProgressHUD
|
||||
|
||||
/// Returns the application's key window, caching the last known one weakly
/// so a valid window can still be returned while no window is key.
/// On iOS 13+ the key window is located via the foreground-active
/// UIWindowScene; earlier systems use the deprecated keyWindow property.
+(UIWindow *)getWindow{

    static __weak UIWindow *cachedKeyWindow = nil;
    /* (Bug ID: #23, #25, #73) */
    UIWindow *originalKeyWindow = nil;

#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 130000
    if (@available(iOS 13.0, *)) {
        NSSet<UIScene *> *connectedScenes = [UIApplication sharedApplication].connectedScenes;
        for (UIScene *scene in connectedScenes) {
            if (scene.activationState == UISceneActivationStateForegroundActive && [scene isKindOfClass:[UIWindowScene class]]) {
                UIWindowScene *windowScene = (UIWindowScene *)scene;
                for (UIWindow *window in windowScene.windows) {
                    if (window.isKeyWindow) {
                        originalKeyWindow = window;
                        break;
                    }
                }
            }
        }
    } else
#endif
    {
#if __IPHONE_OS_VERSION_MIN_REQUIRED < 130000
        originalKeyWindow = [UIApplication sharedApplication].keyWindow;
#endif
    }

    //If original key window is not nil and the cached keywindow is also not original keywindow then changing keywindow.
    if (originalKeyWindow)
    {
        cachedKeyWindow = originalKeyWindow;
    }

    return cachedKeyWindow;
}
|
||||
|
||||
|
||||
#pragma mark ====== 添加在window上 =======
|
||||
#pragma mark ====== 添加在window上 =======
/// Shows a transient text toast on the key window; auto-dismissed
/// after ~2 seconds.
/// NOTE(review): the HUD is created asynchronously on the main queue,
/// so when this method is called from a background thread the returned
/// value is typically still nil — callers should not rely on it.
+(YFProgressHUD *) showToastTitle:(NSString *)titleString{

    __block YFProgressHUD *hud;
    __block UIWindow *window;

    dispatch_async(dispatch_get_main_queue(), ^{
        window = [YFProgressHUD getWindow];
        // [YFProgressHUD hiddenProgressHUDforView:window];

        hud = [[YFProgressHUD alloc] initWithFrame:window.bounds];
        hud.hudType = YFProgressHUDTypeToast;
        hud.titleString = titleString;
        [hud setupView];
        [window addSubview:hud];
    });

    // Both blocks target the main serial queue, so the creation block
    // above runs before this dismissal block fires 2 seconds later.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [hud removeFromSuperview];
        [YFProgressHUD hiddenProgressHUDforView:window];
    });

    return hud;
}
|
||||
|
||||
/// Shows a transient text toast on the key window, auto-dismissed after
/// ~2 seconds, then invokes the completion block on the main queue.
/// NOTE(review): as with showToastTitle:, the return value is typically
/// nil when called from a background thread (HUD created asynchronously).
+(YFProgressHUD *) showToastTitle:(NSString *)titleString completionBlock:(void(^)(void))block
{
    __block YFProgressHUD *hud;
    __block UIWindow *window;

    dispatch_async(dispatch_get_main_queue(), ^{
        window = [YFProgressHUD getWindow];
        // [YFProgressHUD hiddenProgressHUDforView:window];

        hud = [[YFProgressHUD alloc] initWithFrame:window.bounds];
        hud.hudType = YFProgressHUDTypeToast;
        hud.titleString = titleString;
        [hud setupView];
        [window addSubview:hud];
    });

    // Serial main queue ordering guarantees the toast exists before
    // this dismissal block runs.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [hud removeFromSuperview];
        [YFProgressHUD hiddenProgressHUDforView:window];
        // The completion block is optional; guard before calling.
        if (block) {
            block();
        }
    });

    return hud;
}
|
||||
/**
|
||||
显示UIActivityIndicatorView 和 title
|
||||
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
*/
|
||||
/// Convenience wrapper: shows the activity-indicator HUD on the key window.
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString{
    return [YFProgressHUD showProgressHUDinView:[YFProgressHUD getWindow] title:titleString];
}
|
||||
|
||||
/**
|
||||
带有下落动画HUD
|
||||
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
@param arr 动画的图片
|
||||
*/
|
||||
/// Convenience wrapper: shows the falling/rotating image HUD on the key window.
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString imagesArr:(NSArray *)arr{
    UIWindow *keyWindow = [YFProgressHUD getWindow];
    return [YFProgressHUD showProgressHUDinView:keyWindow title:titleString imagesArr:arr];
}
|
||||
|
||||
/**
|
||||
gif动画HUD
|
||||
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
@param gifName gif动画的图片
|
||||
*/
|
||||
/// Convenience wrapper: shows the GIF-animated HUD on the key window.
+(YFProgressHUD *) showProgressHUDWithTitle:(NSString *)titleString gifImg:(NSString *)gifName{
    return [YFProgressHUD showProgressHUDinView:[YFProgressHUD getWindow]
                                      withTitle:titleString
                                         gifImg:gifName];
}
|
||||
|
||||
/**
|
||||
移除HUD
|
||||
*/
|
||||
/// Removes any HUD currently shown on the key window.
+(void) hiddenProgressHUD{
    [YFProgressHUD hiddenProgressHUDforView:[YFProgressHUD getWindow]];
}
|
||||
|
||||
#pragma mark ====== 添加在View上 =======
|
||||
/**
|
||||
显示UIActivityIndicatorView 和 title
|
||||
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
*/
|
||||
/// Shows an activity-indicator HUD in the given view, replacing any
/// HUD already present there.
/// NOTE(review): the HUD is constructed on the calling thread while
/// addSubview is dispatched to the main queue; callers should invoke
/// this from the main thread for fully safe UIKit usage.
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString{

    [YFProgressHUD hiddenProgressHUDforView:view];
    YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds];
    hud.hudType = YFProgressHUDTypeNormal;
    hud.titleString = titleString;
    [hud setupView];
    dispatch_async(dispatch_get_main_queue(), ^{
        [view addSubview:hud];
    });
    // For the Normal style startAnimating is a no-op (timer only runs
    // for the RotAni style); the UIActivityIndicatorView was already
    // started in setupNormal.
    [hud startAnimating];

    return hud;

}
|
||||
|
||||
/**
|
||||
带有下落动画HUD
|
||||
|
||||
@param view 需要展示HUD的view
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
@param arr 动画的图片
|
||||
*/
|
||||
/// Shows the falling/rotating image HUD in the given view, replacing
/// any HUD already present there, and starts the step animation timer.
/// NOTE(review): unlike the Normal variant, addSubview here is done on
/// the calling thread — call from the main thread.
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view title:(NSString *)titleString imagesArr:(NSArray *)arr{

    [YFProgressHUD hiddenProgressHUDforView:view];

    YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds];
    hud.hudType = YFProgressHUDTypeRotAni;
    hud.titleString = titleString;
    hud.imagesArr = arr;
    [hud setupView];
    [view addSubview:hud];
    [hud startAnimating];
    return hud;
}
|
||||
|
||||
/**
|
||||
gif动画HUD
|
||||
@param view 需要展示HUD的view
|
||||
@param titleString 加载时展示的文字(可选)
|
||||
@param gifName gif动画的图片
|
||||
*/
|
||||
/// Shows the GIF-animated HUD in the given view, replacing any HUD
/// already present there. The GIF animates via YLImageView itself;
/// startAnimating only drives the RotAni style's timer.
+(YFProgressHUD *) showProgressHUDinView:(UIView *)view withTitle:(NSString *)titleString gifImg:(NSString *)gifName{

    [YFProgressHUD hiddenProgressHUDforView:view];

    YFProgressHUD *hud = [[YFProgressHUD alloc] initWithFrame:view.bounds];
    hud.hudType = YFProgressHUDTypeGif;
    hud.titleString = titleString;
    hud.gifName = gifName;
    [hud setupView];
    [view addSubview:hud];
    [hud startAnimating];
    return hud;
}
|
||||
|
||||
/**
 Removes the first YFProgressHUD subview found in the given view.
 Falls back to the key window when view is nil.

 @param view the view to search; may be nil (key window is used instead)
 */
+(void) hiddenProgressHUDforView:(UIView *)view{
    // BUGFIX: the original guard was `if (!view && [view isMemberOfClass:
    // [UIWindow class]])`, which can never be true — messaging nil returns
    // NO — so the key-window fallback was unreachable and a nil view made
    // this method a silent no-op.
    if (!view) {
        view = [YFProgressHUD getWindow];
    }
    // Subview removal must happen on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        for (UIView *subView in view.subviews) {
            if ([subView isKindOfClass:[YFProgressHUD class]]) {
                [subView removeFromSuperview];
                break;
            }
        }
    });

}
|
||||
|
||||
/**
 Updates the title of every YFProgressHUD currently shown in the given
 view. Falls back to the key window when view is nil.

 @param titleString the new title text
 @param view the view hosting the HUD; may be nil (key window is used)
 */
+ (void)reSetTitleString:(NSString *)titleString forView:(UIView *)view
{
    // BUGFIX: the original guard was `if (!view && [view isMemberOfClass:
    // [UIWindow class]])`, which can never be true (messaging nil returns
    // NO), so the key-window fallback was dead code. Also removed the
    // stray ';' after the method signature.
    if (!view) {
        view = [YFProgressHUD getWindow];
    }
    // UILabel text must be mutated on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        for (UIView *subView in view.subviews) {
            if ([subView isKindOfClass:[YFProgressHUD class]]) {
                YFProgressHUD *hud = (YFProgressHUD *)subView;
                hud.titleString = titleString;
                hud.titleLabel.text = titleString;
            }
        }
    });
}
|
||||
|
||||
#pragma mark ====== 初始化=======
|
||||
#pragma mark ====== 初始化=======
/// Builds the subview hierarchy appropriate for the HUD's style.
-(void)setupView
{
    self.userInteractionEnabled = YES;

    // Dispatch to the per-style builder.
    switch (self.hudType) {
        case YFProgressHUDTypeGif:
            [self setupGif];
            break;
        case YFProgressHUDTypeRotAni:
            [self setupRotAni];
            break;
        case YFProgressHUDTypeNormal:
            [self setupNormal];
            break;
        case YFProgressHUDTypeToast:
            [self setUpToast];
            break;
    }
}
|
||||
|
||||
|
||||
|
||||
/// Builds the toast layout: a single rounded black label centered
/// slightly above the middle of the HUD, sized to the text (wrapping
/// to multiple lines when wider than screen-width minus margins).
-(void)setUpToast{

    // Width of the title when laid out on a single line at 14pt.
    CGFloat strW = [self.titleString boundingRectWithSize:CGSizeMake(10000, 30) options:NSStringDrawingUsesLineFragmentOrigin attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.width;

    // CGFloat width = [UIScreen mainScreen].bounds.size.width;
    // CGFloat scale = width/375;
    // CGFloat w = 80 * scale;
    //
    // UIView *centerView = [UIView new];
    // centerView.bounds = CGRectMake(0, 0, w, w);
    // centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20);
    // centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3];
    // centerView.layer.cornerRadius = 4;
    // centerView.layer.masksToBounds = YES;
    //

    UILabel *label=[[UILabel alloc] init];
    // Too wide for one line: cap width and grow vertically.
    if (strW + 40 > [UIScreen mainScreen].bounds.size.width - 60) {
        CGFloat strH = [self.titleString boundingRectWithSize:CGSizeMake([UIScreen mainScreen].bounds.size.width - 60, 10000) options:NSStringDrawingUsesLineFragmentOrigin attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:14]} context:nil].size.height;
        label.frame=CGRectMake(0, 0 , [UIScreen mainScreen].bounds.size.width - 60 , strH + 20);
    }else{
        label.frame=CGRectMake(0, 0 , strW + 20 , 40);
    }
    label.backgroundColor = [UIColor colorWithWhite:0 alpha:1.0];
    label.textColor = [UIColor whiteColor];
    label.textAlignment=NSTextAlignmentCenter;
    label.numberOfLines = 0;
    label.layer.cornerRadius = 4.0;
    label.layer.masksToBounds = YES;
    label.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 40);
    label.text=_titleString;
    label.font=[UIFont systemFontOfSize:14.0f];
    [self addSubview:label];
    self.titleLabel = label;

}
|
||||
|
||||
/// Builds the plain spinner layout: a translucent rounded square with a
/// UIActivityIndicatorView (already started here), plus an optional
/// title label below it. Sizes scale relative to a 375pt-wide screen.
-(void)setupNormal{

    CGFloat width = [UIScreen mainScreen].bounds.size.width;
    CGFloat scale = width/375;  // design baseline: 375pt screen width
    CGFloat w = 80 * scale;
    UIView *centerView = [UIView new];
    centerView.bounds = CGRectMake(0, 0, w, w);
    centerView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 20);
    centerView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.3];
    centerView.layer.cornerRadius = 4;
    centerView.layer.masksToBounds = YES;

    UIActivityIndicatorView *indicatorView = [[UIActivityIndicatorView alloc]init];
    indicatorView.frame = CGRectMake(0, 0, w, w);
    indicatorView.center = CGPointMake(w/2, w/2);
    indicatorView.color = [UIColor colorWithWhite:1.0 alpha:0.8];
    // Enlarge the system indicator; 1.6x at the design baseline.
    indicatorView.transform = CGAffineTransformMakeScale(1.6 * scale, 1.6 * scale);
    indicatorView.hidesWhenStopped = NO;
    [centerView addSubview:indicatorView];
    [indicatorView startAnimating];
    [self addSubview:centerView];

    // Optional title below the spinner box.
    if (_titleString.length != 0) {
        UILabel *label=[[UILabel alloc] init];
        label.frame=CGRectMake(0, 0 , KW , 35);
        label.textColor=[UIColor grayColor];
        label.numberOfLines = 2;
        label.textAlignment=NSTextAlignmentCenter;
        label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2 + w/2);
        label.text=_titleString;
        label.font=[UIFont boldSystemFontOfSize:14.0f];
        [self addSubview:label];
        self.titleLabel = label;
    }
}
|
||||
|
||||
/// Builds the falling/rotating animation layout: the shape image that
/// falls and rotates, a shadow image that scales underneath it, and an
/// optional title. Also records the start/end positions and scales the
/// step animation alternates between (see animateNextStep).
-(void)setupRotAni{

    _shapView=[[UIImageView alloc] init];
    _shapView.frame = CGRectMake(KW/2-31/2, 0, 31, 31);
    _shapView.image = [UIImage imageNamed:self.imagesArr[0]];
    _shapView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2-100);
    _shapView.contentMode = UIViewContentModeScaleAspectFit;
    [self addSubview:_shapView];

    // shadow under the falling shape
    _shadowView = [[UIImageView alloc] init];
    _shadowView.frame = CGRectMake(KW/2-37/2, KH-2.5-30, 37, 2.5);
    _shadowView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2);
    _shadowView.image = [UIImage imageNamed:@"loading_shadow"];
    [self addSubview:_shadowView];

    // Optional title below the animation.
    if (_titleString.length != 0) {
        UILabel *_label=[[UILabel alloc] init];
        _label.frame=CGRectMake(0, 0 , KW , 20);
        _label.textColor=[UIColor grayColor];
        _label.textAlignment=NSTextAlignmentCenter;
        _label.center=CGPointMake(self.frame.size.width/2, self.frame.size.height/2+20);
        _label.text=_titleString;
        _label.font=[UIFont systemFontOfSize:14.0f];
        [self addSubview:_label];
        self.titleLabel = _label;
    }

    // Endpoints of the fall (y position) and of the shadow scale.
    _fromValue=self.frame.size.height/2-100;
    _toValue=self.frame.size.height/2.0-37/2.0;
    _scalefromValue=0.1f;
    _scaletoValue=1.0f;
}
|
||||
|
||||
/// Builds the GIF layout: a YLImageView sized to the GIF's intrinsic
/// size, centered slightly above the middle, with an optional title
/// label below it. The GIF animates on its own; no timer is needed.
-(void)setupGif{

    // NSString *url = [[NSBundle mainBundle] pathForResource:self.gifName ofType:@""];
    UIImage *gifImg = [YLGIFImage imageNamed:self.gifName];//[UIImage sd_animatedGIFWithData:[NSData dataWithContentsOfFile:url]];

    CGSize size = gifImg.size;
    YLImageView *gifView=[[YLImageView alloc] init];
    gifView.frame = CGRectMake(0, 0, size.width, size.height);
    gifView.image = [YLGIFImage imageNamed:self.gifName];
    gifView.center = CGPointMake(self.frame.size.width/2, self.frame.size.height/2 - 30);
    gifView.contentMode = UIViewContentModeScaleAspectFit;
    [self addSubview:gifView];

    // Optional title below the GIF.
    if (_titleString.length != 0) {
        UILabel *label=[[UILabel alloc] init];
        label.frame=CGRectMake(0, gifView.bounds.size.height , KW , 20);
        label.textColor=[UIColor grayColor];
        label.textAlignment=NSTextAlignmentCenter;
        label.center = CGPointMake(gifView.center.x, gifView.center.y + size.height/2 + 20);
        label.text=_titleString;
        label.font=[UIFont systemFontOfSize:14.0f];
        [self addSubview:label];
        self.titleLabel = label;
    }
}
|
||||
|
||||
#pragma mark ====== 动画处理 =======
|
||||
// 开始动画
|
||||
#pragma mark ====== 动画处理 =======
/// Starts the step animation. Only the RotAni style needs driving; the
/// other styles are static or self-animating, so this is a no-op for them.
/// NOTE(review): a repeating NSTimer with target:self retains the HUD
/// until stopAnimating/dealloc invalidates it — the timer must be
/// invalidated before the HUD can be deallocated.
-(void) startAnimating
{
    // Guard against double-starting the timer.
    if (!_isAnimating)
    {
        _isAnimating = YES;
        if (self.hudType == YFProgressHUDTypeRotAni) {
            _timer = [NSTimer scheduledTimerWithTimeInterval:ANIMATION_DURATION_SECS target:self selector:@selector(animateNextStep) userInfo:nil repeats:YES];
            [[NSRunLoop mainRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode];
            // Kick off the first step immediately rather than waiting
            // one full timer interval.
            [self animateNextStep];
        }

    }

}
|
||||
|
||||
// 结束动画
|
||||
/// Stops the step animation: invalidates the timer, resets the step
/// counter, and strips any in-flight layer animations. Only the RotAni
/// style has running state to tear down.
-(void) stopAnimating
{
    _isAnimating = NO;

    if (self.hudType != YFProgressHUDTypeRotAni) {
        return;
    }

    [_timer invalidate];
    _timer = nil;
    _stepNumber = 0;
    [_shapView.layer removeAllAnimations];
    [_shadowView.layer removeAllAnimations];
}
|
||||
|
||||
// 动画方法
|
||||
/// One animation step, fired by the timer every ANIMATION_DURATION_SECS.
/// Even steps lift the shape back up (shadow shrinks) and swap to the
/// next frame image; odd steps drop it (shadow grows). The step counter
/// wraps via the -1 trick so the ++ below brings it back to 0.
-(void)animateNextStep
{

    if (_stepNumber%2==0) {
        // Rise: shape moves from bottom to top, shadow eases down to small.
        [self loadingAnimation:_toValue toValue:_fromValue timingFunction:kCAMediaTimingFunctionEaseOut];
        [self scaleAnimation:_scaletoValue toValue:_scalefromValue timingFunction:kCAMediaTimingFunctionEaseIn];
        _shapView.image=[UIImage imageNamed:self.imagesArr[_stepNumber]];
    }else {
        // Fall: shape drops back down, shadow grows to full size.
        [self loadingAnimation:_fromValue toValue:_toValue timingFunction:kCAMediaTimingFunctionEaseIn];
        [self scaleAnimation:_scalefromValue toValue:_scaletoValue timingFunction:kCAMediaTimingFunctionEaseOut];
    }

    // Wrap around at the end of the frame list (-1 so ++ yields 0).
    if (_stepNumber==self.imagesArr.count-1) {
        _stepNumber = -1;
    }
    _stepNumber++;
}
|
||||
|
||||
// 下落动画
|
||||
/// Runs the fall/rise step on the shape layer: a grouped animation of
/// vertical position plus a quarter-turn rotation, kept at its final
/// frame (fillMode forwards, removedOnCompletion NO) so the shape does
/// not snap back between timer steps.
-(void) loadingAnimation:(float)fromValue toValue:(float)toValue timingFunction:(NSString * const)tf
{
    // Vertical position
    CABasicAnimation *panimation = [CABasicAnimation animation];
    panimation.keyPath = @"position.y";
    panimation.fromValue =@(fromValue);
    panimation.toValue = @(toValue);
    panimation.duration = ANIMATION_DURATION_SECS;
    panimation.timingFunction = [CAMediaTimingFunction functionWithName:tf];

    // Rotation: quarter turn per step
    CABasicAnimation *ranimation = [CABasicAnimation animation];
    ranimation.keyPath = @"transform.rotation";
    ranimation.fromValue =@(0);
    ranimation.toValue = @(M_PI_2);
    ranimation.duration = ANIMATION_DURATION_SECS;

    ranimation.timingFunction = [CAMediaTimingFunction functionWithName:tf];

    // Group both so they run in lockstep
    CAAnimationGroup *group = [[CAAnimationGroup alloc] init];
    group.animations = @[panimation,ranimation];
    group.duration = ANIMATION_DURATION_SECS;
    group.beginTime = 0;
    group.fillMode=kCAFillModeForwards;
    group.removedOnCompletion = NO;

    // Re-adding under the same key replaces the previous step's animation.
    [_shapView.layer addAnimation:group forKey:@"basic"];

}
|
||||
|
||||
// 缩放动画
|
||||
// Shadow scale animation: scales the shadow layer between the given values so
// it shrinks/grows in sync with the falling shape.
-(void) scaleAnimation:(float) fromeValue toValue:(float)toValue timingFunction:(NSString * const)tf
{

    CABasicAnimation *sanimation = [CABasicAnimation animation];
    sanimation.keyPath = @"transform.scale";
    sanimation.fromValue =@(fromeValue);
    sanimation.toValue = @(toValue);
    sanimation.duration = ANIMATION_DURATION_SECS;
    // Keep the final scale applied between animation steps.
    sanimation.fillMode = kCAFillModeForwards;
    sanimation.timingFunction = [CAMediaTimingFunction functionWithName:tf];
    sanimation.removedOnCompletion = NO;
    [_shadowView.layer addAnimation:sanimation forKey:@"shadow"];

}
|
||||
|
||||
#pragma mark ====== setter =======
|
||||
// Expands the raw image-name list into the frame sequence the animation
// consumes: the first name appears once, every later name twice (one per
// animation direction), and the first name is appended again to close the loop.
-(void)setImagesArr:(NSArray *)imagesArr{
    NSMutableArray *frames = [NSMutableArray array];
    [imagesArr enumerateObjectsUsingBlock:^(id name, NSUInteger idx, BOOL *stop) {
        [frames addObject:name];
        if (idx > 0) {
            // Every image after the first needs two animation passes.
            [frames addObject:name];
        }
    }];
    if (imagesArr.count > 0) {
        // Loop back to the first image at the end of the sequence.
        [frames addObject:imagesArr[0]];
    }
    _imagesArr = frames.copy;
}
|
||||
|
||||
#pragma mark ====== 销毁 =======
|
||||
// Stop the repeating timer so it releases its target (a live NSTimer retains
// its target and would otherwise keep firing).
- (void)dealloc
{
    [_timer invalidate];
    _timer=nil;
}
|
||||
|
||||
@end
|
||||
34
OrderScheduling/Video/YFProgressHUD/YLGIFImage.h
Executable file
@@ -0,0 +1,34 @@
|
||||
//
|
||||
// YLGIFImage.h
|
||||
// YLGIFImage
|
||||
//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
// Animated-GIF-aware UIImage subclass: frames are decoded lazily and the
// per-frame timing parsed from the GIF metadata is exposed below.
@interface YLGIFImage : UIImage

///-----------------------
/// @name Image Attributes
///-----------------------

/**
 A C array containing the frame durations (seconds per frame).

 The number of frames is defined by the count of the `images` array property.
 */
@property (nonatomic, readonly) NSTimeInterval *frameDurations;

/**
 Total duration of the animated image (sum of all frame durations).
 */
@property (nonatomic, readonly) NSTimeInterval totalDuration;

/**
 Number of loops the image can do before it stops (0 means loop forever,
 per the GIF loop-count convention — TODO confirm against callers).
 */
@property (nonatomic, readonly) NSUInteger loopCount;

/// Returns the frame at `idx`, decoding it on demand when not yet cached.
- (UIImage*)getFrameWithIndex:(NSUInteger)idx;

@end
|
||||
305
OrderScheduling/Video/YFProgressHUD/YLGIFImage.m
Executable file
@@ -0,0 +1,305 @@
|
||||
//
|
||||
// YLGIFImage.m
|
||||
// YLGIFImage
|
||||
//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved.
|
||||
//
|
||||
|
||||
#import "YLGIFImage.h"
|
||||
#import <MobileCoreServices/MobileCoreServices.h>
|
||||
#import <ImageIO/ImageIO.h>
|
||||
|
||||
|
||||
//Define FLT_EPSILON because, reasons.
|
||||
//Actually, I don't know why but it seems under certain circumstances it is not defined
|
||||
#ifndef FLT_EPSILON
|
||||
#define FLT_EPSILON __FLT_EPSILON__
|
||||
#endif
|
||||
|
||||
// Reads the delay (seconds) of GIF frame `index` from the image source.
// Prefers the unclamped delay and falls back to the clamped one when the
// unclamped value is missing or non-positive.
inline static NSTimeInterval CGImageSourceGetGifFrameDelay(CGImageSourceRef imageSource, NSUInteger index)
{
    NSTimeInterval frameDuration = 0;
    CFDictionaryRef theImageProperties;
    if ((theImageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, index, NULL))) {
        CFDictionaryRef gifProperties;
        if (CFDictionaryGetValueIfPresent(theImageProperties, kCGImagePropertyGIFDictionary, (const void **)&gifProperties)) {
            const void *frameDurationValue;
            if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFUnclampedDelayTime, &frameDurationValue)) {
                frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue];
                if (frameDuration <= 0) {
                    // Unclamped delay unusable; fall back to the clamped delay.
                    if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFDelayTime, &frameDurationValue)) {
                        frameDuration = [(__bridge NSNumber *)frameDurationValue doubleValue];
                    }
                }
            }
        }
        // Balance the Copy above.
        CFRelease(theImageProperties);
    }

#ifndef OLExactGIFRepresentation
    //Implement as Browsers do.
    //See: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser-compatibility
    //Also: http://blogs.msdn.com/b/ieinternals/archive/2010/06/08/animated-gifs-slow-down-to-under-20-frames-per-second.aspx

    if (frameDuration < 0.02 - FLT_EPSILON) {
        frameDuration = 0.1;
    }
#endif
    return frameDuration;
}
|
||||
|
||||
// YES when the source is a GIF with more than one frame, i.e. actually animated.
inline static BOOL CGImageSourceContainsAnimatedGif(CGImageSourceRef imageSource)
{
    return imageSource && UTTypeConformsTo(CGImageSourceGetType(imageSource), kUTTypeGIF) && CGImageSourceGetCount(imageSource) > 1;
}
|
||||
|
||||
// YES when the file name carries an "@2x" marker (case-insensitive),
// i.e. the asset is a retina-resolution resource.
inline static BOOL isRetinaFilePath(NSString *path)
{
    NSString *fileName = [path lastPathComponent];
    return [fileName rangeOfString:@"@2x" options:NSCaseInsensitiveSearch].location != NSNotFound;
}
|
||||
|
||||
// Private readwrite mirrors of the public readonly properties, plus the
// retained CGImageSource used for lazy frame decoding.
@interface YLGIFImage ()

@property (nonatomic, readwrite) NSMutableArray *images;
@property (nonatomic, readwrite) NSTimeInterval *frameDurations;
@property (nonatomic, readwrite) NSTimeInterval totalDuration;
@property (nonatomic, readwrite) NSUInteger loopCount;
@property (nonatomic, readwrite) CGImageSourceRef incrementalSource;
// assign is deliberate: ownership is managed manually via CFRetain/CFRelease
// in the initializer and -dealloc.
@property(nonatomic,assign)CGImageSourceRef imageSourceRef;
@end
|
||||
|
||||
static NSUInteger _prefetchedNum = 10;
|
||||
|
||||
@implementation YLGIFImage
|
||||
{
|
||||
dispatch_queue_t readFrameQueue;
|
||||
// CGImageSourceRef _imageSourceRef;
|
||||
CGFloat _scale;
|
||||
}
|
||||
|
||||
@synthesize images;
|
||||
|
||||
#pragma mark - Class Methods
|
||||
|
||||
// Loads a named image from the main bundle; returns an animated YLGIFImage
// when the file is an animated GIF, otherwise plain UIImage behavior.
// Returns nil when the resource does not exist.
+ (id)imageNamed:(NSString *)name
{
    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:nil];
    // pathForResource: returns nil for a missing resource; bail out early
    // instead of passing nil on to NSFileManager.
    if (path == nil) {
        return nil;
    }
    return ([[NSFileManager defaultManager] fileExistsAtPath:path]) ? [self imageWithContentsOfFile:path] : nil;
}
|
||||
|
||||
// Convenience factory: loads from a file path, inferring 2x retina scale
// from an "@2x" suffix in the file name.
+ (id)imageWithContentsOfFile:(NSString *)path
{
    return [self imageWithData:[NSData dataWithContentsOfFile:path]
                         scale:isRetinaFilePath(path) ? 2.0f : 1.0f];
}

// Convenience factory: raw data at default (1x) scale.
+ (id)imageWithData:(NSData *)data
{
    return [self imageWithData:data scale:1.0f];
}
|
||||
|
||||
// Designated factory: returns a YLGIFImage for animated GIF data, or falls
// back to UIImage's own decoding for any other image data. Returns nil for
// nil data.
+ (id)imageWithData:(NSData *)data scale:(CGFloat)scale
{
    if (!data) {
        return nil;
    }

    CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL);
    UIImage *image;

    if (CGImageSourceContainsAnimatedGif(imageSource)) {
        image = [[self alloc] initWithCGImageSource:imageSource scale:scale];
    } else {
        image = [super imageWithData:data scale:scale];
    }

    // Balance the Create above (the GIF initializer retains its own reference).
    if (imageSource) {
        CFRelease(imageSource);
    }

    return image;
}
|
||||
|
||||
#pragma mark - Initialization methods
|
||||
|
||||
// Initializer from a file path, inferring 2x retina scale from the file name.
- (id)initWithContentsOfFile:(NSString *)path
{
    return [self initWithData:[NSData dataWithContentsOfFile:path]
                        scale:isRetinaFilePath(path) ? 2.0f : 1.0f];
}

// Initializer from raw data at default (1x) scale.
- (id)initWithData:(NSData *)data
{
    return [self initWithData:data scale:1.0f];
}

// Designated data initializer: routes animated GIF data to the
// CGImageSource-based initializer, everything else to UIImage. Returns nil
// for nil data.
- (id)initWithData:(NSData *)data scale:(CGFloat)scale
{
    if (!data) {
        return nil;
    }

    CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)(data), NULL);

    if (CGImageSourceContainsAnimatedGif(imageSource)) {
        self = [self initWithCGImageSource:imageSource scale:scale];
    } else {
        // UIImage's 1x path avoids the scale-taking initializer when possible.
        if (scale == 1.0f) {
            self = [super initWithData:data];
        } else {
            self = [super initWithData:data scale:scale];
        }
    }

    // Balance the Create above.
    if (imageSource) {
        CFRelease(imageSource);
    }

    return self;
}
|
||||
|
||||
// GIF initializer: reads per-frame delays and prefetches the first frames
// from the CGImageSource; remaining frames are decoded lazily on a serial
// queue by -getFrameWithIndex:.
- (id)initWithCGImageSource:(CGImageSourceRef)imageSource scale:(CGFloat)scale
{
    self = [super init];
    if (!imageSource || !self) {
        return nil;
    }

    // Assign before the prefetch loop: the original set _scale only at the
    // end, so prefetched frames were created with scale 0.
    _scale = scale;

    NSUInteger numberOfFrames = CGImageSourceGetCount(imageSource);

    NSDictionary *imageProperties = CFBridgingRelease(CGImageSourceCopyProperties(imageSource, NULL));
    NSDictionary *gifProperties = [imageProperties objectForKey:(NSString *)kCGImagePropertyGIFDictionary];

    // C array of per-frame durations; freed in -dealloc.
    self.frameDurations = (NSTimeInterval *)malloc(numberOfFrames * sizeof(NSTimeInterval));
    self.loopCount = [gifProperties[(NSString *)kCGImagePropertyGIFLoopCount] unsignedIntegerValue];
    self.images = [NSMutableArray arrayWithCapacity:numberOfFrames];

    // NSNull marks frames that have not been decoded yet.
    NSNull *aNull = [NSNull null];
    for (NSUInteger i = 0; i < numberOfFrames; ++i) {
        [self.images addObject:aNull];
        NSTimeInterval frameDuration = CGImageSourceGetGifFrameDelay(imageSource, i);
        self.frameDurations[i] = frameDuration;
        self.totalDuration += frameDuration;
    }

    // Eagerly decode the first few frames so playback can start immediately.
    NSUInteger num = MIN(_prefetchedNum, numberOfFrames);
    for (NSUInteger i = 0; i < num; i++) {
        CGImageRef image = CGImageSourceCreateImageAtIndex(imageSource, i, NULL);
        if (image != NULL) {
            [self.images replaceObjectAtIndex:i withObject:[UIImage imageWithCGImage:image scale:_scale orientation:UIImageOrientationUp]];
            CFRelease(image);
        } else {
            [self.images replaceObjectAtIndex:i withObject:[NSNull null]];
        }
    }

    // Hold exactly ONE retain for the ivar, released in -dealloc. The
    // original retained the source twice and released it once, leaking a
    // reference on every GIF.
    _imageSourceRef = (CGImageSourceRef)CFRetain(imageSource);

    readFrameQueue = dispatch_queue_create("com.ronnie.gifreadframe", DISPATCH_QUEUE_SERIAL);

    return self;
}
|
||||
|
||||
// Returns the frame at `idx`, decoding synchronously on a cache miss, and
// kicks off asynchronous prefetch of the next _prefetchedNum frames.
// Returns a UIImage or nil — never the NSNull placeholder.
- (UIImage*)getFrameWithIndex:(NSUInteger)idx
{
    UIImage* frame = nil;
    @synchronized(self.images) {
        frame = self.images[idx];
    }
    // NSNull is the "not decoded yet" placeholder; treat it as a miss so
    // callers never receive NSNull where a UIImage is expected (the original
    // returned it as-is, relying on every caller to check).
    if (!frame || [frame isKindOfClass:[NSNull class]]) {
        frame = nil;
        CGImageRef image = CGImageSourceCreateImageAtIndex(_imageSourceRef, idx, NULL);
        if (image != NULL) {
            frame = [UIImage imageWithCGImage:image scale:_scale orientation:UIImageOrientationUp];
            CFRelease(image);
        }
    }
    if (self.images.count > _prefetchedNum) {
        // Evict this slot (except frame 0) so memory stays bounded.
        if (idx != 0) {
            [self.images replaceObjectAtIndex:idx withObject:[NSNull null]];
        }
        __weak typeof(self) weakSelf = self;
        NSUInteger nextReadIdx = (idx + _prefetchedNum);
        for (NSUInteger i = idx + 1; i <= nextReadIdx; i++) {
            NSUInteger _idx = i % self.images.count;
            CGFloat scale = _scale;
            if ([self.images[_idx] isKindOfClass:[NSNull class]]) {
                // Decode ahead on the serial queue; weak capture avoids
                // keeping the image alive just for pending prefetch work.
                dispatch_async(readFrameQueue, ^{
                    CGImageRef image = CGImageSourceCreateImageAtIndex(weakSelf.imageSourceRef, _idx, NULL);
                    @synchronized(weakSelf.images) {
                        if (image != NULL) {
                            [weakSelf.images replaceObjectAtIndex:_idx withObject:[UIImage imageWithCGImage:image scale:scale orientation:UIImageOrientationUp]];
                            CFRelease(image);
                        } else {
                            [weakSelf.images replaceObjectAtIndex:_idx withObject:[NSNull null]];
                        }
                    }
                });
            }
        }
    }
    return frame;
}
|
||||
|
||||
#pragma mark - Compatibility methods
// These overrides make the animated image report metrics from its first
// decoded frame so it behaves like a plain UIImage to unaware callers.

// Size of the first frame, or UIImage's own size when not animated.
- (CGSize)size
{
    if (self.images.count) {

        return [(UIImage *)[self.images objectAtIndex:0] size];
    }
    return [super size];
}

// CGImage of the first frame, or UIImage's own backing image.
- (CGImageRef)CGImage
{
    if (self.images.count) {
        return [[self.images objectAtIndex:0] CGImage];
    } else {
        return [super CGImage];
    }
}

// Orientation of the first frame, or UIImage's own orientation.
- (UIImageOrientation)imageOrientation
{
    if (self.images.count) {
        return [[self.images objectAtIndex:0] imageOrientation];
    } else {
        return [super imageOrientation];
    }
}

// Scale of the first frame, or UIImage's own scale.
- (CGFloat)scale
{
    if (self.images.count) {
        return [(UIImage *)[self.images objectAtIndex:0] scale];
    } else {
        return [super scale];
    }
}

// Total animation duration when animated, else UIImage's duration.
- (NSTimeInterval)duration
{
    return self.images ? self.totalDuration : [super duration];
}
|
||||
|
||||
// Release the manually-retained CF image sources and the malloc'd
// frame-duration array (allocated in the GIF initializer).
- (void)dealloc {
    if(_imageSourceRef) {
        CFRelease(_imageSourceRef);
    }
    free(_frameDurations);
    if (_incrementalSource) {
        CFRelease(_incrementalSource);
    }
}
|
||||
|
||||
@end
|
||||
13
OrderScheduling/Video/YFProgressHUD/YLImageView.h
Executable file
@@ -0,0 +1,13 @@
|
||||
//
|
||||
// YLImageView.h
|
||||
// YLGIFImage
|
||||
//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
// UIImageView subclass that plays YLGIFImage frames via a CADisplayLink;
// plain UIImages fall through to normal UIImageView behavior.
@interface YLImageView : UIImageView

// Run-loop mode the display link is scheduled in (defaults to
// NSRunLoopCommonModes so the animation survives scrolling).
@property (nonatomic, copy) NSString *runLoopMode;

@end
|
||||
219
OrderScheduling/Video/YFProgressHUD/YLImageView.m
Executable file
@@ -0,0 +1,219 @@
|
||||
//
|
||||
// YLImageView.m
|
||||
// YLGIFImage
|
||||
//made in zhongdao Copyright (c) 2014年 Yong Li. All rights reserved.
|
||||
//
|
||||
|
||||
#import "YLImageView.h"
|
||||
#import "YLGIFImage.h"
|
||||
#import <QuartzCore/QuartzCore.h>
|
||||
|
||||
// Private playback state: the animated image, the driving display link, and
// the frame-timing bookkeeping used by -changeKeyframe:.
@interface YLImageView ()

@property (nonatomic, strong) YLGIFImage *animatedImage;
@property (nonatomic, strong) CADisplayLink *displayLink;
// Time accumulated since the current frame was shown.
@property (nonatomic) NSTimeInterval accumulator;
@property (nonatomic) NSUInteger currentFrameIndex;
@property (nonatomic, strong) UIImage* currentFrame;
// Remaining loops; NSUIntegerMax means loop forever.
@property (nonatomic) NSUInteger loopCountdown;

@end
|
||||
|
||||
@implementation YLImageView
|
||||
|
||||
const NSTimeInterval kMaxTimeStep = 1; // note: To avoid spiral-o-death
|
||||
|
||||
@synthesize runLoopMode = _runLoopMode;
|
||||
@synthesize displayLink = _displayLink;
|
||||
|
||||
// Plain initializer; playback starts at the first frame.
- (id)init
{
    self = [super init];
    if (self) {
        self.currentFrameIndex = 0;
    }
    return self;
}
|
||||
|
||||
// Lazily creates the display link while the view sits in a superview, and
// tears it down once the view leaves the hierarchy. NOTE(review): the display
// link retains its target (self) until invalidated — the no-superview branch
// is what breaks that retain.
- (CADisplayLink *)displayLink
{
    if (self.superview) {
        if (!_displayLink && self.animatedImage) {
            _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(changeKeyframe:)];
            [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:self.runLoopMode];
        }
    } else {
        [_displayLink invalidate];
        _displayLink = nil;
    }
    return _displayLink;
}
|
||||
|
||||
// Run-loop mode for the display link; defaults to common modes so the
// animation keeps playing during scrolling.
- (NSString *)runLoopMode
{
    if (_runLoopMode) {
        return _runLoopMode;
    }
    return NSRunLoopCommonModes;
}
|
||||
|
||||
// Reschedules the display link onto the new run-loop mode, pausing playback
// during the move and resuming afterwards.
- (void)setRunLoopMode:(NSString *)runLoopMode
{
    if (runLoopMode != _runLoopMode) {
        [self stopAnimating];

        NSRunLoop *runloop = [NSRunLoop mainRunLoop];
        [self.displayLink removeFromRunLoop:runloop forMode:_runLoopMode];
        [self.displayLink addToRunLoop:runloop forMode:runLoopMode];

        _runLoopMode = runLoopMode;

        [self startAnimating];
    }
}
|
||||
|
||||
// Resets playback state and routes the image: YLGIFImages start animated
// playback from frame 0; anything else behaves like a normal UIImageView.
- (void)setImage:(UIImage *)image
{
    if (image == self.image) {
        return;
    }

    [self stopAnimating];

    // Rewind all playback bookkeeping for the new image.
    self.currentFrameIndex = 0;
    self.loopCountdown = 0;
    self.accumulator = 0;

    if ([image isKindOfClass:[YLGIFImage class]] && image.images) {
        // Show frame 0 immediately (it may still be the NSNull placeholder).
        if([image.images[0] isKindOfClass:UIImage.class])
            [super setImage:image.images[0]];
        else
            [super setImage:nil];
        self.currentFrame = nil;
        self.animatedImage = (YLGIFImage *)image;
        // loopCount 0 is treated as "loop forever".
        self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax;
        [self startAnimating];
    } else {
        self.animatedImage = nil;
        [super setImage:image];
    }
    [self.layer setNeedsDisplay];
}
|
||||
|
||||
// Stores the animated image; clearing it also clears the layer contents so
// no stale frame stays visible.
- (void)setAnimatedImage:(YLGIFImage *)animatedImage
{
    _animatedImage = animatedImage;
    if (!animatedImage) {
        self.layer.contents = nil;
    }
}
|
||||
|
||||
// Animating when either UIImageView's own animation is running or our
// display link exists and is not paused.
- (BOOL)isAnimating
{
    return [super isAnimating] || (self.displayLink && !self.displayLink.isPaused);
}
|
||||
|
||||
// Pauses GIF playback (or defers to UIImageView when not showing a GIF).
- (void)stopAnimating
{
    if (!self.animatedImage) {
        [super stopAnimating];
        return;
    }

    self.loopCountdown = 0;

    self.displayLink.paused = YES;
}
|
||||
|
||||
// Resumes GIF playback (or defers to UIImageView when not showing a GIF).
// No-op when already animating.
- (void)startAnimating
{
    if (!self.animatedImage) {
        [super startAnimating];
        return;
    }

    if (self.isAnimating) {
        return;
    }

    // loopCount 0 means "loop forever".
    self.loopCountdown = self.animatedImage.loopCount ?: NSUIntegerMax;

    self.displayLink.paused = NO;
}
|
||||
|
||||
// Display-link tick: accumulates elapsed time and steps through as many
// frames as that time covers, honoring the per-frame GIF delays and the
// remaining loop budget.
- (void)changeKeyframe:(CADisplayLink *)displayLink
{
    if (self.currentFrameIndex >= [self.animatedImage.images count]) {
        return;
    }
    // Cap the step so a long main-thread stall doesn't fast-forward wildly
    // (the "spiral of death" kMaxTimeStep guards against).
    self.accumulator += fmin(displayLink.duration, kMaxTimeStep);

    while (self.accumulator >= self.animatedImage.frameDurations[self.currentFrameIndex]) {
        self.accumulator -= self.animatedImage.frameDurations[self.currentFrameIndex];
        if (++self.currentFrameIndex >= [self.animatedImage.images count]) {
            // Finished one loop; stop for good when the loop budget runs out.
            if (--self.loopCountdown == 0) {
                [self stopAnimating];
                return;
            }
            self.currentFrameIndex = 0;
        }
        self.currentFrameIndex = MIN(self.currentFrameIndex, [self.animatedImage.images count] - 1);
        self.currentFrame = [self.animatedImage getFrameWithIndex:self.currentFrameIndex];
        [self.layer setNeedsDisplay];
    }
}
|
||||
|
||||
// Pushes the current decoded frame into the layer; skips the NSNull
// placeholder used for not-yet-decoded frames.
- (void)displayLayer:(CALayer *)layer
{
    if (!self.animatedImage || [self.animatedImage.images count] == 0) {
        return;
    }
    if(self.currentFrame && ![self.currentFrame isKindOfClass:[NSNull class]])
        layer.contents = (__bridge id)([self.currentFrame CGImage]);
}
|
||||
|
||||
// Start playback on entering a window; when leaving, re-check on the next
// main-queue turn so a quick re-attach doesn't needlessly stop the animation.
- (void)didMoveToWindow
{
    [super didMoveToWindow];
    if (self.window) {
        [self startAnimating];
    } else {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (!self.window) {
                [self stopAnimating];
            }
        });
    }
}
|
||||
|
||||
// Keep the display link's lifetime tied to superview membership: the
// -displayLink accessor itself creates or invalidates the link based on
// whether a superview exists.
- (void)didMoveToSuperview
{
    [super didMoveToSuperview];
    if (self.superview) {
        //Has a superview, make sure it has a displayLink
        [self displayLink];
    } else {
        //Doesn't have superview, let's check later if we need to remove the displayLink
        dispatch_async(dispatch_get_main_queue(), ^{
            [self displayLink];
        });
    }
}
|
||||
|
||||
// Suppress highlighting while showing an animated image (UIImageView's
// highlight swap would fight the display-link playback).
- (void)setHighlighted:(BOOL)highlighted
{
    if (!self.animatedImage) {
        [super setHighlighted:highlighted];
    }
}

// Report the animated image when one is set, else the plain image.
- (UIImage *)image
{
    return self.animatedImage ?: [super image];
}

// Natural size is the current image's size.
- (CGSize)sizeThatFits:(CGSize)size
{
    return self.image.size;
}
|
||||
|
||||
@end
|
||||
|
||||
32
OrderScheduling/Video/YFTimerTool/YFTimer.h
Normal file
@@ -0,0 +1,32 @@
|
||||
//
|
||||
// YFTimer.h
|
||||
// Timer_Demo
|
||||
//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
// Tick callback protocol for YFTimer.
@protocol YFTimerDelegate <NSObject>
@optional
// Called on every tick; `interval` is the firing timer's repeat interval.
-(void)toDoThingsWhenTimeCome:(NSTimeInterval)interval;

@end

// A GCD-backed repeating timer that broadcasts ticks to weakly-held delegates.
@interface YFTimer : NSObject
// The timer's repeat interval, in seconds.
@property(nonatomic,assign)NSTimeInterval interval;

// Register a delegate (held weakly; duplicates are ignored).
-(void)timerAddDelegate:(id<YFTimerDelegate>)delegate;

// Unregister a delegate.
-(void)timerDeleteDelegate:(id<YFTimerDelegate>)delegate;

// Create and start the repeating timer with the given interval.
-(void)fireTimeWithInterval:(NSTimeInterval)interval;

// Stop the timer and drop all delegates.
-(void)invalidate;

@end
|
||||
|
||||
87
OrderScheduling/Video/YFTimerTool/YFTimer.m
Normal file
@@ -0,0 +1,87 @@
|
||||
//
|
||||
// YFTimer.m
|
||||
// Timer_Demo
|
||||
//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
|
||||
//
|
||||
|
||||
#import "YFTimer.h"
|
||||
#import "YFTimerManager.h"
|
||||
|
||||
@interface YFTimer()
// NSPointerArray with weak references, so registered delegates are not
// retained by the timer and dead entries become NULL slots.
@property(nonatomic,strong)NSPointerArray *delegates;
// The underlying GCD timer source (scheduled on the main queue).
@property (nonatomic,strong)dispatch_source_t timer;

@end
|
||||
|
||||
@implementation YFTimer
|
||||
|
||||
// 添加定时器
|
||||
// Creates and starts a repeating GCD timer on the main queue firing every
// `interval` seconds.
-(void)fireTimeWithInterval:(NSTimeInterval)interval{

    self.interval = interval;

    dispatch_source_t timer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue());
    // Fire every `interval` seconds, zero leeway.
    dispatch_source_set_timer(timer, dispatch_walltime(NULL, 0), interval * NSEC_PER_SEC, 0);
    // Capture self weakly: self.timer retains the source, so a strong capture
    // in the handler would form a retain cycle and the YFTimer (and its
    // source) could never be deallocated.
    __weak typeof(self) weakSelf = self;
    dispatch_source_set_event_handler(timer, ^{
        [weakSelf isTimeToDoThing];
    });
    dispatch_resume(timer);

    self.timer = timer;

}
|
||||
|
||||
// 取消定时器
|
||||
// Stops the timer and drops all delegates.
-(void)invalidate{
    self.delegates = nil;
    // Cancel the source before releasing the last strong reference; simply
    // letting ARC release an active, resumed source leaves its event handler
    // installed until the source is cancelled.
    if (self.timer) {
        dispatch_source_cancel(self.timer);
    }
    self.timer = nil;
}
|
||||
|
||||
// 添加代理
|
||||
// Register a delegate if not already present (weakly held).
-(void)timerAddDelegate:(id<YFTimerDelegate>)delegate{
    if (![self.delegates.allObjects containsObject:delegate]) {

        // Workaround: NSPointerArray's -compact is known to only purge NULL
        // slots reliably after a NULL has just been added, so push one NULL
        // first and then compact away all dead weak entries.
        // NOTE(review): this relies on a Foundation quirk — verify it still
        // holds on current OS versions.
        [self.delegates addPointer:NULL];
        [self.delegates compact];

        [self.delegates addPointer:(__bridge void * _Nullable)(delegate)];
    }
}
|
||||
|
||||
// 取消代理
|
||||
// Unregister a delegate.
-(void)timerDeleteDelegate:(id<YFTimerDelegate>)delegate{
    // NSPointerArray.allObjects skips NULL (dead weak) slots, so an index
    // computed from allObjects — as the original did — can be offset from the
    // real slot index and remove the wrong pointer. Scan the slots directly.
    for (NSUInteger i = 0; i < self.delegates.count; i++) {
        if ([self.delegates pointerAtIndex:i] == (__bridge void *)(delegate)) {
            [self.delegates removePointerAtIndex:i];
            return;
        }
    }
}
|
||||
|
||||
// 倒计时要做的事
|
||||
// Tick handler: notifies every live delegate; when no delegates remain
// (all weak references died), the timer unregisters itself from the manager.
-(void)isTimeToDoThing{

    if (self.delegates.allObjects.count == 0) {
        [YFTimerManager invalidateTimerForTimeInterval:self.interval];
        return;
    }

    for (id<YFTimerDelegate>delegate in self.delegates.allObjects) {
        // toDoThingsWhenTimeCome: is @optional, so check before messaging.
        if (delegate && [delegate respondsToSelector:@selector(toDoThingsWhenTimeCome:)]) {
            [delegate toDoThingsWhenTimeCome:self.interval];
        }
    }

}
|
||||
|
||||
// Lazily-created weak-reference container for the timer's delegates.
-(NSPointerArray *)delegates{
    if (_delegates == nil) {
        _delegates = [NSPointerArray weakObjectsPointerArray];
    }
    return _delegates;
}
|
||||
|
||||
@end
|
||||
58
OrderScheduling/Video/YFTimerTool/YFTimerManager.h
Normal file
@@ -0,0 +1,58 @@
|
||||
//
|
||||
// YFTimerManager.h
|
||||
// Timer_Demo
|
||||
//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "YFTimer.h"
|
||||
|
||||
// Singleton declaration helper (.h side).
#define YFSingleTonH(ClassName) +(instancetype)share##ClassName;

// Singleton implementation helper (.m side): dispatch_once-guarded alloc,
// shared accessor, and copyWithZone returning the shared instance.
#define YFSingleTonM(ClassName) \
static id _instance=nil;\
+(instancetype)allocWithZone:(struct _NSZone *)zone{\
static dispatch_once_t onceToken;\
dispatch_once(&onceToken, ^{\
_instance=[super allocWithZone:zone];\
});\
return _instance;\
}\
+(instancetype)share##ClassName{\
static dispatch_once_t onceToken;\
dispatch_once(&onceToken, ^{\
_instance=[[self alloc] init];\
});\
return _instance;\
}\
-(instancetype)copyWithZone:(NSZone *)zone{\
return _instance;\
}

@protocol YFTimerDelegate;

// Central registry that owns at most one YFTimer per distinct interval.
@interface YFTimerManager : NSObject

// Live YFTimer instances, one per interval.
@property(nonatomic,strong)NSMutableArray *timers;

YFSingleTonH(YFTimerManager)

// Add a timer with the given interval (no-op if one already exists).
+(void)addTimerWithTimeInterval:(NSTimeInterval)interval;

// Attach a delegate to the timer with the given interval (creating it if needed).
+(void)addTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval;

// Detach a delegate from the timer with the given interval.
+(void)deleteTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval;

// Stop and remove the timer with the given interval.
+(void)invalidateTimerForTimeInterval:(NSTimeInterval)interval;

// Stop and remove all timers.
+(void)invalidateAllTimer;

@end
|
||||
|
||||
90
OrderScheduling/Video/YFTimerTool/YFTimerManager.m
Normal file
@@ -0,0 +1,90 @@
|
||||
//
|
||||
// YFTimerManager.m
|
||||
// Timer_Demo
|
||||
//made in zhongdao Copyright © 2018年 jianghu3. All rights reserved.
|
||||
//
|
||||
|
||||
#import "YFTimerManager.h"
|
||||
|
||||
@interface YFTimerManager ()<YFTimerDelegate>
|
||||
|
||||
@end
|
||||
|
||||
@implementation YFTimerManager
|
||||
|
||||
YFSingleTonM(YFTimerManager)
|
||||
|
||||
// 添加一个时间间隔是interval的定时器
|
||||
// Creates and registers a timer for `interval`; no-op when one with the same
// interval already exists (prevents duplicate timers).
+(void)addTimerWithTimeInterval:(NSTimeInterval)interval{

    for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) {
        if (timer.interval == interval) {// avoid adding a duplicate timer
            return;
        }
    }

    YFTimer *timer = [[YFTimer alloc] init];
    [timer fireTimeWithInterval:interval];

    [[YFTimerManager shareYFTimerManager].timers addObject:timer];

}
|
||||
|
||||
// 取消一个时间间隔是interval的定时器
|
||||
// Stops and unregisters the timer with the given interval, if any.
+(void)invalidateTimerForTimeInterval:(NSTimeInterval)interval{
    NSMutableArray *timers = [YFTimerManager shareYFTimerManager].timers;
    // Find first, remove after the loop: the original removed the object
    // inside fast enumeration, which is undefined and can raise
    // NSGenericException ("collection was mutated while being enumerated").
    YFTimer *target = nil;
    for (YFTimer *timer in timers) {
        if (timer.interval == interval) {
            target = timer;
            break;
        }
    }
    if (target) {
        [target invalidate];
        [timers removeObject:target];
    }
}
|
||||
|
||||
// 取消所有的定时器
|
||||
// Stops every registered timer, then empties the registry (removal happens
// after the enumeration, so no mutation-during-enumeration issue here).
+(void)invalidateAllTimer{

    for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) {
        [timer invalidate];
    }
    [[YFTimerManager shareYFTimerManager].timers removeAllObjects];
}
|
||||
|
||||
// 给时间间隔是interval的定时器设置代理
|
||||
// Attaches `delegate` to the timer with the given interval, creating the
// timer first if no timer with that interval exists yet.
+(void)addTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval{

    YFTimerManager *manager = [YFTimerManager shareYFTimerManager];

    // Reuse an existing timer with a matching interval.
    for (YFTimer *timer in manager.timers) {
        if (timer.interval == interval) {
            [timer timerAddDelegate:delegate];
            return;
        }
    }

    // No match: create exactly one timer, then attach the delegate. The
    // original called addTimerWithTimeInterval: from inside the loop's else
    // branch — once per non-matching timer — mutating the array it was
    // fast-enumerating and attempting duplicate timer creation.
    [self addTimerWithTimeInterval:interval];
    [manager.timers.lastObject timerAddDelegate:delegate];

}
|
||||
|
||||
// 给时间间隔是interval的定时器取消代理
|
||||
// Detaches `delegate` from the timer with the given interval, if that timer
// exists.
+(void)deleteTimerDelegate:(id<YFTimerDelegate>)delegate forTimeInterval:(NSTimeInterval)interval{
    for (YFTimer *timer in [YFTimerManager shareYFTimerManager].timers) {
        if (timer.interval == interval) {// only the matching-interval timer
            [timer timerDeleteDelegate:delegate];
            return;
        }
    }
}
|
||||
|
||||
// Lazily-created backing store for the active timers.
-(NSMutableArray *)timers{
    if (!_timers) {
        _timers = [NSMutableArray array];
    }
    return _timers;
}
|
||||
|
||||
@end
|
||||