add operations for video call

pull/560/head
ryanzhao 4 years ago
parent 6e07c56e7d
commit 8013cdacff

@ -7,6 +7,7 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
let sessionID: String
let mode: Mode
let webRTCSession: WebRTCSession
var isMuted = false
lazy var cameraManager: CameraManager = {
let result = CameraManager()
@ -19,6 +20,14 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
}()
// MARK: UI Components
/// Small fixed-size preview of the local camera feed, rendered by WebRTC's Metal view.
private lazy var localVideoView: RTCMTLVideoView = {
    let view = RTCMTLVideoView()
    view.contentMode = .scaleAspectFill
    // Thumbnail dimensions for the picture-in-picture self view.
    view.set(.width, to: 80)
    view.set(.height, to: 173)
    return view
}()
private lazy var remoteVideoView: RTCMTLVideoView = {
let result = RTCMTLVideoView()
result.contentMode = .scaleAspectFill
@ -48,6 +57,42 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
return result
}()
/// Red circular button that terminates the call (wired to `close`).
private lazy var hangUpButton: UIButton = {
    let button = UIButton(type: .custom)
    button.setImage(UIImage(named: "EndCall")!.withTint(.white), for: .normal)
    button.set(.width, to: 60)
    button.set(.height, to: 60)
    button.backgroundColor = Colors.destructive
    // Half the width/height above, so the button renders as a circle.
    button.layer.cornerRadius = 30
    button.addTarget(self, action: #selector(close), for: .touchUpInside)
    return button
}()
/// Dark circular button that flips between the front and back cameras.
private lazy var switchCameraButton: UIButton = {
    let button = UIButton(type: .custom)
    button.setImage(UIImage(named: "SwitchCamera")!.withTint(.white), for: .normal)
    button.set(.width, to: 60)
    button.set(.height, to: 60)
    button.backgroundColor = UIColor(hex: 0x1F1F1F)
    // Half the width/height above, so the button renders as a circle.
    button.layer.cornerRadius = 30
    button.addTarget(self, action: #selector(switchCamera), for: .touchUpInside)
    return button
}()
/// Dark circular button that toggles the microphone; `switchAudio` swaps its
/// image and background colour to reflect the muted state.
private lazy var switchAudioButton: UIButton = {
    let button = UIButton(type: .custom)
    button.setImage(UIImage(named: "AudioOn")!.withTint(.white), for: .normal)
    button.set(.width, to: 60)
    button.set(.height, to: 60)
    button.backgroundColor = UIColor(hex: 0x1F1F1F)
    // Half the width/height above, so the button renders as a circle.
    button.layer.cornerRadius = 30
    button.addTarget(self, action: #selector(switchAudio), for: .touchUpInside)
    return button
}()
private lazy var titleLabel: UILabel = {
let result = UILabel()
result.textColor = .white
@ -115,15 +160,11 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
remoteVideoView.translatesAutoresizingMaskIntoConstraints = false
remoteVideoView.pin(to: view)
// Local video view
let localVideoView = RTCMTLVideoView()
localVideoView.contentMode = .scaleAspectFill
webRTCSession.attachLocalRenderer(localVideoView)
localVideoView.set(.width, to: 80)
localVideoView.set(.height, to: 173)
view.addSubview(localVideoView)
localVideoView.pin(.right, to: .right, of: view, withInset: -Values.largeSpacing)
let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom + Values.largeSpacing
localVideoView.pin(.bottom, to: .bottom, of: view, withInset: -bottomMargin)
localVideoView.pin(.right, to: .right, of: view, withInset: -Values.smallSpacing)
let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
localVideoView.pin(.top, to: .top, of: view, withInset: topMargin)
// Fade view
view.addSubview(fadeView)
fadeView.translatesAutoresizingMaskIntoConstraints = false
@ -138,6 +179,21 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
titleLabel.translatesAutoresizingMaskIntoConstraints = false
titleLabel.center(.vertical, in: closeButton)
titleLabel.center(.horizontal, in: view)
// End call button
view.addSubview(hangUpButton)
hangUpButton.translatesAutoresizingMaskIntoConstraints = false
hangUpButton.center(.horizontal, in: view)
hangUpButton.pin(.bottom, to: .bottom, of: view, withInset: -Values.newConversationButtonBottomOffset)
// Switch camera button
view.addSubview(switchCameraButton)
switchCameraButton.translatesAutoresizingMaskIntoConstraints = false
switchCameraButton.center(.vertical, in: hangUpButton)
switchCameraButton.pin(.right, to: .left, of: hangUpButton, withInset: -Values.veryLargeSpacing)
// Switch audio button
view.addSubview(switchAudioButton)
switchAudioButton.translatesAutoresizingMaskIntoConstraints = false
switchAudioButton.center(.vertical, in: hangUpButton)
switchAudioButton.pin(.left, to: .right, of: hangUpButton, withInset: Values.veryLargeSpacing)
}
override func viewDidAppear(_ animated: Bool) {
@ -174,4 +230,24 @@ final class CallVC : UIViewController, WebRTCSessionDelegate {
}
presentingViewController?.dismiss(animated: true, completion: nil)
}
/// Flips between the front and back camera by delegating to the camera manager.
@objc private func switchCamera() {
cameraManager.switchCamera()
}
/// Toggles the local audio track between muted and unmuted, updating the
/// session and the button appearance to reflect the new state.
///
/// The two branches of the original were near-duplicates; the state is now
/// flipped once and the UI derived from it.
@objc private func switchAudio() {
    isMuted = !isMuted
    if isMuted {
        webRTCSession.mute()
        // Destructive (red) background signals the microphone is off.
        switchAudioButton.backgroundColor = Colors.destructive
        let image = UIImage(named: "AudioOff")!.withTint(.white)
        switchAudioButton.setImage(image, for: UIControl.State.normal)
    } else {
        webRTCSession.unmute()
        switchAudioButton.backgroundColor = UIColor(hex: 0x1F1F1F)
        let image = UIImage(named: "AudioOn")!.withTint(.white)
        switchAudioButton.setImage(image, for: UIControl.State.normal)
    }
}
}

@ -17,15 +17,20 @@ final class CameraManager : NSObject {
private var isCapturing = false
weak var delegate: CameraManagerDelegate?
private lazy var videoCaptureDevice: AVCaptureDevice? = {
return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
}()
private var videoCaptureDevice: AVCaptureDevice?
private var videoInput: AVCaptureDeviceInput?
func prepare() {
print("[Calls] Preparing camera.")
if let videoCaptureDevice = videoCaptureDevice,
addNewVideoIO(position: .front)
}
private func addNewVideoIO(position: AVCaptureDevice.Position) {
if let videoCaptureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position),
let videoInput = try? AVCaptureDeviceInput(device: videoCaptureDevice), captureSession.canAddInput(videoInput) {
captureSession.addInput(videoInput)
self.videoCaptureDevice = videoCaptureDevice
self.videoInput = videoInput
}
if captureSession.canAddOutput(videoDataOutput) {
captureSession.addOutput(videoDataOutput)
@ -34,7 +39,7 @@ final class CameraManager : NSObject {
guard let connection = videoDataOutput.connection(with: AVMediaType.video) else { return }
connection.videoOrientation = .portrait
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = true
connection.isVideoMirrored = (position == .front)
} else {
SNLog("Couldn't add video data output to capture session.")
}
@ -53,6 +58,21 @@ final class CameraManager : NSObject {
isCapturing = false
captureSession.stopRunning()
}
/// Tears down the current video input/output and rebuilds them for the
/// opposite camera (front <-> back). No-op when no camera is attached yet.
///
/// The original duplicated the removeInput/removeOutput calls in both
/// branches; only the target position differs, so it is computed once.
func switchCamera() {
    guard let videoCaptureDevice = videoCaptureDevice, let videoInput = videoInput else { return }
    stop()
    captureSession.removeInput(videoInput)
    captureSession.removeOutput(videoDataOutput)
    // addNewVideoIO(position:) re-adds the I/O and sets mirroring for the front lens.
    let newPosition: AVCaptureDevice.Position = (videoCaptureDevice.position == .front) ? .back : .front
    addNewVideoIO(position: newPosition)
    start()
}
}
extension CameraManager : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "AudioOff.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "Shape.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "Path.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "SwitchCamera.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

@ -204,6 +204,14 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
peerConnection.close()
}
/// Stops sending microphone audio by disabling the local audio track.
/// The track stays attached, so `unmute()` can re-enable it without renegotiation.
public func mute() {
audioTrack.isEnabled = false
}
/// Resumes sending microphone audio by re-enabling the local audio track.
public func unmute() {
audioTrack.isEnabled = true
}
// MARK: Delegate
public func peerConnection(_ peerConnection: RTCPeerConnection, didChange state: RTCSignalingState) {
print("[Calls] Signaling state changed to: \(state).")

Loading…
Cancel
Save