iOS screen sharing (ReplayKit) using WebRTC in Swift

I implemented the WebRTC SDK for video calling and it is working fine. During a video call, a user can share their screen with the other user.
I am using ReplayKit for screen sharing.
Here is my code:
import ReplayKit
import WebRTC
class SampleHandler: RPBroadcastSampleHandler {
var peerConnectionFactory: RTCPeerConnectionFactory?
var localVideoSource: RTCVideoSource?
var videoCapturer: RTCVideoCapturer?
var peerConnection: RTCPeerConnection?
var localVideoTrack: RTCVideoTrack?
var disconnectSemaphore: DispatchSemaphore?
var videodelegate: VideoViewExtensionDelegate?
var signalClient: SignalingClient? = nil
let config = Config.default
let peerConnectionfactory: RTCPeerConnectionFactory = {
RTCInitializeSSL()
let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
}()
private let mediaConstraints = [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueFalse,
kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue]
static let kAudioSampleType = RPSampleBufferType.audioMic
override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
self.setupVideo()
}
override func broadcastPaused() {
// User has requested to pause the broadcast. Samples will stop being delivered.
// self.audioTrack?.isEnabled = false
// self.screenTrack?.isEnabled = false
}
override func broadcastResumed() {
// User has requested to resume the broadcast. Samples delivery will resume.
// self.audioTrack?.isEnabled = true
// self.screenTrack?.isEnabled = true
}
override func broadcastFinished() {
// User has requested to finish the broadcast.
}
func setupVideo() {
if #available(iOS 13.0, *) {
let webSocketProvider: WebSocketProvider
webSocketProvider = NativeWebSocket(url: self.config.signalingServerUrl)
self.signalClient = SignalingClient(webSocket: webSocketProvider)
let config = RTCConfiguration()
// config.iceServers = [RTCIceServer(urlStrings: iceServers)]
config.iceServers = [RTCIceServer(urlStrings: ["<turn/stun server url>"],
username: "<username>",
credential: "<password>")]
// Unified Plan is preferred over Plan B
// config.sdpSemantics = .unifiedPlan
// gatherContinually lets WebRTC listen for network changes and send any new candidates to the other client
config.continualGatheringPolicy = .gatherContinually
let screenSharefactory = self.peerConnectionfactory
let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
optionalConstraints: ["DtlsSrtpKeyAgreement":kRTCMediaConstraintsValueTrue])
self.peerConnection = screenSharefactory.peerConnection(with: config, constraints: constraints, delegate: nil)
self.peerConnection?.delegate = self
self.localVideoSource = screenSharefactory.videoSource()
self.videoCapturer = RTCVideoCapturer(delegate: self.localVideoSource!)
self.localVideoTrack = screenSharefactory.videoTrack(with: self.localVideoSource!, trackId:"video0")
// let videoSender = newpeerConnection.sender(withKind: kRTCMediaStreamTrackKindVideo, streamId: "stream")
// videoSender.track = videoTrack
let mediaStream: RTCMediaStream = (screenSharefactory.mediaStream(withStreamId: "1"))
mediaStream.addVideoTrack(self.localVideoTrack!)
self.peerConnection?.add(mediaStream)
self.offer(peerconnection: self.peerConnection!) { (sdp) in
self.signalClient?.send(sdp: sdp)
}
}
}
func offer(peerconnection: RTCPeerConnection, completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {
let constraints = RTCMediaConstraints(mandatoryConstraints: self.mediaConstraints,
optionalConstraints: nil)
peerconnection.offer(for: constraints) { (sdp, error) in
guard let sdp = sdp else {
return
}
peerconnection.setLocalDescription(sdp, completionHandler: { (error) in
completion(sdp)
})
}
}
override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
switch sampleBufferType {
case RPSampleBufferType.video:
guard let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
break
}
let rtcpixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
let timeStampNs: Int64 = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000000000)
let videoFrame = RTCVideoFrame(buffer: rtcpixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timeStampNs)
print(videoFrame)
self.localVideoSource?.capturer(self.videoCapturer!, didCapture: videoFrame)
break
case RPSampleBufferType.audioApp:
if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioApp) {
// ExampleCoreAudioDeviceCapturerCallback(audioDevice, sampleBuffer)
}
break
case RPSampleBufferType.audioMic:
if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioMic) {
}
break
@unknown default:
return
}
}
}
extension SampleHandler: RTCPeerConnectionDelegate {
func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
debugPrint("peerConnection new signaling state: \(stateChanged)")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
debugPrint("peerConnection did add stream")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
debugPrint("peerConnection did remote stream")
}
func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
debugPrint("peerConnection should negotiate")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
debugPrint("peerConnection new connection state: \(newState)")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
debugPrint("peerConnection new gathering state: \(newState)")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
debugPrint("peerConnection did Generate")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
debugPrint("peerConnection did remove candidate(s)")
}
func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
debugPrint("peerConnection did open data channel")
// self.remoteDataChannel = dataChannel
}
}
extension SampleHandler: RTCDataChannelDelegate {
func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
debugPrint("dataChannel did change state: \(dataChannel.readyState)")
}
func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) {
}
}
I am using this WebRTC project: https://github.com/stasel/WebRTC-iOS
I am getting the CMSampleBuffer data, creating the RTCVideoFrame, and passing it on correctly.
CMSampleBuffer data for reference:
CMSampleBuffer 0x100918370 retainCount: 5 allocator: 0x1e32175e0
invalid = NO
dataReady = YES
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
formatDescription = <CMAudioFormatDescription 0x282bf0e60 [0x1e32175e0]> {
mediaType:'soun'
mediaSubType:'lpcm'
mediaSpecific: {
ASBD: {
mSampleRate: 44100.000000
mFormatID: 'lpcm'
mFormatFlags: 0xe
mBytesPerPacket: 4
mFramesPerPacket: 1
mBytesPerFrame: 4
mChannelsPerFrame: 2
mBitsPerChannel: 16 }
cookie: {(null)}
ACL: {(null)}
FormatList Array: {
Index: 0
ChannelLayoutTag: 0x650002
ASBD: {
mSampleRate: 44100.000000
mFormatID: 'lpcm'
mFormatFlags: 0xe
mBytesPerPacket: 4
mFramesPerPacket: 1
mBytesPerFrame: 4
mChannelsPerFrame: 2
mBitsPerChannel: 16 }}
}
extensions: {(null)}
}
sbufToTrackReadiness = 0x0
numSamples = 1024
outputPTS = {190371138262458/1000000000 = 190371.138}(based on cachedOutputPresentationTimeStamp)
sampleTimingArray[1] = {
{PTS = {190371138262458/1000000000 = 190371.138}, DTS = {INVALID}, duration = {1/44100 = 0.000}},
}
dataBuffer = 0x2828f1050
I am stuck here and don't know what is wrong with my code. Any help is highly appreciated.

WebRTC is a peer-to-peer connection. If you want to share your screen with another user, you have to create a CVPixelBuffer from the screen (using a custom capture class, referred to here as RTCCustomcaptureframe, i.e. a custom RTCVideoCapturer that feeds frames into the video source) and create a WebRTC client to connect with the other device. (For a simpler setup, split the WebRTC client into its own component.)
You cannot connect three devices with a single peer connection.
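As a rough sketch of that idea, assuming the same WebRTC framework as the question (ScreenSampleCapturer is a hypothetical name; the frame construction mirrors the question's processSampleBuffer):
import CoreMedia
import ReplayKit
import WebRTC
// Hypothetical custom capturer: forwards ReplayKit sample buffers into a WebRTC video source.
final class ScreenSampleCapturer: RTCVideoCapturer {
    func push(_ sampleBuffer: CMSampleBuffer) {
        // Only video buffers carry a pixel buffer; audio buffers return nil here.
        guard CMSampleBufferIsValid(sampleBuffer),
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        // RTCVideoFrame expects the presentation timestamp in nanoseconds.
        let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        let timeStampNs = Int64(CMTimeGetSeconds(pts) * Double(NSEC_PER_SEC))
        let frame = RTCVideoFrame(buffer: rtcBuffer, rotation: ._0, timeStampNs: timeStampNs)
        // `delegate` is the RTCVideoSource that was passed to RTCVideoCapturer(delegate:).
        delegate?.capturer(self, didCapture: frame)
    }
}
The SampleHandler would then create one ScreenSampleCapturer with the local video source as its delegate and call push(_:) for every video sample buffer it receives.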

Related

How do I pass a scanned barcode ID from first view controller to second View Controller's UILabel?

This is the barcode scanning tutorial I used in my program, so that you have a lot more context when you read my code: Link
Here is what my program does so far: essentially, when I scan an item's barcode with my phone, a UIAlert pops up with the barcode ID displayed and a button prompting the user to open the "Results" page. This is all fine and good, but how do I pass that same scanned barcode ID into a label on the Results page? I have been stuck on this for two days now, even though it seems like such an easy task.
Any help is much appreciated <3
Here is my relevant code:
ProductCatalog.plist ->
Link to Image
Scanner_ViewController.swift (first View Controller) ->
import UIKit
import AVFoundation
class Scanner_ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ScannerDelegate
{
private var scanner: Scanner?
override func viewDidLoad()
{
super.viewDidLoad()
self.scanner = Scanner(withDelegate: self)
guard let scanner = self.scanner else
{
return
}
scanner.requestCaptureSessionStartRunning()
}
override func didReceiveMemoryWarning()
{
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - AVFoundation delegate methods
public func metadataOutput(_ output: AVCaptureMetadataOutput,
didOutput metadataObjects: [AVMetadataObject],
from connection: AVCaptureConnection)
{
guard let scanner = self.scanner else
{
return
}
scanner.metadataOutput(output,
didOutput: metadataObjects,
from: connection)
}
// MARK: - Scanner delegate methods
func cameraView() -> UIView
{
return self.view
}
func delegateViewController() -> UIViewController
{
return self
}
func scanCompleted(withCode code: String)
{
print(code)
showAlert_Success(withTitle: (code))
}
private func showAlert_Success(withTitle title: String)
{
let alertController = UIAlertController(title: title, message: "Product has been successfully scanned", preferredStyle: .alert)
// programmatically segue to the next view controller when the UIAlert pops up
alertController.addAction(UIAlertAction(title:"Get Results", style: .default, handler:{ action in self.performSegue(withIdentifier: "toAnalysisPage", sender: self) }))
present(alertController, animated: true)
}
}
Scanner.swift (accompanies Scanner_ViewController.swift) ->
import Foundation
import UIKit
import AVFoundation
protocol ScannerDelegate: class
{
func cameraView() -> UIView
func delegateViewController() -> UIViewController
func scanCompleted(withCode code: String)
}
class Scanner: NSObject
{
public weak var delegate: ScannerDelegate?
private var captureSession : AVCaptureSession?
init(withDelegate delegate: ScannerDelegate)
{
self.delegate = delegate
super.init()
self.scannerSetup()
}
private func scannerSetup()
{
guard let captureSession = self.createCaptureSession()
else
{
return
}
self.captureSession = captureSession
guard let delegate = self.delegate
else
{
return
}
let cameraView = delegate.cameraView()
let previewLayer = self.createPreviewLayer(withCaptureSession: captureSession,
view: cameraView)
cameraView.layer.addSublayer(previewLayer)
}
private func createCaptureSession() -> AVCaptureSession?
{
do
{
let captureSession = AVCaptureSession()
guard let captureDevice = AVCaptureDevice.default(for: .video) else
{
return nil
}
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
let metaDataOutput = AVCaptureMetadataOutput()
// add device input
if captureSession.canAddInput(deviceInput) && captureSession.canAddOutput(metaDataOutput)
{
captureSession.addInput(deviceInput)
captureSession.addOutput(metaDataOutput)
guard let delegate = self.delegate,
let viewController = delegate.delegateViewController() as? AVCaptureMetadataOutputObjectsDelegate else
{
return nil
}
metaDataOutput.setMetadataObjectsDelegate(viewController,
queue: DispatchQueue.main)
metaDataOutput.metadataObjectTypes = self.metaObjectTypes()
return captureSession
}
}
catch
{
// handle error
}
return nil
}
private func createPreviewLayer(withCaptureSession captureSession: AVCaptureSession,
view: UIView) -> AVCaptureVideoPreviewLayer
{
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
return previewLayer
}
private func metaObjectTypes() -> [AVMetadataObject.ObjectType]
{
return [.qr,
.code128,
.code39,
.code39Mod43,
.code93,
.ean13,
.ean8,
.interleaved2of5,
.itf14,
.pdf417,
.upce
]
}
public func metadataOutput(_ output: AVCaptureMetadataOutput,
didOutput metadataObjects: [AVMetadataObject],
from connection: AVCaptureConnection)
{
self.requestCaptureSessionStopRunning()
guard let metadataObject = metadataObjects.first,
let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
let scannedValue = readableObject.stringValue,
let delegate = self.delegate
else
{
return
}
delegate.scanCompleted(withCode: scannedValue)
}
public func requestCaptureSessionStartRunning()
{
self.toggleCaptureSessionRunningState()
}
public func requestCaptureSessionStopRunning()
{
self.toggleCaptureSessionRunningState()
}
private func toggleCaptureSessionRunningState()
{
guard let captureSession = self.captureSession
else
{
return
}
if !captureSession.isRunning
{
captureSession.startRunning()
}
else
{
captureSession.stopRunning()
}
}
}
Analysis_ViewController.swift (second view controller) ->
Right now, the forKey: has been hard-coded to item ID 8710908501708 because I have no idea how to actually pass camera-scanned IDs into the second view controller :/
import UIKit
class Analysis_ViewController: UIViewController
{
@IBOutlet weak var productTitle: UILabel!
func getData()
{
let path = Bundle.main.path(forResource:"ProductCatalog", ofType: "plist")
let dict:NSDictionary = NSDictionary(contentsOfFile: path!)!
if (dict.object(forKey: "8710908501708" as Any) != nil)
{
if let levelDict:[String : Any] = dict.object(forKey: "8710908501708" as Any) as? [String : Any]
{
// use a for loop to iterate through all the keys and values inside the product dictionary
for (key, value) in levelDict
{
// if we find a key named whatever we care about, we can print out the value
if (key == "name")
{
productTitle.text = (value as! String)
}
}
}
}
}
// listing the better options that are safer in comparison to the scanned product image
override func viewDidLoad()
{
super.viewDidLoad()
getData()
}
}
Do you have a variable to hold the scanned ID in your view controllers? If not, you can add var itemID: String? to both Scanner_ViewController and Analysis_ViewController.
Then in your func where you get the scanned code, you can set it to the variable.
func scanCompleted(withCode code: String) {
print(code)
itemID = code // Saves the scanned code to your var
showAlert_Success(withTitle: (code))
}
For passing data to another view controller via segue, you might want to look into this UIViewController method for segues: documentation here. This answer also might help.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "toAnalysisPage" {
if let viewController = segue.destination as? Analysis_ViewController {
viewController.itemID = itemID
}
}
}
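prepare(for:sender:) runs automatically when performSegue(withIdentifier:sender:) fires, so by the time Analysis_ViewController loads, itemID is already set. getData() can then use it instead of the hard-coded key; a minimal sketch, assuming itemID was passed as above:
func getData() {
    guard let itemID = itemID,
          let path = Bundle.main.path(forResource: "ProductCatalog", ofType: "plist"),
          let dict = NSDictionary(contentsOfFile: path),
          let productDict = dict.object(forKey: itemID) as? [String: Any] else { return }
    // Show the product name for the scanned ID, if it exists in the plist.
    if let name = productDict["name"] as? String {
        productTitle.text = name
    }
}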

AVCaptureMetadataOutputObjectsDelegate not receiving callback

I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it, I stopped getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
import Foundation
import UIKit
import AVFoundation
class CameraSource : NSObject {
private var session : AVCaptureSession?
private var inputDevice : AVCaptureDeviceInput?
private var videoPreviewLayer : AVCaptureVideoPreviewLayer?
private var captureMetadataOutput : AVCaptureMetadataOutput?
func setCaptureMetadataOutput() {
self.captureMetadataOutput = nil
self.captureMetadataOutput = AVCaptureMetadataOutput()
}
func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
return self.captureMetadataOutput
}
func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
self.inputDevice = inputDevice
}
func getInputDevice() -> AVCaptureDeviceInput? {
return self.inputDevice
}
func setSession(session : AVCaptureSession?) {
self.session = session
}
func getSession() -> AVCaptureSession? {
return self.session
}
func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
assert(self.captureMetadataOutput != nil)
self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
self.captureMetadataOutput!.metadataObjectTypes = metaObjects
}
func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
assert(session != nil)
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
videoPreviewLayer!.videoGravity = videoGravity
videoPreviewLayer!.connection!.videoOrientation = orientation
}
func addVideoLayerToImageView(imageView : UIImageView) {
assert(self.videoPreviewLayer != nil)
imageView.layer.addSublayer(self.videoPreviewLayer!)
self.videoPreviewLayer!.frame = imageView.bounds
}
func startSession() {
assert(session != nil)
self.session!.startRunning()
}
/*==========================================================================
STATIC FUNCTIONS
==========================================================================*/
static func getBackCamera() -> AVCaptureDevice {
return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
}
static func getFrontCamera() -> AVCaptureDevice {
return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
}
static func isCameraAvailable() -> Bool {
if #available(iOS 10.0, *) {
let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
mediaType: AVMediaType.video,
position: .unspecified).devices.count
if count > 0 { return true }
}
else {
let count = AVCaptureDevice.devices(for: AVMediaType.video).count
if count > 0 { return true }
}
return false
}
/*==========================================================================
CAMERA BUILDER CLASS
==========================================================================*/
class Builder {
var cameraSource : CameraSource
init() {
cameraSource = CameraSource()
}
func createSession() -> Builder {
if (cameraSource.getSession() != nil) {
cameraSource.setSession(session: nil)
}
cameraSource.setSession(session: AVCaptureSession())
return self
}
func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
assert(cameraSource.getSession() != nil)
cameraSource.getSession()!.sessionPreset = preset
return self
}
func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {
try self.prepareInputDevice(camera: camera)
try self.addInputToSession()
assert(cameraSource.inputDevice != nil)
return self
}
func addOutputToSessionForMetaData() throws -> CameraSource {
cameraSource.setCaptureMetadataOutput()
assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)
if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
throw AppErrorCode.cameraError("Unable to attach output to camera session")
}
cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)
return self.cameraSource
}
/*==========================================================================
BUILDER PRIVATE FUNCTIONS
==========================================================================*/
private func prepareInputDevice(camera : AVCaptureDevice) throws {
do {
let inputDevice = try AVCaptureDeviceInput(device: camera)
cameraSource.setInputDevice(inputDevice: inputDevice)
} catch let error as NSError {
print(error.localizedDescription)
throw AppErrorCode.cameraError("Unable to attach input to camera session")
}
}
private func addInputToSession() throws {
if(cameraSource.getSession() == nil) {
throw AppErrorCode.cameraError("Unable to create camera session")
}
assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))
cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
}
}
}
My QR scanner code looks like this:
import UIKit
import Foundation
import AVFoundation
protocol QRScannerDelegate {
func scannedData(_ scannedString : String)
}
class QRScanner : NSObject {
private var cameraSource : CameraSource?
var delegate : QRScannerDelegate?
func prepareCamera (delegate : QRScannerDelegate) throws -> QRScanner {
do {
self.delegate = delegate
self.cameraSource = try CameraSource
.Builder()
.createSession()
.setSessionPreset(preset: .photo)
.attachInputDevice(camera: CameraSource.getBackCamera())
.addOutputToSessionForMetaData()
self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)
} catch let err as NSError {
print(err.localizedDescription)
self.cameraSource = nil
throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
}
return self
}
func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner{
assert(cameraSource != nil)
self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
return self
}
func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner{
assert(cameraSource != nil)
self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
return self
}
func startSession() {
assert(cameraSource != nil)
self.cameraSource!.startSession()
}
}
extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
print("Delegate called")
if metadataObjects.count == 0 {
self.delegate?.scannedData("No Data")
} else {
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if metadataObj.type == AVMetadataObject.ObjectType.qr {
if metadataObj.stringValue != nil {
print("Scanner Getting data: \(metadataObj.stringValue!)")
self.delegate?.scannedData(metadataObj.stringValue!)
}
}
}
}
}
I have implemented QRScannerDelegate in my ViewController, but I am not getting anything there. Moreover, I am not even getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
When I tried passing the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I did get the callback with the scanned info.
So my question is: why is this happening?
1) When I pass a plain class as the AVCaptureMetadataOutputObjectsDelegate, I am not getting the callback. But
2) when I pass the UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I am able to get the callback.
UPDATE
This is how I am calling prepareCamera from my View Controller
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try QRScanner().prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
It's hard to say for sure without knowing how you called prepareCamera, as that is what triggers setMetadataObjectsDelegate, but it looks like you may not be keeping a strong reference to QRScanner in your ViewController (instantiating it as an instance variable). That would explain why the callback is hit when your ViewController is your AVCaptureMetadataOutputObjectsDelegate: the ViewController is still in memory.
It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to define the delegate as weak var delegate: QRScannerDelegate? to prevent a memory leak.
EDIT:
Change
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try QRScanner().prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
to
var qrScanner = QRScanner()
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try self.qrScanner.prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
and change
protocol QRScannerDelegate {
func scannedData(_ scannedString : String)
}
to
protocol QRScannerDelegate: class {
func scannedData(_ scannedString : String)
}
to allow a weak delegate.
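The corresponding property in QRScanner then becomes weak, matching the advice above:
weak var delegate: QRScannerDelegate?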
AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it! So keep at it.
I pulled some QRScanner code I wrote a while ago and put it into a gist for you if you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful.
https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

One-to-many WebRTC

I want to create a "one-to-many" WebRTC setup (with a maximum of 3 devices). I have one device that is my main device; the other devices connect to that device. You can think of it as a walkie-talkie, with one device that the others connect to.
I have this code that works with a one-to-one connection.
import AVFoundation
import UIKit
import WebRTC
import SocketIO
import CoreTelephony
import ReachabilitySwift
let TAG = "ViewController"
let AUDIO_TRACK_ID = TAG + "AUDIO"
let LOCAL_MEDIA_STREAM_ID = TAG + "STREAM"
class ViewController: UIViewController, RTCPeerConnectionDelegate, RTCDataChannelDelegate {
var mediaStream: RTCMediaStream!
var localAudioTrack: RTCAudioTrack!
var remoteAudioTrack: RTCAudioTrack!
var dataChannel: RTCDataChannel!
var dataChannelRemote: RTCDataChannel!
var roomName: String!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
initWebRTC();
sigConnect(wsUrl: "http://192.168.1.69:3000");
localAudioTrack = peerConnectionFactory.audioTrack(withTrackId: AUDIO_TRACK_ID)
mediaStream = peerConnectionFactory.mediaStream(withStreamId: LOCAL_MEDIA_STREAM_ID)
mediaStream.addAudioTrack(localAudioTrack)
}
func getRoomName() -> String {
return (roomName == nil || roomName.isEmpty) ? "_defaultroom": roomName;
}
// webrtc
var peerConnectionFactory: RTCPeerConnectionFactory! = nil
var peerConnection: RTCPeerConnection! = nil
var mediaConstraints: RTCMediaConstraints! = nil
var socket: SocketIOClient! = nil
var wsServerUrl: String! = nil
var peerStarted: Bool = false
func initWebRTC() {
RTCInitializeSSL()
peerConnectionFactory = RTCPeerConnectionFactory()
let mandatoryConstraints = ["OfferToReceiveAudio": "true", "OfferToReceiveVideo": "false"]
let optionalConstraints = [ "DtlsSrtpKeyAgreement": "true", "RtpDataChannels" : "true", "internalSctpDataChannels" : "true"]
mediaConstraints = RTCMediaConstraints.init(mandatoryConstraints: mandatoryConstraints, optionalConstraints: optionalConstraints)
}
func connect() {
if (!peerStarted) {
sendOffer()
peerStarted = true
}
}
func hangUp() {
sendDisconnect()
stop()
}
func stop() {
if (peerConnection != nil) {
peerConnection.close()
peerConnection = nil
peerStarted = false
}
}
func prepareNewConnection() -> RTCPeerConnection {
var icsServers: [RTCIceServer] = []
icsServers.append(RTCIceServer(urlStrings: ["stun:stun.l.google.com:19302"], username:"",credential: ""))
let rtcConfig: RTCConfiguration = RTCConfiguration()
rtcConfig.tcpCandidatePolicy = RTCTcpCandidatePolicy.disabled
rtcConfig.bundlePolicy = RTCBundlePolicy.maxBundle
rtcConfig.rtcpMuxPolicy = RTCRtcpMuxPolicy.require
rtcConfig.iceServers = icsServers;
peerConnection = peerConnectionFactory.peerConnection(with: rtcConfig, constraints: mediaConstraints, delegate: self)
peerConnection.add(mediaStream);
let tt = RTCDataChannelConfiguration();
tt.isOrdered = false;
self.dataChannel = peerConnection.dataChannel(forLabel: "testt", configuration: tt)
self.dataChannel.delegate = self
print("Make datachannel")
return peerConnection;
}
// RTCPeerConnectionDelegate - begin [ ///////////////////////////////////////////////////////////////////////////////
/** Called when the SignalingState changed. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState){
print("signal state: \(stateChanged.rawValue)")
}
/** Called when media is received on a new stream from remote peer. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream){
if (peerConnection == nil) {
return
}
if (stream.audioTracks.count > 1) {
print("Weird-looking stream: " + stream.description)
return
}
}
/** Called when a remote peer closes a stream. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream){}
/** Called when negotiation is needed, for example ICE has restarted. */
public func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection){}
/** Called any time the IceConnectionState changes. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState){}
/** Called any time the IceGatheringState changes. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState){}
/** New ice candidate has been found. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate){
print("iceCandidate: " + candidate.description)
let json:[String: AnyObject] = [
"type" : "candidate" as AnyObject,
"sdpMLineIndex" : candidate.sdpMLineIndex as AnyObject,
"sdpMid" : candidate.sdpMid as AnyObject,
"candidate" : candidate.sdp as AnyObject
]
sigSendIce(msg: json as NSDictionary)
}
/** Called when a group of local Ice candidates have been removed. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]){}
/** New data channel has been opened. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel){
print("Datachannel is open, name: \(dataChannel.label)")
dataChannel.delegate = self
self.dataChannelRemote = dataChannel
}
// RTCPeerConnectionDelegate - end ]/////////////////////////////////////////////////////////////////////////////////
public func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer){
print("iets ontvangen");
}
public func dataChannelDidChangeState(_ dataChannel: RTCDataChannel){
print("channel.state \(dataChannel.readyState.rawValue)");
}
func sendData(message: String) {
let newData = message.data(using: String.Encoding.utf8)
let dataBuff = RTCDataBuffer(data: newData!, isBinary: false)
self.dataChannel.sendData(dataBuff)
}
func onOffer(sdp:RTCSessionDescription) {
print("on offer shizzle")
setOffer(sdp: sdp)
sendAnswer()
peerStarted = true;
}
func onAnswer(sdp:RTCSessionDescription) {
setAnswer(sdp: sdp)
}
func onCandidate(candidate:RTCIceCandidate) {
peerConnection.add(candidate)
}
func sendSDP(sdp:RTCSessionDescription) {
print("Converting sdp...")
let json:[String: AnyObject] = [
"type" : sdp.type.rawValue as AnyObject,
"sdp" : sdp.sdp.description as AnyObject
]
sigSend(msg: json as NSDictionary);
}
func sendOffer() {
peerConnection = prepareNewConnection();
peerConnection.offer(for: mediaConstraints) { (RTCSessionDescription, Error) in
if(Error == nil){
print("send offer")
self.peerConnection.setLocalDescription(RTCSessionDescription!, completionHandler: { (Error) in
print("Sending: SDP")
print(RTCSessionDescription as Any)
self.sendSDP(sdp: RTCSessionDescription!)
})
} else {
print("sdp creation error: \(Error)")
}
}
}
func setOffer(sdp:RTCSessionDescription) {
if (peerConnection != nil) {
print("peer connection already exists")
}
peerConnection = prepareNewConnection();
peerConnection.setRemoteDescription(sdp) { (Error) in
}
}
func sendAnswer() {
print("sending Answer. Creating remote session description...")
if (peerConnection == nil) {
print("peerConnection NOT exist!")
return
}
peerConnection.answer(for: mediaConstraints) { (RTCSessionDescription, Error) in
print("ice shizzle")
if(Error == nil){
self.peerConnection.setLocalDescription(RTCSessionDescription!, completionHandler: { (Error) in
print("Sending: SDP")
print(RTCSessionDescription as Any)
self.sendSDP(sdp: RTCSessionDescription!)
})
} else {
print("sdp creation error: \(Error)")
}
}
}
func setAnswer(sdp:RTCSessionDescription) {
if (peerConnection == nil) {
print("peerConnection NOT exist!")
return
}
peerConnection.setRemoteDescription(sdp) { (Error) in
print("remote description")
}
}
func sendDisconnect() {
let json:[String: AnyObject] = [
"type" : "user disconnected" as AnyObject
]
sigSend(msg: json as NSDictionary);
}
// websocket related operations
func sigConnect(wsUrl:String) {
wsServerUrl = wsUrl;
print("connecting to " + wsServerUrl)
socket = SocketIOClient(socketURL: NSURL(string: wsServerUrl)! as URL)
socket.on("connect") { data in
print("WebSocket connection opened to: " + self.wsServerUrl);
self.sigEnter();
}
socket.on("disconnect") { data in
print("WebSocket connection closed.")
}
socket.on("message") { (data, emitter) in
if (data.count == 0) {
return
}
let json = data[0] as! NSDictionary
print("WSS->C: " + json.description);
let type = json["type"] as! Int
if (type == RTCSdpType.offer.rawValue) {
print("Received offer, set offer, sending answer....");
let sdp = RTCSessionDescription(type: RTCSdpType(rawValue: type)!, sdp: json["sdp"] as! String)
self.onOffer(sdp: sdp);
} else if (type == RTCSdpType.answer.rawValue && self.peerStarted) {
print("Received answer, setting answer SDP");
let sdp = RTCSessionDescription(type: RTCSdpType(rawValue: type)!, sdp: json["sdp"] as! String)
self.onAnswer(sdp: sdp);
} else {
print("Unexpected websocket message");
}
}
socket.on("ice") { (data, emitter) in
if (data.count == 0) {
return
}
let json = data[0] as! NSDictionary
print("WSS->C: " + json.description);
let type = json["type"] as! String
if (type == "candidate" && self.peerStarted) {
print("Received ICE candidate...");
let candidate = RTCIceCandidate(
sdp: json["candidate"] as! String,
sdpMLineIndex: Int32(json["sdpMLineIndex"] as! Int),
sdpMid: json["sdpMid"] as? String)
self.onCandidate(candidate: candidate);
} else {
print("Unexpected websocket message");
}
}
socket.connect();
}
func sigReconnect() {
socket.disconnect();
socket.connect();
}
func sigEnter() {
let roomName = getRoomName();
print("Entering room: " + roomName);
socket.emit("enter", roomName);
}
func sigSend(msg:NSDictionary) {
socket.emit("message", msg)
}
func sigSendIce(msg:NSDictionary) {
socket.emit("ice", msg)
}
}
So I thought that I need an array of peers, and that the mediaStream, localAudioTrack and dataChannel need to be shared objects because the local audio is the same? Are there good solutions for this? I don't know how to implement this properly.
I am investigating different questions and projects referring to a multi-call WebRTC setup.
I saw this (web-based) WebRTC setup on GitHub:
https://github.com/anoek/webrtc-group-chat-example/blob/master/client.html
I'm going to try to reverse engineer this to Swift :). Any help is really appreciated.
I would suggest against a one-to-many architecture where a single device needs to send its media to all others. This breaks down awfully fast (after as few as 2-3 devices it needs to connect to).
The reason for that is that uplinks are usually limited in capacity and even when they aren't, devices aren't really geared to streaming so much data to many other devices.
To do what you want at "scale", use a server component that routes media to the other devices. Look at https://jitsi.org/ and http://www.kurento.org/ for starting points.
What you're trying to achieve can be done with multiple peer connections, where one client creates a separate peer connection to each of the others; from the application's point of view this is a mesh topology. This is the most straightforward way, and if you have recently learnt WebRTC and know how to implement one peer connection, you can try handling multiple ones (see the sketch below). Look at this for more details: click. But in practice, once you have dozens of clients it becomes hard, because it eats up hardware resources and bandwidth. In that case, what people do is maintain a shared server that all the clients connect to; the server mixes the individual streams and distributes them, which is a star topology. There are some well-known services for this, like TokBox and Jitsi Meet.
You may also like to look into the SFU model: https://webrtcglossary.com/sfu/
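As a rough sketch of the mesh approach, reusing the question's peerConnectionFactory and mediaStream properties (the peerConnections dictionary and makeConnection(for:) are hypothetical names):
// One shared local stream; one RTCPeerConnection per remote peer, keyed by a signaling peer id.
var peerConnections: [String: RTCPeerConnection] = [:]
func makeConnection(for peerId: String) -> RTCPeerConnection {
    let config = RTCConfiguration()
    config.iceServers = [RTCIceServer(urlStrings: ["stun:stun.l.google.com:19302"])]
    let constraints = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let pc = peerConnectionFactory.peerConnection(with: config, constraints: constraints, delegate: self)
    // The same mediaStream (carrying the shared localAudioTrack) is added to every connection.
    pc.add(mediaStream)
    peerConnections[peerId] = pc
    return pc
}
Signaling messages then need to carry the peer id so that offers, answers and ICE candidates are applied to the right entry in the dictionary.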

Where is the audio stream added in libjingle?

I can't find where the audio stream is added to the 'speaker'. Is it possible for me to modify the stream and add it later myself? I have the feeling that libjingle is handling the stream and adding it.
I have added the libjingle part of my code:
import AVFoundation
import UIKit
let TAG = "ViewController"
let AUDIO_TRACK_ID = TAG + "AUDIO"
let LOCAL_MEDIA_STREAM_ID = TAG + "STREAM"
class ViewController: UIViewController, RTCSessionDescriptionDelegate, RTCPeerConnectionDelegate {
var mediaStream: RTCMediaStream!
var localAudioTrack: RTCAudioTrack!
var remoteAudioTrack: RTCAudioTrack!
var renderer: RTCEAGLVideoView!
var renderer_sub: RTCEAGLVideoView!
var roomName: String!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
initWebRTC();
sigConnect(wsUrl: "http://192.168.1.59:3000");
localAudioTrack = peerConnectionFactory.audioTrack(withID: AUDIO_TRACK_ID)
mediaStream = peerConnectionFactory.mediaStream(withLabel: LOCAL_MEDIA_STREAM_ID)
mediaStream.addAudioTrack(localAudioTrack)
}
var peerConnectionFactory: RTCPeerConnectionFactory! = nil
var peerConnection: RTCPeerConnection! = nil
var pcConstraints: RTCMediaConstraints! = nil
var audioConstraints: RTCMediaConstraints! = nil
var mediaConstraints: RTCMediaConstraints! = nil
var wsServerUrl: String! = nil
var peerStarted: Bool = false
func initWebRTC() {
RTCPeerConnectionFactory.initializeSSL()
peerConnectionFactory = RTCPeerConnectionFactory()
pcConstraints = RTCMediaConstraints()
audioConstraints = RTCMediaConstraints()
mediaConstraints = RTCMediaConstraints(
mandatoryConstraints: [
RTCPair(key: "OfferToReceiveAudio", value: "true"),
],
optionalConstraints: nil)
}
func prepareNewConnection() -> RTCPeerConnection {
var icsServers: [RTCICEServer] = []
icsServers.append(RTCICEServer(uri: NSURL(string: "stun:stun.l.google.com:19302") as URL!, username: "",
password: ""))
let rtcConfig: RTCConfiguration = RTCConfiguration()
rtcConfig.tcpCandidatePolicy = RTCTcpCandidatePolicy.disabled
rtcConfig.bundlePolicy = RTCBundlePolicy.maxBundle
rtcConfig.rtcpMuxPolicy = RTCRtcpMuxPolicy.require
peerConnection = peerConnectionFactory.peerConnection(withICEServers: icsServers, constraints: pcConstraints, delegate: self)
peerConnection.add(mediaStream);
return peerConnection;
}
func peerConnection(_ peerConnection: RTCPeerConnection!, signalingStateChanged stateChanged: RTCSignalingState) {
}
func peerConnection(_ peerConnection: RTCPeerConnection!, iceConnectionChanged newState: RTCICEConnectionState) {
}
func peerConnection(_ peerConnection: RTCPeerConnection!, iceGatheringChanged newState: RTCICEGatheringState) {
}
func peerConnection(_ peerConnection: RTCPeerConnection!, gotICECandidate candidate: RTCICECandidate!) {
if (candidate != nil) {
print("iceCandidate: " + candidate.description)
let json:[String: AnyObject] = [
"type" : "candidate" as AnyObject,
"sdpMLineIndex" : candidate.sdpMLineIndex as AnyObject,
"sdpMid" : candidate.sdpMid as AnyObject,
"candidate" : candidate.sdp as AnyObject
]
sigSend(msg: json as NSDictionary)
} else {
print("End of candidates. -------------------")
}
}
func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) {
if (peerConnection == nil) {
return
}
if (stream.audioTracks.count > 1) {
print("Weird-looking stream: " + stream.description)
return
}
}
func peerConnection(_ peerConnection: RTCPeerConnection!, removedStream stream: RTCMediaStream!) {
}
func peerConnection(_ peerConnection: RTCPeerConnection!, didOpen dataChannel: RTCDataChannel!) {
}
func peerConnection(onRenegotiationNeeded peerConnection: RTCPeerConnection!) {
}
}
My thought is that I can catch the audio stream in the function below. Is that correct? And can I add the stream manually to the speaker?
func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) {
if (peerConnection == nil) {
return
}
if (stream.audioTracks.count > 1) {
print("Weird-looking stream: " + stream.description)
return
}
}
When the WebRTC call is connected, the WebRTC stack uses platform APIs to play or record the audio. You can only control things like:
- muting or unmuting the audio stream
- using system APIs to increase or decrease the volume or change the audio configuration
You can't add the stream manually to the speaker, but you can change the default audio output to the speaker or a headphone so that WebRTC audio is redirected to the correct output. This can be done using AVFoundation APIs.
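A minimal sketch of that AVFoundation route, using the standard AVAudioSession API (the category, mode and options shown are one reasonable configuration, not the only one):
import AVFoundation
// Route call audio to the loudspeaker instead of the earpiece.
func routeAudioToSpeaker() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, mode: .voiceChat, options: [.defaultToSpeaker, .allowBluetooth])
        try session.overrideOutputAudioPort(.speaker)
        try session.setActive(true)
    } catch {
        print("Audio session error: \(error)")
    }
}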

Bonjour Service Browser with Swift does not fetch service info

The service I want to connect to is published via Bonjour.
I can find all the info with the Bonjour Browser; however, if I try to gather the data programmatically, the only value I get is the name of the service.
The NetService delegate is set and the function netServiceWillPublish is called.
The functions didNotPublish and didPublish are not executed.
The function netServiceBrowser gets all published NetServices, but all properties are set to the default value of their data type.
import UIKit
class BMNSDelegate : NSObject, NetServiceDelegate {
func netServiceWillPublish(_ sender: NetService) {
print("netServiceWillPublish:\(sender)"); //This method is called
}
func netService(_ sender: NetService, didNotPublish errorDict: [String : NSNumber]){
print("didNotPublish:\(sender)");
}
func netServiceDidPublish(_ sender: NetService) {
print("netServiceDidPublish:\(sender)");
}
func netServiceWillResolve(_ sender: NetService) {
print("netServiceWillResolve:\(sender)");
}
func netService(_ sender: NetService, didNotResolve errorDict: [String : NSNumber]) {
print("netServiceDidNotResolve:\(sender)");
}
func netServiceDidResolveAddress(_ sender: NetService) {
print("netServiceDidResolve:\(sender)");
}
func netService(_ sender: NetService, didUpdateTXTRecord data: Data) {
print("netServiceDidUpdateTXTRecordData:\(sender)");
}
func netServiceDidStop(_ sender: NetService) {
print("netServiceDidStopService:\(sender)");
}
func netService(_ sender: NetService,
didAcceptConnectionWith inputStream: InputStream,
outputStream stream: OutputStream) {
print("netServiceDidAcceptConnection:\(sender)");
}
}
class BMBrowserDelegate : NSObject, NetServiceBrowserDelegate, NetServiceDelegate {
func netServiceBrowser(_ netServiceBrowser: NetServiceBrowser,
didFind netService: NetService,
moreComing moreServicesComing: Bool) {
let nsnsdel = BMNSDelegate()
netService.delegate = nsnsdel
netService.resolve(withTimeout: 1)
print(netService.domain) // local.
print(netService.name) // This property is correct
print(netService.type) // _http._tcp.
print(netService.addresses) // Optional([])
print(netService.hostName) // nil
print(netService.port) // -1
print(moreServicesComing) //false
}
}
let SERVICE_TYPE = "_http._tcp."
let BM_DOMAIN = "local."
let browser = NetServiceBrowser()
let nsbdel = BMBrowserDelegate()
browser.delegate = nsbdel
browser.searchForServices(ofType: SERVICE_TYPE, inDomain: BM_DOMAIN)
RunLoop.current.run()
The error is that the services found in the NetServiceBrowserDelegate function are not saved anywhere and are therefore deallocated at the end of the function, before the asynchronous resolve can complete and fill in the addresses, host name and port.
I found a working example here:
https://github.com/mattneub/Programming-iOS-Book-Examples/blob/61f0c753a080040e4a74b912e6c18dd97fe8bcaa/bk2ch24p853bonjour/ch37p1101bonjour/ViewController.swift
class ViewController: UIViewController, NetServiceBrowserDelegate, NetServiceDelegate {
var nsb : NetServiceBrowser!
var services = [NetService]()
@IBAction func doButton(_ sender: Any!) {
print("listening for services...")
self.services.removeAll()
self.nsb = NetServiceBrowser()
self.nsb.delegate = self
self.nsb.searchForServices(ofType:"_daap._tcp", inDomain: "")
}
func updateInterface () {
for service in self.services {
if service.port == -1 {
print("service \(service.name) of type \(service.type)" +
" not yet resolved")
service.delegate = self
service.resolve(withTimeout:10)
} else {
print("service \(service.name) of type \(service.type)," +
"port \(service.port), addresses \(service.addresses)")
}
}
}
func netServiceDidResolveAddress(_ sender: NetService) {
self.updateInterface()
}
func netServiceBrowser(_ aNetServiceBrowser: NetServiceBrowser, didFind aNetService: NetService, moreComing: Bool) {
print("adding a service")
self.services.append(aNetService)
if !moreComing {
self.updateInterface()
}
}
func netServiceBrowser(_ aNetServiceBrowser: NetServiceBrowser, didRemove aNetService: NetService, moreComing: Bool) {
if let ix = self.services.index(of:aNetService) {
self.services.remove(at:ix)
print("removing a service")
if !moreComing {
self.updateInterface()
}
}
}
}