Swift 3 OutputStream hanging? - sockets

I wrote a simple Swift program that writes to a server. I want to send data back to back, or in a loop, to the server. I tried something simple: sending two messages back to back, but there is a hang; the server only receives the first message and never the second. Why is this?
I am using StreamDelegate in Swift 3, and I am writing with the OutputStream write method.
print("Hello, World!")
import Foundation
class Connection: NSObject, StreamDelegate {
private var inputStream: InputStream!
private var outputStream: OutputStream!
private var my_address: CFString
private var my_port: UInt32
init (address: CFString, port:UInt32) {
self.my_address = address
self.my_port = port
super.init()
}
func connect() {
print("connecting...")
var readStream: Unmanaged<CFReadStream>?
var writeStream: Unmanaged<CFWriteStream>?
CFStreamCreatePairWithSocketToHost(nil, self.my_address, self.my_port, &readStream, &writeStream)
// Documentation suggests readStream and writeStream can be assumed to
// be non-nil. It might be wise to test if either is nil
// and implement error-handling as needed.
// self.inputStream = readStream!.takeRetainedValue()
self.outputStream = writeStream!.takeRetainedValue()
// self.inputStream.delegate = self
self.outputStream.delegate = self
// self.inputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
// self.outputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
// self.inputStream.open()
self.outputStream.open()
}
func disconnect() {
self.inputStream.close()
self.outputStream.close()
}
func stream(aStream: Stream, handleEvent eventCode: Stream.Event) {
switch (eventCode){
case Stream.Event.errorOccurred:
NSLog("ErrorOccurred")
case Stream.Event.endEncountered:
NSLog("EndEncountered")
case Stream.Event.hasBytesAvailable:
NSLog("HasBytesAvaible")
var buffer = [UInt8](repeating: 0, count: 4096)
while (inputStream.hasBytesAvailable){
var len = inputStream.read(&buffer, maxLength: buffer.count)
if(len > 0){
var output = NSString(bytes: &buffer, length: buffer.count, encoding: String.Encoding.utf8.rawValue)
if (output != ""){
NSLog("server said: %#", output!)
}
} else {
print("empty string from stream")
}
}
case Stream.Event.openCompleted:
NSLog("OpenCompleted")
case Stream.Event.hasSpaceAvailable:
NSLog("HasSpaceAvailable")
default: print("default reached. unknown stream event")
}
}
func sendMessage(message: String) {
let buff = [UInt8](message.utf8)
self.outputStream.write(buff, maxLength: buff.count)
}
func getOutputStream() -> OutputStream {
return self.outputStream
}
}
var socket = Connection(address: "000.000.0.000" as CFString, port: 0000)
socket.connect()
socket.sendMessage(message: "Hey you! 1")
socket.sendMessage(message: "Hey you! 2")
Here is the view controller:
import Darwin
import Foundation
import UIKit
import Dispatch
class ViewController: UIViewController {
@IBOutlet private weak var joystickMove: Joystick!
@IBOutlet private weak var joystickRotate: Joystick!
private var joystick = Joystick()
private var contour = Contours()
private var contour_index: Int = 0
private var socket = Connection(address: "000.000.0.000" as CFString, port: 0000)
override func viewDidLoad() {
super.viewDidLoad()
createJoystick()
createContours()
createButton()
}
private func createJoystick() {
let n: CGFloat = 100.0
let x: CGFloat = (UIScreen.main.bounds.width/2) - (n/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0)
self.joystick.frame = CGRect(x: x, y: y, width: n, height: n)
self.joystick.backgroundColor = UIColor.clear
self.joystick.substrateColor = UIColor.lightGray
self.joystick.substrateBorderColor = UIColor.gray
self.joystick.substrateBorderWidth = 1.0
self.joystick.stickSize = CGSize(width: 50.0, height: 50.0)
self.joystick.stickColor = UIColor.darkGray
self.joystick.stickBorderColor = UIColor.black
self.joystick.stickBorderWidth = 2.0
self.joystick.fade = 0.5
var packet = ""
DispatchQueue.global(qos: .userInitiated).async { // do some task
self.joystick.trackingHandler = { (data) -> () in
let power = sqrt(pow(Double(data.velocity.x), 2.0) + pow(Double(data.velocity.y), 2.0))
let theta = atan2(Double(-data.velocity.y), Double(data.velocity.x))
let degrees = theta * (180.0 / M_PI)
if degrees >= 55 && degrees <= 125 { // move forward
packet = "\(1) \(1) \(power) \(power)"
} else if degrees >= -125 && degrees <= -55 { // move backwards
packet = "\(-1) \(-1) \(power) \(power)"
} else if degrees >= -55 && degrees <= 55 { // turn right
packet = "\(1) \(-1) \(power) \(power)"
} else if (degrees >= 125 && degrees <= 180) && (degrees >= -180 && degrees <= -125) { // turn left
packet = "\(-1) \(1) \(power) \(power)"
}
self.socket.connect()
let sent = self.socket.sendMessage(message: packet)
if sent {
print("packet sent!\n \(packet)")
}
self.socket.stream(aStream: self.socket.inputStream, handleEvent: Stream.Event.hasBytesAvailable)
// self.socket.changeWhenHasBytesAvailable = { str in
// print("my-str: \(str.characters)")
// }
self.socket.disconnect()
}
}
view.addSubview(joystick)
}
private func createContours() {
let n: CGFloat = 350.0
let x: CGFloat = (UIScreen.main.bounds.width/2.0) - (n/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0) - n - 100.0
self.contour.frame = CGRect(x: x, y: y, width: n, height: n)
self.contour.backgroundColor = UIColor.clear
view.addSubview(self.contour)
}
private func createButton() {
let width: CGFloat = 150.0
let height: CGFloat = 75.0
let x: CGFloat = (UIScreen.main.bounds.width/2.0) - (width/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0) - width
let button: UIButton = UIButton(frame: CGRect(x: x, y: y, width: width, height: height))
button.backgroundColor = UIColor.blue
button.setTitle("Contour Views", for: .normal)
button.addTarget(self, action: #selector(self.buttonAction), for: .touchUpInside)
button.tag = 1
view.addSubview(button)
}
@objc private func buttonAction(sender: UIButton!) {
var btnsendtag: UIButton = sender
if btnsendtag.tag == 1 {
self.contour_index = (self.contour_index + 1) % 2
switch self.contour_index {
case 0:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 0
}
}
case 1:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 1
}
}
default:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 1
UIColor.cyan.setFill()
self.contour.lines[index].fill()
self.contour.lines[index].stroke()
}
}
}
}
}
override func viewDidAppear(_ animated: Bool) {
DispatchQueue.global(qos: .background).async { // do some task
// self.socket.connect()
// var packet = "1 1 0 0"
// let sent = self.socket.sendMessage(message: packet)
// if sent {
// print("packet sent!\n \(packet)")
// }
// self.socket.disconnect()
}
}
public func delayWithSeconds(_ seconds: Double, completion: @escaping () -> ()) {
DispatchQueue.main.asyncAfter(deadline: .now() + seconds) {
completion()
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func viewWillDisappear(_ animated: Bool) {
// self.socket.disconnect()
}
}
And here is the Connection class used by the view controller:
import Foundation
class Connection: NSObject, StreamDelegate {
public var inputStream: InputStream!
private var outputStream: OutputStream!
private var my_address: CFString
private var my_port: UInt32
public var changeWhenHasBytesAvailable: ((String)->())?
init (address: CFString, port:UInt32) {
self.my_address = address
self.my_port = port
super.init()
}
public func connect() {
var readStream: Unmanaged<CFReadStream>?
var writeStream: Unmanaged<CFWriteStream>?
CFStreamCreatePairWithSocketToHost(nil, self.my_address, self.my_port, &readStream, &writeStream)
self.inputStream = readStream!.takeRetainedValue()
self.outputStream = writeStream!.takeRetainedValue()
self.inputStream.delegate = self
self.outputStream.delegate = self
self.inputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
self.outputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
self.inputStream.open()
self.outputStream.open()
}
public func stream(aStream: Stream, handleEvent eventCode: Stream.Event) {
// DispatchQueue.global(qos: .userInitiated).async { // do some task
print("stream")
if aStream === self.inputStream {
switch eventCode {
case Stream.Event.openCompleted:
print("open")
break
case Stream.Event.errorOccurred:
print("error")
break
case Stream.Event.hasBytesAvailable:
print("available!")
// var inputBuffer = Array<UInt8>(repeating: 0, count:2048)
// self.inputStream?.read(&inputBuffer, maxLength: 2048)
// self.changeWhenHasBytesAvailable?(String(bytes: inputBuffer, encoding: String.Encoding.utf8)!)
var buffer = [UInt8](repeating: 0, count: 512)
while (self.inputStream.hasBytesAvailable){
print("before read")
var len = self.inputStream.read(&buffer, maxLength: buffer.count)
print("after read")
if(len > 0){
// var output = NSString(bytes: &buffer, length: buffer.count, encoding: String.Encoding.utf8.rawValue)
var output = NSString(bytes: &buffer, length: len, encoding: String.Encoding.utf8.rawValue)
if (output != ""){
print("output: \(output!)")
}
} else {
print("empty string from stream")
}
print("while")
}
break
default:
print("default")
break
}
}
// }
}
public func sendMessage(message: String) -> Bool {
var sent: Bool = false
let buff = [UInt8](message.utf8)
let result = self.outputStream.write(buff, maxLength: buff.count)
if result != -1 {
sent = true
}
return sent
}
public func disconnect() {
self.inputStream.close()
self.outputStream.close()
// self.inputStream.remove(from: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
// self.outputStream.remove(from: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
}
}
Update: sending works now, but I'm still not receiving any data. Why not?

Don't use a while loop inside the StreamDelegate callback when you are reading:
while (inputStream.hasBytesAvailable){
    var len = inputStream.read(&buffer, maxLength: buffer.count)
    ...
}
Just call read once. If there are more bytes, your delegate will be called again.
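In other words, the hasBytesAvailable case can do one bounded read per event and hand back whatever arrived; the run loop delivers another event when more data shows up. A rough sketch of that shape against the updated Connection class above, reusing its changeWhenHasBytesAvailable callback (error handling kept minimal):

func stream(_ aStream: Stream, handleEvent eventCode: Stream.Event) {
    switch eventCode {
    case Stream.Event.hasBytesAvailable:
        guard let input = aStream as? InputStream else { return }
        var buffer = [UInt8](repeating: 0, count: 4096)
        // One read per event; no while loop on hasBytesAvailable
        let len = input.read(&buffer, maxLength: buffer.count)
        if len > 0, let text = String(bytes: buffer[0..<len], encoding: .utf8) {
            changeWhenHasBytesAvailable?(text)
        }
    case Stream.Event.errorOccurred:
        print("stream error: \(String(describing: aStream.streamError))")
    default:
        break
    }
}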

Related

How can I stop my frames from constantly being reloaded in Swift

I have code that detects objects and transcribes them to speech. However, if several objects are detected in the same frame, the voice output gets jumbled and reads all of the object names at once, which makes no sense to the user. I'm developing this application for the visually impaired, so I want to announce one object's name to the user, finish speaking it, and only then move on to the next object.
Here is the code:
import UIKit
import AVKit
import Vision
import CoreML
import AVFoundation
class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {
@IBOutlet weak var innerView: UIView!
@IBOutlet weak var viewLable: UILabel!
var previewLayer: AVCaptureVideoPreviewLayer?
override func viewDidLoad() {
super.viewDidLoad()
updateLable(newLable: "new lable")
//Start the Camera
let captureSession = AVCaptureSession()
captureSession.sessionPreset = .photo
// get back camera as Video Capture Device
guard let captureDevice = AVCaptureDevice.default(for: .video)
else { self.quickErr(myLine: #line,inputStr: "") ; return }
try? captureDevice.lockForConfiguration()
captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 2)
captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 2)
captureDevice.unlockForConfiguration()
guard let input = try? AVCaptureDeviceInput(device: captureDevice)
else { self.quickErr(myLine: #line,inputStr: "") ; return }
captureSession.addInput(input)
captureSession.startRunning()
self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.previewLayer?.frame.size = self.innerView.frame.size
self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.innerView.layer.addSublayer(self.previewLayer!)
self.previewLayer?.frame = view.frame
// let label = UILabel(frame: CGRect(x: 0, y: 0, width: 200, height: 21))
// label.center = CGPoint(x: 160, y: 285)
// label.textAlignment = .center
// label.text = "I'am a test label"
// self.view.addSubview(label)
// label.text = ""
//get access to video frames
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "VideoQueue"))
captureSession.addOutput(dataOutput)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.previewLayer?.frame.size = self.innerView.frame.size
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
//print("Camera was able to capture a frame ", Date())
guard let pixcelBuffer:CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
else { self.quickErr(myLine: #line,inputStr: "") ; return }
guard let model = try? VNCoreMLModel(for: Resnet50().model)
else { self.quickErr(myLine: #line,inputStr: "") ; return }
let request = VNCoreMLRequest(model: model) { (finishedReq, err) in
//check err
//print(finishedReq.results)
guard let results = finishedReq.results as? [VNClassificationObservation]
else { self.quickErr(myLine: #line,inputStr: "") ; return }
guard let firstObservation = results.first
else { self.quickErr(myLine: #line,inputStr: "") ; return }
var myMessage = ""
var myConfident = 0
if (firstObservation.confidence > 0.2 ) {
myConfident = Int ( firstObservation.confidence * 100 )
let myIdentifier = firstObservation.identifier.split(separator: ",")
myMessage = "I am \(myConfident) % confidence this object is : \(myIdentifier[0]) "
} else {
myMessage = "I am not confidence to detect this object"
}
print(myMessage)
self.updateLable(newLable: myMessage)
if ( myConfident >= 70 ){
self.readyMe(myText: myMessage, myLang: "en_EN")
}
}
// Analyze the image
try? VNImageRequestHandler(cvPixelBuffer: pixcelBuffer, options: [:]).perform([request])
}
func readyMe(myText :String , myLang : String ) {
let uttrace = AVSpeechUtterance(string: myText )
uttrace.voice = AVSpeechSynthesisVoice(language: myLang)
uttrace.rate = 0.5
let synthesizer = AVSpeechSynthesizer()
synthesizer.speak(uttrace)
}
func quickErr(myLine: Int , inputStr : String = "" ) {
print("===> Guard Error \(inputStr) :\n    file:\(#file)\n    line:\(myLine)\n    function:\(#function) ")
}
func updateLable(newLable: String){
DispatchQueue.main.async { // Correct
self.viewLable?.text = "[ " + newLable + " ]"
}
}
}
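One way to keep the announcements from talking over each other is to hold a single AVSpeechSynthesizer on the controller and only start a new utterance once the previous one has finished, rather than creating a fresh local synthesizer inside readyMe each time it is called. A rough sketch of that idea (the shared speechSynthesizer property and the isSpeaking guard are assumptions, not part of the code above):

let speechSynthesizer = AVSpeechSynthesizer() // one shared instance, kept alive by the view controller

func readyMe(myText: String, myLang: String) {
    // Drop new announcements until the current utterance has finished speaking
    guard !speechSynthesizer.isSpeaking else { return }
    let utterance = AVSpeechUtterance(string: myText)
    utterance.voice = AVSpeechSynthesisVoice(language: myLang)
    utterance.rate = 0.5
    speechSynthesizer.speak(utterance)
}

If dropped detections matter, AVSpeechSynthesizerDelegate's speechSynthesizer(_:didFinish:) callback could be used to queue the next announcement instead of discarding it.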

Using AudioToolbox instead of AVFoundation in SFSpeechRecognizer

I have to use AudioToolbox instead of AVAudioSession to provide the audio stream to SFSpeechRecognizer. I know that I should use AudioQueue, so I made an audio recording that I export to a CMSampleBuffer so the recognizer can read it. While debugging I can see that the buffer is added to the SFSpeechAudioBufferRecognitionRequest, but the code in the task closure never executes: there is neither a result nor an error.
What's wrong with the code?
let NUM_BUFFERS = 1
struct RecordState {
var dataFormat = AudioStreamBasicDescription()
var queue: AudioQueueRef?
var buffers: [AudioQueueBufferRef] = []
var audioFile: AudioFileID?
var currentPacket: Int64 = 0
var recording = false
}
func callback(_ inUserData: UnsafeMutableRawPointer?,
_ inAQ: AudioQueueRef,
_ inBuffer: AudioQueueBufferRef,
_ inStartTime: UnsafePointer<AudioTimeStamp>,
_ inNumberPacketDescriptions: UInt32,
_ inPacketDescs: UnsafePointer<AudioStreamPacketDescription>?) {
let recordState = inUserData?.assumingMemoryBound(to: RecordState.self)
if let queue = recordState?.pointee.queue {
AudioQueueEnqueueBuffer(queue, inBuffer, 0, nil)
let rec = AudioRecorder.sharedInstance
rec.transformBuffer(pBuffer: inBuffer, pLength: inBuffer.pointee.mAudioDataByteSize)
}
}
class AudioRecorder: NSObject, ObservableObject, SFSpeechRecognizerDelegate {
let format = AudioStreamBasicDescription(mSampleRate: Float64(16000.0), mFormatID: kAudioFormatLinearPCM, mFormatFlags: kAudioFormatFlagsNativeFloatPacked, mBytesPerPacket: UInt32(MemoryLayout<Float32>.size), mFramesPerPacket: 1, mBytesPerFrame: UInt32(MemoryLayout<Float32>.size), mChannelsPerFrame: 1, mBitsPerChannel: UInt32(MemoryLayout<Float32>.size * 8), mReserved: 0)
var recordState = RecordState()
var startTime = CFAbsoluteTimeGetCurrent()
static var sharedInstance = AudioRecorder()
private var speechRecognizer = SFSpeechRecognizer()!
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?
private var engineEnabled = false
private var lastText = [SFTranscriptionSegment]()
override init() {
super.init()
OperationQueue.main.addOperation {
SFSpeechRecognizer.requestAuthorization { authStatus in
switch authStatus {
case .authorized:
self.engineEnabled = true
default:
self.engineEnabled = false
}
}
}
self.speechRecognizer.delegate = self
}
func startRecording() {
recordState.dataFormat = format
var queue: AudioQueueRef?
if AudioQueueNewInput(&recordState.dataFormat, сallback, &recordState, CFRunLoopGetCurrent(), CFRunLoopMode.commonModes.rawValue, 0, &queue) == noErr {
recordState.queue = queue
} else {
return
}
for _ in 0..<NUM_BUFFERS {
var buffer: AudioQueueBufferRef?
if AudioQueueAllocateBuffer(queue!, 1024, &buffer) == noErr {
recordState.buffers.append(buffer!)
}
AudioQueueEnqueueBuffer(queue!, buffer!, 0, nil)
}
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
guard let recognitionRequest = recognitionRequest else { fatalError("Unable to create a SFSpeechAudioBufferRecognitionRequest object") }
recognitionRequest.shouldReportPartialResults = true
// Keep speech recognition data on device
if #available(iOS 13, *) {
recognitionRequest.requiresOnDeviceRecognition = true
}
recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { result, error in
var isFinal = false
if let result = result {
print(result.bestTranscription.formattedString)
isFinal = result.isFinal
}
if error != nil || isFinal {
// Stop recognizing speech if there is a problem.
self.recognitionRequest = nil
self.recognitionTask = nil
}
}
recordState.recording = true
if AudioQueueStart(recordState.queue!, nil) != noErr {
fatalError("Something is wrong")
}
self.startTime = CFAbsoluteTimeGetCurrent()
}
func stopRecording() {
recordState.recording = false
AudioQueueStop(recordState.queue!, true)
for i in 0..<NUM_BUFFERS {
if let buffers = recordState.buffers[i] as? AudioQueueBufferRef {
AudioQueueFreeBuffer(recordState.queue!, buffers)
}
}
AudioQueueDispose(recordState.queue!, true)
if let file = recordState.audioFile {
AudioFileClose(file)
}
}
func transformBuffer(pBuffer: AudioQueueBufferRef, pLength: UInt32) {
var blockBuffer: CMBlockBuffer?
CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: pBuffer, blockLength: Int(pLength), blockAllocator: kCFAllocatorNull, customBlockSource: nil, offsetToData: 0, dataLength: Int(pLength), flags: kCMBlockBufferAssureMemoryNowFlag, blockBufferOut: &blockBuffer)
let timeFormat = format.mSampleRate
let currentTime = CFAbsoluteTimeGetCurrent()
let elapsedTime: CFTimeInterval = currentTime - self.startTime
let timeStamp = CMTimeMake(value: Int64(elapsedTime * timeFormat), timescale: Int32(timeFormat))
let nSamples = Int(pLength / format.mBytesPerFrame)
do {
let formatDescription = try CMAudioFormatDescription(audioStreamBasicDescription: format)
var sampleBuffer: CMSampleBuffer?
CMAudioSampleBufferCreateWithPacketDescriptions(allocator: kCFAllocatorDefault, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: formatDescription, sampleCount: nSamples, presentationTimeStamp: timeStamp, packetDescriptions: nil, sampleBufferOut: &sampleBuffer)
if let sBuffer = sampleBuffer {
self.recognitionRequest?.appendAudioSampleBuffer(sBuffer)
}
} catch {
fatalError(error.localizedDescription)
}
}
}
Update: I modified the code to make it more descriptive.
Finally, I've found the answer. Here's the code for the conversion of AudioQueueBufferRef into AVAudioPCMBuffer:
func queueBufferToAudioBuffer(_ buffer: AudioQueueBufferRef) -> AVAudioPCMBuffer? {
guard let audioFormat = AVAudioFormat(
commonFormat: .pcmFormatFloat32,
sampleRate: format.mSampleRate,
channels: format.mChannelsPerFrame,
interleaved: true)
else { return nil }
let frameLength = buffer.pointee.mAudioDataBytesCapacity / audioFormat.streamDescription.pointee.mBytesPerFrame
guard let audioBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: frameLength) else { return nil }
audioBuffer.frameLength = frameLength
let dstLeft = audioBuffer.floatChannelData![0]
let src = buffer.pointee.mAudioData.bindMemory(to: Float.self, capacity: Int(frameLength))
dstLeft.initialize(from: src, count: Int(frameLength))
return audioBuffer
}
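Presumably the converted AVAudioPCMBuffer then gets handed to the recognizer from the AudioQueue callback path, since SFSpeechAudioBufferRecognitionRequest can take PCM buffers directly via append(_:). A sketch of that hand-off (appendToRecognizer is a hypothetical helper on AudioRecorder, not part of the code above):

func appendToRecognizer(_ buffer: AudioQueueBufferRef) {
    if let pcmBuffer = queueBufferToAudioBuffer(buffer) {
        // SFSpeechAudioBufferRecognitionRequest accepts AVAudioPCMBuffer directly
        recognitionRequest?.append(pcmBuffer)
    }
}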
I fixed this by setting up the AVAudioSession before AudioQueueStart.
do{
try AVAudioSession.sharedInstance().setCategory(.record, mode:.default)
try AVAudioSession.sharedInstance().setActive(true)
} catch{
print(error)
}

How can I record AVDepthData video and save in the gallery?

I am developing an application to record RGB-D sequences on the iPhone using the dual rear camera or the TrueDepth camera. I can capture and visualize the RGB frames and the depth frames, and I have a version that compresses this data and saves it to the iPhone's internal files. Nevertheless, my idea is to save both sequences (the RGB and depth map sequences) to the gallery, but I am having problems using AVAssetWriter to create a depth map video.
I am using an iPhone X, Xcode 10.2.1 and Swift 5.
import UIKit
import AVFoundation
import AssetsLibrary
var noMoreSpace = false
class ViewController: UIViewController{
@IBOutlet weak var previewView: UIImageView!
@IBOutlet weak var timeLabel: UILabel!
@IBOutlet weak var previewModeControl: UISegmentedControl!
let session = AVCaptureSession()
let dataOutputQueue = DispatchQueue(label: "video data queue")
let videoOutput = AVCaptureVideoDataOutput()
let movieOutput = AVCaptureMovieFileOutput()
let depthOutput = AVCaptureDepthDataOutput()
let depthCapture = DepthCapture()
var previewLayer = AVCaptureVideoPreviewLayer()
var inputDevice: AVCaptureDeviceInput!
let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified)
var Timestamp: String {
let currentDate = NSDate()
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "ddMM_HHmmss"
return "\(dateFormatter.string(from: currentDate as Date))"
}
var isRecording = false
var time = 0
var timer = Timer()
enum PreviewMode: Int {
case original
case depth
}
var previewMode = PreviewMode.original
var depthMap: CIImage?
var scale: CGFloat = 0.0
//let sessionQueue = DispatchQueue(label: "session queue")
override func viewDidLoad() {
super.viewDidLoad()
timeLabel.isHidden = true //TODO: Disable the rest of the UI
previewMode = PreviewMode(rawValue: previewModeControl.selectedSegmentIndex) ?? .original
configureCaptureSession()
session.startRunning()
}
func configureCaptureSession() {
session.beginConfiguration()
let camera = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .video, position: .unspecified)!
do {
let cameraInput = try AVCaptureDeviceInput(device: camera)
if session.canAddInput(cameraInput){
session.sessionPreset = .vga640x480
session.addInput(cameraInput)
self.inputDevice = cameraInput
}
if session.canAddOutput(videoOutput){
videoOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
session.addOutput(videoOutput)
let videoConnection = videoOutput.connection(with: .video)
videoConnection?.videoOrientation = .portrait
//previewLayer = AVCaptureVideoPreviewLayer(session: session)
//previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
//previewLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
//previewView.layer.addSublayer(previewLayer)
//previewLayer.position = CGPoint(x: self.previewView.frame.width / 2, y: self.previewView.frame.height / 2)
//previewLayer.bounds = previewView.frame
}
//Add Depth output to the session
if session.canAddOutput(depthOutput){
session.addOutput(depthOutput)
depthOutput.setDelegate(self, callbackQueue: dataOutputQueue)
depthOutput.isFilteringEnabled = true
let depthConnection = depthOutput.connection(with: .depthData)
depthConnection?.videoOrientation = .portrait
}
/*if session.canAddOutput(movieOutput){
session.addOutput(movieOutput)
}*/
} catch {
print("Error")
}
let outputRect = CGRect(x: 0, y: 0, width: 1, height: 1)
let videoRect = videoOutput.outputRectConverted(fromMetadataOutputRect: outputRect)
let depthRect = depthOutput.outputRectConverted(fromMetadataOutputRect: outputRect)
// Calculate the scaling factor between videoRect and depthRect
scale = max(videoRect.width, videoRect.height) / max(depthRect.width, depthRect.height)
// Change the AVCaptureDevice configuration, so you need to lock it
do{
try camera.lockForConfiguration()
// Set the AVCaptureDevice's minimum frame duration (which is the inverse of the maximum frame rate) to be equal to the supported frame rate of the depth data
if let frameDuration = camera.activeDepthDataFormat?.videoSupportedFrameRateRanges.first?.minFrameDuration{
camera.activeVideoMinFrameDuration = frameDuration
}
// Unlock the configuration you locked
camera.unlockForConfiguration()
}catch{
fatalError(error.localizedDescription)
}
session.commitConfiguration()
}
@IBAction func startStopRecording(_ sender: Any) {
if isRecording{
stopRecording()
} else {
startRecording()
}
}
func startRecording(){
timeLabel.isHidden = false
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(ViewController.timerAction), userInfo: nil, repeats: true)
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let flagTime = Timestamp
let auxStr = flagTime+"_output.mp4"
let fileUrl = paths[0].appendingPathComponent(auxStr)
depthCapture.prepareForRecording(timeFlag: flagTime)
movieOutput.startRecording(to: fileUrl, recordingDelegate: self)
print(fileUrl.absoluteString)
print("Recording started")
self.isRecording = true
}
func stopRecording(){
timeLabel.isHidden = true
timer.invalidate()
time = 0
timeLabel.text = "0"
movieOutput.stopRecording()
print("Stopped recording!")
self.isRecording = false
do {
try depthCapture.finishRecording(success: { (url: URL) -> Void in
print(url.absoluteString)
})
} catch {
print("Error while finishing depth capture.")
}
}
@objc func timerAction() {
time += 1
timeLabel.text = String(time)
}
@IBAction func previeModeChanged(_ sender: UISegmentedControl) {
previewMode = PreviewMode(rawValue: previewModeControl.selectedSegmentIndex) ?? .original
}
@IBAction func switchCamera(_ sender: Any) {
let currentDevice = self.inputDevice.device
let currentPosition = currentDevice.position
let preferredPosition: AVCaptureDevice.Position
let preferredDeviceType: AVCaptureDevice.DeviceType
let devices = self.videoDeviceDiscoverySession.devices
var newVideoDevice: AVCaptureDevice? = nil
switch currentPosition {
case .unspecified, .front:
preferredPosition = .back
preferredDeviceType = .builtInDualCamera
case .back:
preferredPosition = .front
preferredDeviceType = .builtInTrueDepthCamera
@unknown default:
preferredPosition = .back
preferredDeviceType = .builtInDualCamera
}
// First, seek a device with both the preferred position and device type. Otherwise, seek a device with only the preferred position. (TODO: understand these conditions better)
if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) {
newVideoDevice = device
} else if let device = devices.first(where: { $0.position == preferredPosition }) {
newVideoDevice = device
}
if let videoDevice = newVideoDevice {
do {
let cameraInput = try AVCaptureDeviceInput(device: videoDevice)
self.session.beginConfiguration()
self.session.removeInput(self.inputDevice)
if self.session.canAddInput(cameraInput) {
session.sessionPreset = .vga640x480
self.session.addInput(cameraInput)
self.inputDevice = cameraInput
}else {
self.session.addInput(self.inputDevice)
}
self.session.commitConfiguration()
} catch{
print("Error occurred while creating video device input: \(error)")
}
}
}
}
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate{
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
let image = CIImage(cvPixelBuffer: pixelBuffer!)
let previewImage: CIImage
switch previewMode {
case .original:
previewImage = image
case .depth:
previewImage = depthMap ?? image
}
let displayImage = UIImage(ciImage: previewImage)
DispatchQueue.main.async {[weak self] in self?.previewView.image = displayImage}
}
}
extension ViewController: AVCaptureDepthDataOutputDelegate{
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
var convertedDepth: AVDepthData
// Ensure the depth data is the format you need: 32 bit FP disparity.???
if depthData.depthDataType != kCVPixelFormatType_DepthFloat16{
convertedDepth = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
}else{
convertedDepth = depthData
}
// You save the depth data map from the AVDepthData object as a CVPixelBuffer
let pixelBuffer = convertedDepth.depthDataMap
//Using an extension, you then clamp the pixels in the pixel buffer to keep them between 0.0 and 1.0.
pixelBuffer.clamp()
// Convert the pixel buffer into a CIImage
let depthMap = CIImage(cvPixelBuffer: pixelBuffer)
// You store depthMap in a class variable for later use
DispatchQueue.main.async {
[weak self] in self?.depthMap = depthMap
}
}
}
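For the AVAssetWriter part, one common route is to drive an AVAssetWriterInputPixelBufferAdaptor from the depth callback and, when recording stops, save the finished file to the photo library with PHPhotoLibrary. The Float32 depth map would first need converting to a pixel format the encoder accepts (for example a grayscale or BGRA rendering of the CIImage). A condensed sketch of that setup, with illustrative names that are not from the code above:

import AVFoundation
import Photos

final class DepthVideoWriter {
    private let writer: AVAssetWriter
    private let input: AVAssetWriterInput
    private let adaptor: AVAssetWriterInputPixelBufferAdaptor
    private var sessionStarted = false

    init?(outputURL: URL, width: Int, height: Int) {
        guard let assetWriter = try? AVAssetWriter(outputURL: outputURL, fileType: .mov) else { return nil }
        writer = assetWriter
        let settings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ]
        input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
        input.expectsMediaDataInRealTime = true
        adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: nil)
        writer.add(input)
        writer.startWriting()
    }

    // Call from depthDataOutput(_:didOutput:timestamp:connection:) with an encoder-friendly pixel buffer
    func append(_ pixelBuffer: CVPixelBuffer, at time: CMTime) {
        if !sessionStarted {
            writer.startSession(atSourceTime: time)
            sessionStarted = true
        }
        if input.isReadyForMoreMediaData {
            adaptor.append(pixelBuffer, withPresentationTime: time)
        }
    }

    // Finish the file and add it to the photo library (needs photo library add permission)
    func finish(andSaveTo url: URL) {
        input.markAsFinished()
        writer.finishWriting {
            PHPhotoLibrary.shared().performChanges({
                _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
            }, completionHandler: nil)
        }
    }
}

Something like this could be created in startRecording alongside movieOutput and fed from the depth delegate, though exactly how it fits with the existing DepthCapture class is up to the app.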

Can't resolve: unsafeAddressOf is abandoned in Swift 3

I just realized that my old app no longer works because unsafeAddressOf was removed in Swift 3. I have been searching Apple's documentation and online tutorials but still can't figure out how to change my code to be Swift 3 compliant. Here is my code:
import UIKit
import ImageIO
extension UIImage {
public class func gifWithData(data: NSData) -> UIImage? {
guard let source = CGImageSourceCreateWithData(data, nil) else {
print("SwiftGif: Source for the image does not exist")
return nil
}
return UIImage.animatedImageWithSource(source: source)
}
public class func gifWithName(name: String) -> UIImage? {
guard let bundleURL = Bundle.main.url(forResource: name, withExtension: "gif") else {
print("SwiftGif: This image named \"\(name)\" does not exist")
return nil
}
guard let imageData = NSData(contentsOfURL: bundleURL) else {
print("SwiftGif: Cannot turn image named \"\(name)\" into NSData")
return nil
}
return gifWithData(imageData)
}
class func delayForImageAtIndex(index: Int, source: CGImageSource!) -> Double {
var delay = 0.1
// Get dictionaries
let cfProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil)
let gifProperties: CFDictionary = unsafeBitCast(CFDictionaryGetValue(cfProperties, unsafeAddressOf(kCGImagePropertyGIFDictionary)), to: CFDictionary.self)
// Get delay time
var delayObject: AnyObject = unsafeBitCast(
CFDictionaryGetValue(gifProperties,
unsafeAddressOf(kCGImagePropertyGIFUnclampedDelayTime)),
to: AnyObject.self)
if delayObject.doubleValue == 0 {
delayObject = unsafeBitCast(CFDictionaryGetValue(gifProperties,
unsafeAddressOf(kCGImagePropertyGIFDelayTime)), to: AnyObject.self)
}
delay = delayObject as! Double
if delay < 0.1 {
delay = 0.1 // Make sure they're not too fast
}
return delay
}
class func gcdForPair( a: Int?, var _ b: Int?) -> Int {
// Check if one of them is nil
var a = a
if b == nil || a == nil {
if b != nil {
return b!
} else if a != nil {
return a!
} else {
return 0
}
}
// Swap for modulo
if a < b {
let c = a
a = b
b = c
}
// Get greatest common divisor
var rest: Int
while true {
rest = a! % b!
if rest == 0 {
return b! // Found it
} else {
a = b
b = rest
}
}
}
class func gcdForArray(array: Array<Int>) -> Int {
if array.isEmpty {
return 1
}
var gcd = array[0]
for val in array {
gcd = UIImage.gcdForPair(val, gcd)
}
return gcd
}
class func animatedImageWithSource(source: CGImageSource) -> UIImage? {
let count = CGImageSourceGetCount(source)
var images = [CGImage]()
var delays = [Int]()
// Fill arrays
for i in 0..<count {
// Add image
if let image = CGImageSourceCreateImageAtIndex(source, i, nil) {
images.append(image)
}
// At it's delay in cs
let delaySeconds = UIImage.delayForImageAtIndex(index: Int(i),
source: source)
delays.append(Int(delaySeconds * 1000.0)) // Seconds to ms
}
// Calculate full duration
let duration: Int = {
var sum = 0
for val: Int in delays {
sum += val
}
return sum
}()
// Get frames
let gcd = gcdForArray(array: delays)
var frames = [UIImage]()
var frame: UIImage
var frameCount: Int
for i in 0..<count {
frame = UIImage(CGImage: images[Int(i)])
frameCount = Int(delays[Int(i)] / gcd)
for _ in 0..<frameCount {
frames.append(frame)
}
}
// Heyhey
let animation = UIImage.animatedImage(with: frames,
duration: Double(duration) / 1000.0)
return animation
}
}
Does anyone have an idea how I can fix this code?
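For the unsafeAddressOf calls specifically, one Swift 3 workaround is to stop going through CFDictionaryGetValue and instead bridge the CoreGraphics property dictionary to NSDictionary, so the CFString keys can be used as ordinary keys. A sketch of what delayForImageAtIndex could look like under that approach (one possible port, not the only one):

class func delayForImageAtIndex(index: Int, source: CGImageSource!) -> Double {
    var delay = 0.1
    // Bridge the CF dictionaries instead of calling CFDictionaryGetValue with raw pointers
    guard let cfProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil) as NSDictionary?,
        let gifProperties = cfProperties[kCGImagePropertyGIFDictionary as String] as? NSDictionary else {
            return delay
    }
    // Prefer the unclamped delay time, fall back to the clamped one
    let unclamped = gifProperties[kCGImagePropertyGIFUnclampedDelayTime as String] as? Double
    let clamped = gifProperties[kCGImagePropertyGIFDelayTime as String] as? Double
    if let value = unclamped, value > 0 {
        delay = value
    } else if let value = clamped {
        delay = value
    }
    return max(delay, 0.1) // make sure the frames aren't too fast
}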

Swift NSStream delegate; reading is not working; stream func

Hi, I have been working on this code for a while now and I cannot figure out why my stream func is never called.
I am building a joystick that responds to user interaction and sends data, but it is not receiving data from the server.
Can anyone see what the problem is?
I have attached my Swift code and my Python server.
Thanks!
import Darwin
import Foundation
import UIKit
import Dispatch
class ViewController: UIViewController {
@IBOutlet private weak var joystickMove: Joystick!
@IBOutlet private weak var joystickRotate: Joystick!
private var joystick = Joystick()
private var contour = Contours()
private var contour_index: Int = 0
private var socket = Connection(address: "000.000.0.000" as CFString, port: 7041)
override func viewDidLoad() {
super.viewDidLoad()
createJoystick()
createContours()
createButton()
}
private func createJoystick() {
let n: CGFloat = 100.0
let x: CGFloat = (UIScreen.main.bounds.width/2) - (n/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0)
self.joystick.frame = CGRect(x: x, y: y, width: n, height: n)
self.joystick.backgroundColor = UIColor.clear
self.joystick.substrateColor = UIColor.lightGray
self.joystick.substrateBorderColor = UIColor.gray
self.joystick.substrateBorderWidth = 1.0
self.joystick.stickSize = CGSize(width: 50.0, height: 50.0)
self.joystick.stickColor = UIColor.darkGray
self.joystick.stickBorderColor = UIColor.black
self.joystick.stickBorderWidth = 2.0
self.joystick.fade = 0.5
var packet = ""
DispatchQueue.global(qos: .userInitiated).async { // do some task
self.joystick.trackingHandler = { (data) -> () in
let power = sqrt(pow(Double(data.velocity.x), 2.0) + pow(Double(data.velocity.y), 2.0))
let theta = atan2(Double(-data.velocity.y), Double(data.velocity.x))
let degrees = theta * (180.0 / M_PI)
if degrees >= 55 && degrees <= 125 { // move forward
packet = "\(1) \(1) \(power) \(power)"
} else if degrees >= -125 && degrees <= -55 { // move backwards
packet = "\(-1) \(-1) \(power) \(power)"
} else if degrees >= -55 && degrees <= 55 { // turn right
packet = "\(1) \(-1) \(power) \(power)"
} else if (degrees >= 125 && degrees <= 180) && (degrees >= -180 && degrees <= -125) { // turn left
packet = "\(-1) \(1) \(power) \(power)"
}
self.socket.connect()
let sent = self.socket.sendMessage(message: packet)
if sent {
print("packet sent!\n \(packet)")
}
self.socket.changeWhenHasBytesAvailable = { str in
print("my-str: \(str)")
}
self.socket.disconnect()
}
}
view.addSubview(joystick)
}
private func createContours() {
let n: CGFloat = 350.0
let x: CGFloat = (UIScreen.main.bounds.width/2.0) - (n/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0) - n - 100.0
self.contour.frame = CGRect(x: x, y: y, width: n, height: n)
self.contour.backgroundColor = UIColor.clear
view.addSubview(self.contour)
}
private func createButton() {
let width: CGFloat = 150.0
let height: CGFloat = 75.0
let x: CGFloat = (UIScreen.main.bounds.width/2.0) - (width/2.0)
let y: CGFloat = UIScreen.main.bounds.height - (UIScreen.main.bounds.height/4.0) - width
let button: UIButton = UIButton(frame: CGRect(x: x, y: y, width: width, height: height))
button.backgroundColor = UIColor.blue
button.setTitle("Contour Views", for: .normal)
button.addTarget(self, action: #selector(self.buttonAction), for: .touchUpInside)
button.tag = 1
view.addSubview(button)
}
@objc private func buttonAction(sender: UIButton!) {
var btnsendtag: UIButton = sender
if btnsendtag.tag == 1 {
self.contour_index = (self.contour_index + 1) % 2
switch self.contour_index {
case 0:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 0
}
}
case 1:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 1
}
}
default:
for index in 0...356 {
if self.contour.distx[index] != -1 && self.contour.disty[index] != -1 {
self.contour.circles[index].alpha = 1
UIColor.cyan.setFill()
self.contour.lines[index].fill()
self.contour.lines[index].stroke()
}
}
}
}
}
override func viewDidAppear(_ animated: Bool) {
DispatchQueue.global(qos: .background).async { // do some task
// self.socket.connect()
// var packet = "1 1 0 0"
// let sent = self.socket.sendMessage(message: packet)
// if sent {
// print("packet sent!\n \(packet)")
// }
// self.socket.disconnect()
}
}
public func delayWithSeconds(_ seconds: Double, completion: @escaping () -> ()) {
DispatchQueue.main.asyncAfter(deadline: .now() + seconds) {
completion()
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func viewWillDisappear(_ animated: Bool) {
// self.socket.disconnect()
}
}
The Connection class:
import Foundation
class Connection: NSObject, StreamDelegate {
private var inputStream: InputStream!
private var outputStream: OutputStream!
private var my_address: CFString
private var my_port: UInt32
public var changeWhenHasBytesAvailable: ((String)->())?
init (address: CFString, port:UInt32) {
self.my_address = address
self.my_port = port
super.init()
}
public func connect() {
var readStream: Unmanaged<CFReadStream>?
var writeStream: Unmanaged<CFWriteStream>?
CFStreamCreatePairWithSocketToHost(nil, self.my_address, self.my_port, &readStream, &writeStream)
self.inputStream = readStream!.takeRetainedValue()
self.outputStream = writeStream!.takeRetainedValue()
self.inputStream.delegate = self
self.outputStream.delegate = self
self.inputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
self.outputStream.schedule(in: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
self.inputStream.open()
self.outputStream.open()
}
public func stream(aStream: Stream, handleEvent eventCode: Stream.Event) {
DispatchQueue.global(qos: .background).async { // do some task
print("stream")
if aStream === self.inputStream {
switch eventCode {
case Stream.Event.hasBytesAvailable:
print("available!")
var inputBuffer = Array<UInt8>(repeating: 0, count:2048)
self.inputStream?.read(&inputBuffer, maxLength: 2048)
//call it in main queue
self.changeWhenHasBytesAvailable?(String(bytes: inputBuffer, encoding: String.Encoding.utf8)!)
// var buffer = [UInt8](repeating: 0, count: 4096)
// while (inputStream!.hasBytesAvailable){
// var len = inputStream!.read(&buffer, maxLength: buffer.count)
// if(len > 0){
// var output = NSString(bytes: &buffer, length: buffer.count, encoding: String.Encoding.utf8.rawValue)
// var output = NSString(bytes: &buffer, length: len, encoding: String.Encoding.utf8)
// print("output: \(output)")
// if (output != ""){
// NSLog("server said: %#", output!)
// }
// } else {
// print("empty string from stream")
// }
// }
break
default:
break
}
}
}
}
public func sendMessage(message: String) -> Bool {
var sent: Bool = false
let buff = [UInt8](message.utf8)
let result = self.outputStream.write(buff, maxLength: buff.count)
if result != -1 {
sent = true
}
return sent
}
public func disconnect() {
self.inputStream.close()
self.outputStream.close()
// self.inputStream.remove(from: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
// self.outputStream.remove(from: RunLoop.current, forMode: RunLoopMode.defaultRunLoopMode)
}
}
And the Python server:
import socketserver

class MyTCPSocketHandler(socketserver.BaseRequestHandler):
    """
    The request handler class for our server.

    It is instantiated once per connection to the server, and must
    override the handle() method to implement communication to the
    client.
    """

    def handle(self):
        # self.request is the TCP socket connected to the client
        self.data = self.request.recv(1024)
        print("{} wrote:".format(self.client_address[0]))
        print("{} wrote:".format(self.client_address))
        packet = self.data.decode()
        print("decode-packet: {}".format(packet))
        self.request.sendto(bytes("11111111111111", 'utf-8'), self.client_address)

if __name__ == "__main__":
    HOST, PORT = "", 7041
    socketserver.allow_reuse_address = True
    print("HOST: {}".format(HOST))
    server = socketserver.TCPServer((HOST, PORT), MyTCPSocketHandler)
    server.serve_forever()
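Two things worth checking in the Connection class above. First, in Swift 3 the StreamDelegate requirement is declared with an unnamed first parameter, so a method spelled stream(aStream:handleEvent:) does not match the protocol and will never be called by the run loop; the expected shape is roughly:

func stream(_ aStream: Stream, handleEvent eventCode: Stream.Event) {
    // Note the leading underscore on the first parameter: without it this is just an unrelated method
    switch eventCode {
    case Stream.Event.hasBytesAvailable:
        print("bytes available")
    default:
        break
    }
}

Second, trackingHandler calls connect(), sendMessage(message:), and disconnect() within a single invocation, so the streams are closed right after the write and the run loop never gets a chance to deliver a hasBytesAvailable event for the server's reply. Both are hedged suggestions to verify against the actual setup rather than a confirmed diagnosis.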