My app crashes with Parse notifications - android-activity

My application was working perfectly fine until the day I integrated Parse notifications. Now my app crashes if it is idle for 2 minutes. The following is the code — can anyone help me?
// Sets up the activity UI: restores the WebView after a configuration change,
// otherwise loads the bundled quotes page, then initializes Parse push support.
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    webView = (WebView) findViewById(R.id.webView1);
    if (savedInstanceState == null) {
        // Fresh launch: load the local page shipped in the app's assets.
        startWebView("file:///android_asset/www/quotes.html");
    } else {
        // Recreated (e.g. after rotation): restore the previous WebView state.
        webView.restoreState(savedInstanceState);
    }

    try {
        Parse.initialize(this, "***************", "**********");
        PushService.setDefaultPushCallback(this, MainActivity.class);
        ParseInstallation.getCurrentInstallation().saveInBackground();
    } catch(Exception exception){
        exception.printStackTrace();
    }
}

Related

AVAudioSession DefaultToSpeaker still playing audio from receiver

I have an app that allows users to playback audio while recording a video. They can only record in landscape.
This is how I've set up the playback of audio during a video session:
guard allowBackgroundAudio == true else {
return
}
guard audioEnabled == true else {
return
}
do{
if #available(iOS 10.0, *) {
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers, .defaultToSpeaker])
} else {
let options: [AVAudioSession.CategoryOptions] = [.mixWithOthers, .allowBluetooth]
let category = AVAudioSession.Category.playAndRecord
let selector = NSSelectorFromString("setCategory:withOptions:error:")
AVAudioSession.sharedInstance().perform(selector, with: category, with: options)
}
try AVAudioSession.sharedInstance().setActive(true)
session.automaticallyConfiguresApplicationAudioSession = false
}
catch {
print("[SwiftyCam]: Failed to set background audio preference")
}
}
The problem is audio is still playing slightly out of the receiver which means the top mic is picking up the audio playback, drowning out the user's audio.
After reading on here that the receiver speaker still playing might be a bug (or feature) from Apple I decided to use the back mic for the selfie camera, thus splitting the audio away from the mic. I can't seem to get the selfie camera to use the back mic.
/// Attempts to select the microphone data source whose name matches
/// `uiorient` (e.g. "Front", "Back", "Bottom") on the first available input.
/// Returns silently when no audio inputs or matching data sources exist.
public class func setMicrophone(_ uiorient: String) {
    guard let inputs = AVAudioSession.sharedInstance().availableInputs else {
        return
    }
    // Debug aid: dump the data sources of every input port.
    for input in inputs {
        print(input.dataSources ?? "??")
    }
    // Bug fix: `inputs[0]` crashes when `availableInputs` is a non-nil but
    // empty array; bind the first element safely instead.
    guard let preferredPort = inputs.first else {
        return
    }
    if let dataSources = preferredPort.dataSources {
        for source in dataSources {
            if source.dataSourceName == uiorient {
                do {
                    try preferredPort.setPreferredDataSource(source)
                }
                catch _ {
                    print("Cannot set \(uiorient) microphone.")
                }
            }
        }
    }
}
and then have this when we are calling the selfie camera;
AudioRecorderViewController.setMicrophone("Back")

Sound not playing in simulator, playing during debug in Xcode

I'm working through the book "Intro to App Development" by Apple. I'm stuck on the AnimalSounds exercise.
There is a SimpleSound class provided by the book, and our job is to use a sound object to reproduce a sound.
The simulator does not play any sound.
The audio files are in the build bundle and are found by the class.
I checked the simulator with other apps (e.g. youtube on safari) and sound works.
SimpleSound class (provided by example, not written by me)
import Foundation
import AudioToolbox
/// Thin wrapper around AudioToolbox system sounds: locates a bundled audio
/// file by name, registers it as a SystemSoundID, and plays it on demand.
class SimpleSound {
    // 0 acts as the "no sound registered" sentinel.
    private var soundID: SystemSoundID = 0

    public init(named name: String) {
        guard let url = soundURL(forName: name) else { return }
        let creationStatus = AudioServicesCreateSystemSoundID(url as CFURL, &soundID)
        guard creationStatus == noErr else {
            print("Unable to create sound at URL: '\(name)'")
            soundID = 0
            return
        }
    }

    public func play() {
        // Nothing to do when registration failed or no file was found.
        guard soundID != 0 else { return }
        print("Playing sound \(soundID)")
        AudioServicesPlaySystemSound(soundID)
        //AudioServicesPlaySystemSound(1001)
    }

    private func soundURL(forName name: String) -> URL? {
        let fileExtensions = ["m4a", "wav", "mp3", "aac", "adts", "aif", "aiff", "aifc", "caf", "mp4"]
        // Pick the first extension for which a bundled resource exists.
        let match = fileExtensions.lazy
            .compactMap { Bundle.main.url(forResource: name, withExtension: $0) }
            .first
        guard let soundURL = match else {
            print("Unable to find sound file with name '\(name)'")
            return nil
        }
        return soundURL
    }

    deinit {
        guard soundID != 0 else { return }
        AudioServicesDisposeSystemSoundID(soundID)
    }
}
Code used in my viewController (written by me) in a button tapped action:
let meowSound = SimpleSound(named: "meow")
meowSound.play()
When stepping through the code line by line in the Xcode debugger, the sound is reproduced when play() is invoked. When the app runs in the simulator (without debugging) or on an actual iPhone, the sound does not play.
If standard system sound 1001 is uncommented, sound is played for both simulator and iPhone.
Any ideas?

CLSLogv logs are not coming in Crashlytics

It's an Ionic app with some code written natively. It uses the cordova-plugin-firebase plugin for logging to Crashlytics.
In the native iOS part as well, we are trying to use Crashlytics to enable logging. However, no matter what I try, logs sent using CLSLogv aren't visible in the dashboard.
Here is my code.
// Cordova plugin that drains records from a local SQLite queue on a background
// queue, reporting read failures to Crashlytics.
// Fixes: the `@` sigils were mangled to `#` (`@objc` attributes), and the
// CLSLogv format string must be "%@" — "%#" is not a valid specifier, which is
// why the custom log lines never showed up in the Crashlytics dashboard.
@objc(ImageUpload) class ImageUpload : CDVPlugin {
    var backgroundTask: UIBackgroundTaskIdentifier = UIBackgroundTaskInvalid

    //https://docs.fabric.io/apple/crashlytics/enhanced-reports.html#custom-logging-in-swift
    private func sendErrorToCrashlytics(error: String) {
        NSLog("Error in send error function is \(error)")
        // "%@" formats the single object argument supplied via getVaList.
        CLSLogv("%@", getVaList([error]))
    }

    @objc(imageUpload:)
    func imageUpload(command: CDVInvokedUrlCommand) {
        registerBackgroundTask()
        func execute() {
            let db = SQLiteDatabase()
            var recToUpLoad: PayloadModel? = nil
            // NOTE(review): `toCheck` is defined elsewhere — presumably a flag
            // flipped when the queue is exhausted; confirm it terminates.
            while(toCheck) {
                do {
                    let record = try db.readValues() // THIS METHOD THROWS EXCEPTION
                } catch Exceptions.SQLiteError(let error) {
                    self.sendErrorToCrashlytics(error: error) // IT COMES HERE AFTER EXCEPTION
                }
            }
        }
        DispatchQueue(label: "imageUploadPlugin",qos: .background).async
        {
            execute()
        }
    }
}
However, CLSLogv output is not visible at all in Crashlytics, whereas when I do Crashlytics.sharedInstance().throwException()
, I can see it in the dashboard.
Exceptions is enum
/// Errors thrown by the plugin's native storage/parsing layer
/// (e.g. `SQLiteDatabase.readValues()` throws `.SQLiteError`).
enum Exceptions: Error {
// Carries the SQLite error message for the failed operation.
case SQLiteError(message: String)
// Carries a JSON (de)serialization failure message.
case JSONError(message: String)
}
Hoping it may help someone. Somehow I couldn't get CLSLogv to work. I ended up creating an NSError object and log that in Crashlytics in catch block.
catch Exceptions.SQLiteError(let error) {
let userInfo = [NSLocalizedDescriptionKey: error.message, "query": error.query]
let errorObj = NSError(domain: "sqlite", code: 400, userInfo: userInfo)
Crashlytics.sharedInstance().recordError(errorObj)
}

Flutter: Async issues with third party login

I'm trying to implement native facebook login in Flutter.
So far I have a MethodChannel:
const platform = const MethodChannel('com.app/facebook');
I have a button in flutter that launches into the login sequence:
onPressed: () async {
try {
final String fbButtonResult = await platform.invokeMethod('doFacebookLogin');
fbLoginResult = 'FB Button Result: $fbButtonResult';
} on PlatformException catch (e) {
fbLoginResult = "No FB button: '${e.message}'";
}
print("this never executes");
print(fbLoginResult);
}
And I have some iOS code to execute the native login:
// Registers a "doFacebookLogin" handler on a Flutter method channel and
// reports the login outcome back to Dart through the FlutterResult callback.
// Fix: every Objective-C `@` sigil had been mangled to `#` (`#"..."`,
// `#[...]`), which is not valid Objective-C; the `@` literals are restored.
FlutterViewController* controller = (FlutterViewController*)self.window.rootViewController;
FlutterMethodChannel* facebookChannel = [FlutterMethodChannel
    methodChannelWithName:@"com.app/facebook"
          binaryMessenger:controller];
[facebookChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) {
    // check which function was called
    if ([@"doFacebookLogin" isEqualToString:call.method]) {
        FBSDKLoginManager *login = [[FBSDKLoginManager alloc] init];
        [login
            logInWithReadPermissions: @[@"public_profile", @"email"]
            fromViewController:controller
            handler:^(FBSDKLoginManagerLoginResult *fbResult, NSError *error) {
                // Exactly one result(...) call per invocation, as the
                // method-channel contract requires.
                if (error) {
                    NSLog(@"Process error");
                    result(@"Process error");
                } else if (fbResult.isCancelled) {
                    NSLog(@"Cancelled");
                    result(@"Cancelled");
                } else {
                    NSLog(@"Logged in");
                    result(@"Logged in");
                }
            }];
    } else {
        result(FlutterMethodNotImplemented);
    }
}];
When we execute this, the FB native login functionality works great and runs through the flow properly. When it is done, the NSLog for "Logged In" shows up in the log, but the result doesn't seem to ever get back to the flutter app to let me know there what happened.
We need to know in some way whether this was successful or not so we can get the credentials and advance the view to the next step. Is there an issue in the code above or do I misunderstand how MethodChannels work? Or do we need to do something completely different like do an invokeMethod from the iOS code back to flutter once the flow is finished? Or is there some simple way to know when focus has returned to my flutter widget so I can check to see if the login was successful or not?

Turning on both iPhone LED's

Newer iPhones have two LEDs: one basic white LED and an Amber LED for softer photography.
I'm trying to turn my iPhone into a flashlight but I want to get the maximal brightness. I've successfully been able to turn off and on the white LED with the code below, but I can't get the amber LED and the white LED to turn on at the same time.
Here's my function written in Swift:
/// Switches the torch (white LED) on or off.
/// - Parameter on: true to light the torch, false to extinguish it.
/// NOTE(review): AVFoundation's torchMode drives the flash module as a whole;
/// no public API appears to address the amber LED independently — confirm.
func toggleTorch(on on: Bool) {
    // Bug fix: defaultDeviceWithMediaType returns nil on hardware without a
    // camera (e.g. the simulator); the original implicitly-unwrapped access
    // to .hasTorch would crash there. Guard instead.
    guard let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) else {
        print("Torch is not available")
        return
    }
    if device.hasTorch {
        do {
            try device.lockForConfiguration()
            device.torchMode = on ? .On : .Off
            device.unlockForConfiguration()
        } catch {
            print("Torch could not be used")
        }
    } else {
        print("Torch is not available")
    }
}