Libstreaming: Can we perform zoom in/out on the camera SurfaceView, as well as an on-touch focus circle and flash, in the camera preview?

I am developing an Android application using the libstreaming library. The app sends an upstream to Wowza (mobile to Wowza). I created one SurfaceView which holds the camera preview. It's working fine, but I want to add three features (zoom in/out, autofocus, and flash).
I don't know whether this is possible with libstreaming.
The SurfaceView I have used is
net.majorkernelpanic.streaming.gl.SurfaceView.
Below is my Activity code:
public class LiveStreamingActivity extends Activity implements RtspClient.Callback, Session.Callback, SurfaceHolder.Callback {
private static SurfaceView mSurfaceView;
private SurfaceHolder mHolder;
private Session mSession;// Rtsp session
private static RtspClient mClient;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_main);
if (!LibsChecker.checkVitamioLibs(this))
return;
mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@SuppressWarnings("deprecation")
private void initRtspClient() {
// Configures the SessionBuilder
mSession = SessionBuilder
.getInstance()
.setContext(getApplicationContext())
.setAudioEncoder(SessionBuilder.AUDIO_AAC)
.setAudioQuality(new AudioQuality(8000, 16000))
.setVideoEncoder(SessionBuilder.VIDEO_H264)
//.setVideoQuality(new VideoQuality(352, 288, 30, 300000))
.setCamera(CameraInfo.CAMERA_FACING_BACK)
.setSurfaceView(mSurfaceView).setPreviewOrientation(0)
.setCallback(this).build();
mClient = new RtspClient();
mClient.setSession(mSession);
mClient.setCallback(this);
mClient.setTransportMode(RtspClient.TRANSPORT_TCP);
mSurfaceView.setAspectRatioMode(SurfaceView.ASPECT_RATIO_PREVIEW);
String ip, port, path;
Pattern uri = Pattern.compile("rtsp://(.+):(\\d+)/(.+)");
Matcher m = uri.matcher("rtsp://219.65.90.226:1935/app2/myStream");
m.find();
ip = m.group(1);
port = m.group(2);
path = m.group(3);
mClient.setCredentials(AppConfig.PUBLISHER_USERNAME,
AppConfig.PUBLISHER_PASSWORD);
mClient.setServerAddress(ip, Integer.parseInt(port));
mClient.setStreamPath("/" + path);
}
@Override
protected void onResume() {
System.out.println("on Resume activity 2");
super.onResume();
try{
if(null != mSurfaceView){
/* Broadcastreceiver: check network connectivity */
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction("android.net.conn.CONNECTIVITY_CHANGE");
registerReceiver(receiver, intentFilter);
/* Start audio streaming background thread: AsyncTask */
vmPlayer = null;
vmPlayer = new MediaPlayer(this); // vmPlayer, audioStream, and receiver are fields declared elsewhere in the full class
audioStream = new AudioStreamTask(this);
audioStream.execute("push", "push", "push");
}
}catch(Exception ex){
ex.printStackTrace();
}
}
@Override
protected void onPause() {
super.onPause();
try{
/* release the surface view */
if(null != mSurfaceView){
mClient.release();
mSession.release();
mSurfaceView.getHolder().removeCallback(this);
}
}catch(Exception ex){
ex.printStackTrace();
}
}
@Override
public void onDestroy() {
try {
super.onDestroy();
if (mClient != null) {
mClient.release();
}
if (mSession != null) {
mSession.release();
}
mSurfaceView.getHolder().removeCallback(this);
} catch (Exception e) {
System.out.println("Error while destroying activity " + e);
}
}
private void toggleStreaming() {
if (!mClient.isStreaming()) {
// Start camera preview
mSession.startPreview();
// mFrontSession.startPreview();
// Start video stream
mClient.startStream();
//startRtmpStream();
} else {
// already streaming, stop streaming
// stop camera preview
mSession.stopPreview();
// mFrontSession.stopPreview();
// stop streaming
mClient.stopStream();
}
}}
activity_main.xml
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/surface_layout"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#android:color/black"
android:orientation="vertical" >
<LinearLayout
android:id="#+id/surface_view_layout"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1"
android:orientation="vertical" >
<net.majorkernelpanic.streaming.gl.SurfaceView
android:id="#+id/surface_view"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_gravity="center" />
</LinearLayout></FrameLayout>
I need a complete description of how to add all three of these camera features.

I did it! :)
Go to VideoStream.java and change:
protected Camera mCamera to public static Camera mCamera.
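In other words, the one-line edit in the library source (a sketch; in the libstreaming sources the class lives at net.majorkernelpanic.streaming.video.VideoStream) is:
// before
protected Camera mCamera;
// after: the field is now reachable from your activity
public static Camera mCamera;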
Go to your MainActivity, in your case LiveStreamingActivity and paste:
private float mDist;
@Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = VideoStream.mCamera.getParameters();
int action = event.getActionMasked(); // masked action, so ACTION_POINTER_DOWN is detected for any pointer index
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
VideoStream.mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
float newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
VideoStream.mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
VideoStream.mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/**
* Determine the space between the first two fingers
*/
private float getFingerSpacing(MotionEvent event) {
// ...
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return FloatMath.sqrt(x * x + y * y);
}
Based on this answer.
Let me know if it helped!
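The question also asked about flash, which the snippets above do not cover. A minimal sketch along the same lines, assuming the same public static VideoStream.mCamera field and a device whose camera advertises torch support:
private void toggleFlash(boolean enable) {
Camera.Parameters params = VideoStream.mCamera.getParameters();
List<String> flashModes = params.getSupportedFlashModes();
// only switch modes the hardware actually advertises
if (flashModes != null && flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
params.setFlashMode(enable ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
VideoStream.mCamera.setParameters(params);
}
}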

Thank you @José Cunha Fonte, your code is great!
For me (works with the Marshmallow SDK) return FloatMath.sqrt(x * x + y * y); is deprecated and gone, so I just changed it to return (float) Math.sqrt(x * x + y * y);
Hope it will help someone :)

Related

Passing values from Android to Flutter, but not from the main activity

I'm implementing a third-party Android SDK in Flutter and I want a message to be passed from Android to Flutter when the SDK starts.
I have implemented the SDK using a platform channel; I just need to work on the callback code. In the code there is a function called onChannelJoin, and I want to send a message to Flutter when this function is called.
Main Activity
public class MainActivity extends FlutterActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
GeneratedPluginRegistrant.registerWith(this);
final String CHANNEL = "samples.flutter.io/screen_record";
new MethodChannel(getFlutterView(), CHANNEL).setMethodCallHandler(
new MethodChannel.MethodCallHandler() {
@Override
public void onMethodCall(MethodCall call, MethodChannel.Result result) {
// TODO
if (call.method.equals("startScreenShare")) {
Intent intent = new Intent(MainActivity.this , HelloAgoraScreenSharingActivity.class);
startActivity(intent);
} else {
result.notImplemented();
}
}
});
}
}
ScreenSharingActivity
public class HelloAgoraScreenSharingActivity extends Activity {
private static final String LOG_TAG = "AgoraScreenSharing";
private static final int PERMISSION_REQ_ID_RECORD_AUDIO = 22;
private ScreenCapture mScreenCapture;
private GLRender mScreenGLRender;
private RtcEngine mRtcEngine;
private boolean mIsLandSpace = false;
private void initModules() {
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
if (mScreenGLRender == null) {
mScreenGLRender = new GLRender();
}
if (mScreenCapture == null) {
mScreenCapture = new ScreenCapture(getApplicationContext(), mScreenGLRender, metrics.densityDpi);
}
mScreenCapture.mImgTexSrcConnector.connect(new SinkConnector<ImgTexFrame>() {
@Override
public void onFormatChanged(Object obj) {
Log.d(LOG_TAG, "onFormatChanged " + obj.toString());
}
@Override
public void onFrameAvailable(ImgTexFrame frame) {
Log.d(LOG_TAG, "onFrameAvailable " + frame.toString());
if (mRtcEngine == null) {
return;
}
AgoraVideoFrame vf = new AgoraVideoFrame();
vf.format = AgoraVideoFrame.FORMAT_TEXTURE_OES;
vf.timeStamp = frame.pts;
vf.stride = frame.mFormat.mWidth;
vf.height = frame.mFormat.mHeight;
vf.textureID = frame.mTextureId;
vf.syncMode = true;
vf.eglContext14 = mScreenGLRender.getEGLContext();
vf.transform = frame.mTexMatrix;
mRtcEngine.pushExternalVideoFrame(vf);
}
});
mScreenCapture.setOnScreenCaptureListener(new ScreenCapture.OnScreenCaptureListener() {
@Override
public void onStarted() {
Log.d(LOG_TAG, "Screen Record Started");
}
@Override
public void onError(int err) {
Log.d(LOG_TAG, "onError " + err);
switch (err) {
case ScreenCapture.SCREEN_ERROR_SYSTEM_UNSUPPORTED:
break;
case ScreenCapture.SCREEN_ERROR_PERMISSION_DENIED:
break;
}
}
});
WindowManager wm = (WindowManager) getApplicationContext()
.getSystemService(Context.WINDOW_SERVICE);
int screenWidth = wm.getDefaultDisplay().getWidth();
int screenHeight = wm.getDefaultDisplay().getHeight();
if ((mIsLandSpace && screenWidth < screenHeight) ||
(!mIsLandSpace) && screenWidth > screenHeight) {
screenWidth = wm.getDefaultDisplay().getHeight();
screenHeight = wm.getDefaultDisplay().getWidth();
}
setOffscreenPreview(screenWidth, screenHeight);
if (mRtcEngine == null) {
try {
mRtcEngine = RtcEngine.create(getApplicationContext(), "Agora_id", new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
Log.d(LOG_TAG, "onJoinChannelSuccess " + channel + " " + elapsed);
}
@Override
public void onWarning(int warn) {
Log.d(LOG_TAG, "onWarning " + warn);
}
@Override
public void onError(int err) {
Log.d(LOG_TAG, "onError " + err);
}
@Override
public void onAudioRouteChanged(int routing) {
Log.d(LOG_TAG, "onAudioRouteChanged " + routing);
}
});
} catch (Exception e) {
Log.e(LOG_TAG, Log.getStackTraceString(e));
throw new RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e));
}
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING);
mRtcEngine.enableVideo();
if (mRtcEngine.isTextureEncodeSupported()) {
mRtcEngine.setExternalVideoSource(true, true, true);
} else {
throw new RuntimeException("Can not work on device do not supporting texture" + mRtcEngine.isTextureEncodeSupported());
}
mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_360P, true);
mRtcEngine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER);
}
}
private void deInitModules() {
RtcEngine.destroy();
mRtcEngine = null;
if (mScreenCapture != null) {
mScreenCapture.release();
mScreenCapture = null;
}
if (mScreenGLRender != null) {
mScreenGLRender.quit();
mScreenGLRender = null;
}
}
/**
* Set offscreen preview.
*
* @param width offscreen width
* @param height offscreen height
* @throws IllegalArgumentException
*/
public void setOffscreenPreview(int width, int height) throws IllegalArgumentException {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Invalid offscreen resolution");
}
mScreenGLRender.init(width, height);
}
private void startCapture() {
mScreenCapture.start();
}
private void stopCapture() {
mScreenCapture.stop();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_hello_agora_screen_sharing);
}
public void onLiveSharingScreenClicked(View view) {
Button button = (Button) view;
boolean selected = button.isSelected();
button.setSelected(!selected);
if (button.isSelected()) {
initModules();
startCapture();
String channel = "ss_test" + System.currentTimeMillis();
channel = "ss_test";
button.setText("stop");
mRtcEngine.muteAllRemoteAudioStreams(true);
mRtcEngine.muteAllRemoteVideoStreams(true);
mRtcEngine.joinChannel(null, channel, "", 0);
} else {
button.setText("start");
mRtcEngine.leaveChannel();
stopCapture();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
deInitModules();
}
}
Dart Code
const platform = const MethodChannel('samples.flutter.io/screen_record');
try {
final int result = await platform.invokeMethod('startScreenShare');
} on PlatformException catch (e) {}
setState(() {
});
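One way to send the message back (a sketch only; the onChannelJoin method name and the static channel handoff are assumptions, not Agora or Flutter API) is to keep the MethodChannel in a field and invoke a method on it from the native callback, on the UI thread:
// In MainActivity: keep the channel where the SDK activity can reach it
static MethodChannel sChannel;
// in onCreate:
sChannel = new MethodChannel(getFlutterView(), "samples.flutter.io/screen_record");
// In HelloAgoraScreenSharingActivity, e.g. inside onJoinChannelSuccess:
runOnUiThread(() -> {
if (MainActivity.sChannel != null) {
MainActivity.sChannel.invokeMethod("onChannelJoin", channel);
}
});
On the Dart side, register a handler with platform.setMethodCallHandler and react to the "onChannelJoin" call there.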

GWT: get image from server

I implemented it as given here, but it is not working.
My implementation is:
ServerSide:
File f = fileFromDatabase; // from the database; the file name is india.png
byte[] data = new byte[(int) f.length()]; // buffer to hold the file contents
DataInputStream din = new DataInputStream(new FileInputStream(f));
din.readFully(data);
din.close();
String base64 = Base64Utils.toBase64(data);
String[] s = filename.split("\\.");
base64 = "data:" + "india/png" + ";base64," + base64;
or
base64 = "data:image/png;base64," + base64;
return base64;
clientSide:
imageService.getImageData(new AsyncCallback<String>() {
@Override
public void onSuccess(String imageData) {
Image image = new Image(imageData);
Canvas.addChild(image);
// this Canvas class adds the item into com.smartgwt.client.widgets.Window
}
@Override
public void onFailure(Throwable caught) {
}
});
On the client side the imageData string is <image class="gwt-Image" src="some big string starting with data:image/png;base64,someString......">, yet the client still cannot see the image.
Please clear my doubt.
Thanks in advance.
When you get the base64 string from the server you need to add a load handler on the image in the DOM. The way I've done it is to attach it to the DOM and hide it as in the below code.
/** get events preview from server, attach to the DOM and store in 'preview'
* @param handler option to pass a custom load handler
*/
private void get_preview(LoadHandler handler) {
final LoadHandler load_handler = handler;
server.getPreviewImage(user, data, new AsyncCallback<String>() {
@Override
public void onFailure(Throwable caught) {
log.info("getPreviewImage: " + caught);
}
@Override
public void onSuccess(String result) {
preview = null;
if (result != null) {
ImageElement ie = doc.createImageElement();
preview = Image.wrap(ie);
preview.setVisible(false);
doc.getElementById("imagedummy").removeAllChildren();
doc.getElementById("imagedummy").appendChild(preview.getElement());
// add load handler to DOM image before being able to use
if (load_handler == null) {
preview.addLoadHandler(new LoadHandler() {
@Override
public void onLoad(LoadEvent event) {
display_preview();
}
});
} else {
preview.addLoadHandler(load_handler);
}
preview.setUrl(result);
}
}
});
}
/** Displays the preview on the canvas.
* Resizes canvas if necessary and sets zoom
*/
private void display_preview() {
EventSize size = data.getEventSize();
canvas.canvas.setCoordinateSpaceWidth(size.width);
canvas.canvas.setCoordinateSpaceHeight(size.height);
float zoom = Float.parseFloat(preview_zoom.getSelectedValue());
canvas.canvas.setPixelSize((int)(size.width * zoom), (int)(size.height * zoom));
if (preview != null) {
ImageElement elem = ImageElement.as(preview.getElement());
canvas.canvas.getContext2d().drawImage(elem, 0, 0);
}
}
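Note also that the server-side snippet in the question builds the data URI with "india/png", which is not a valid MIME type; the second variant ("data:image/png;base64,") is the correct form. A small sketch deriving the type from the file extension already split into s:
String mime = "image/" + s[s.length - 1]; // e.g. "image/png" for india.png
base64 = "data:" + mime + ";base64," + base64;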

Camera in Android app

I am creating an app that has to support API 15 through API 23 using the camera. What is the best way to implement the camera, given that the Camera class is deprecated in API 21 and android.hardware.camera2 cannot be used on versions lower than API 21?
The code below is something I have taken out of one of my projects. It has had a lot of stuff ripped out for the purpose of putting it on here, so you will have to edit it for your needs. It uses the original Camera API, which is backward compatible for your API range.
public class RecordGameKam extends Fragment
implements TextureView.SurfaceTextureListener, View.OnClickListener {
private final static String TAG = "CameraRecordTexture";
private Camera mCamera;
private TextureView mTextureView;
int numberOfCameras;
int defaultCameraId;
private boolean isRecording = false;
protected MediaRecorder mediaRecorder;
@SuppressWarnings("ConstantConditions")
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
View rootView = new RelativeLayout(getActivity());
mTextureView = new TextureView(getActivity());
mTextureView.setSurfaceTextureListener(this);
//View parameters-----------------------------------------------------------------------
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
rootView.setLayoutParams(params);
((ViewGroup) rootView).addView(mTextureView);
return rootView;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
// Find the total number of cameras available
numberOfCameras = Camera.getNumberOfCameras();
// Find the ID of the default camera
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
defaultCameraId = i;
}
}
try {
if (mCamera != null) {
//final Camera.Size previewSize = onMeasure();
//Camera.Size recorderSize = previewSize;
final Camera.Parameters params = mCamera.getParameters();
params.setPreviewSize(720, 480);
mCamera.setParameters(params);
mCamera.setDisplayOrientation(90);
mCamera.setPreviewTexture(surface);
mCamera.startPreview();
startContinuousAutoFocus();
}
} catch (IOException ioe) {
// Something bad happened
mCamera.release();
mCamera = null;
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Ignored, Camera does all the work for us
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
try {
if (getActivity().getActionBar() != null) {
getActivity().getActionBar().show();
}
} catch (Exception e) {
e.printStackTrace();
}
releaseMediaRecorder();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Invoked every time there's a new Camera preview frame
}
private boolean setMediaRecorder() throws IllegalStateException {
try {
//Create a new instance of MediaRecorder.
mediaRecorder = new MediaRecorder();
//Unlock and set camera to Media recorder
mCamera.unlock();
mediaRecorder.setCamera(mCamera);
//Configure audio/video input
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
CamcorderProfile profile = null;
if (CamcorderProfile.hasProfile(CamcorderProfile.QUALITY_480P)) {
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
}
if (profile != null) {
mediaRecorder.setProfile(profile);
}
//Change orientation (90 - 180 + 360 = 270 degrees)
mediaRecorder.setOrientationHint(90 - 180 + 360);
mediaRecorder.setOutputFile(getFilename());
} catch (Exception e) {
e.printStackTrace();
}
//Attempt to prepare the configuration and record video.
try {
button.setBackgroundResource(R.drawable.camera_pressed); // 'button' is among the pieces ripped out of the original project
mediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
mediaRecorder.release();
return false;
}
return true;
}
boolean startContinuousAutoFocus() {
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
assert focusModes != null;
String CAF_PICTURE = Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,
CAF_VIDEO = Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,
supportedMode = focusModes
.contains(CAF_PICTURE) ? CAF_PICTURE : focusModes
.contains(CAF_VIDEO) ? CAF_VIDEO : "";
if (!supportedMode.equals("")) {
params.setFocusMode(supportedMode);
mCamera.setParameters(params);
return true;
}
return false;
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
super.onPause();
}
@Override
public void onDestroy() {
super.onDestroy();
}
}
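If you do want the newer features where they exist, the usual alternative is to branch on the OS version at runtime; a minimal sketch of that standard pattern (not taken from the answer above):
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// API 21+: use android.hardware.camera2 (CameraManager, CameraDevice, ...)
} else {
// API 15-20: fall back to the deprecated android.hardware.Camera, as in the code above
}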

Current location not displayed accurately by the Google location API

When I run this code it gives the current location after I click button_currentlocation. But when I check the accuracy, it is very low (sometimes 5000 m). I need to get the current location one time with high accuracy (around 10 m).
If someone can help me correct my code, it will be a great help for my research.
My code is as follows.
In my manifest:
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
LocationProvider Class
public class LocationProvider implements
GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener,
LocationListener {
public abstract interface LocationCallback {
public void handleNewLocation(Location location);
}
public static final String TAG = LocationProvider.class.getSimpleName();
/*
* Define a request code to send to Google Play services
* This code is returned in Activity.onActivityResult
*/
private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;
private LocationCallback mLocationCallback;
private Context mContext;
private GoogleApiClient mGoogleApiClient;
private LocationRequest mLocationRequest;
public LocationProvider(Context context, LocationCallback callback) {
mGoogleApiClient = new GoogleApiClient.Builder(context)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(LocationServices.API)
.build();
mLocationCallback = callback;
// Create the LocationRequest object
mLocationRequest = LocationRequest.create()
.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY)
.setInterval(10 * 1000) // 10 seconds, in milliseconds
.setFastestInterval(1 * 1000); // 1 second, in milliseconds
mContext = context;
}
public void connect() {
mGoogleApiClient.connect();
}
public void disconnect() {
if (mGoogleApiClient.isConnected()) {
LocationServices.FusedLocationApi.removeLocationUpdates(mGoogleApiClient, this);
mGoogleApiClient.disconnect();
}
}
@Override
public void onConnected(Bundle bundle) {
Log.i(TAG, "Location services connected.");
Location location = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
if (location == null) {
LocationServices.FusedLocationApi.requestLocationUpdates(mGoogleApiClient, mLocationRequest, this);
}
else {
mLocationCallback.handleNewLocation(location);
}
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
/*
* Google Play services can resolve some errors it detects.
* If the error has a resolution, try sending an Intent to
* start a Google Play services activity that can resolve
* error.
*/
if (connectionResult.hasResolution() && mContext instanceof Activity) {
try {
Activity activity = (Activity)mContext;
// Start an Activity that tries to resolve the error
connectionResult.startResolutionForResult(activity, CONNECTION_FAILURE_RESOLUTION_REQUEST);
/*
* Thrown if Google Play services canceled the original
* PendingIntent
*/
} catch (IntentSender.SendIntentException e) {
// Log the error
e.printStackTrace();
}
} else {
/*
* If no resolution is available, display a dialog to the
* user with the error.
*/
Log.i(TAG, "Location services connection failed with code " + connectionResult.getErrorCode());
}
}
@Override
public void onLocationChanged(Location location) {
mLocationCallback.handleNewLocation(location);
}
}
In HomePage Class
package com.ksfr.finaltest01;
import android.app.Activity;
import android.location.Location;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.util.Log;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.Spinner;
import android.widget.Toast;
import com.google.android.gms.maps.model.LatLng;
public class HomePage extends Activity implements AdapterView.OnItemSelectedListener,LocationProvider.LocationCallback {
public static final String TAG = HomePage.class.getSimpleName();
Button button_disFinder;
private LocationProvider locationProvider;
String Provider,Str_endLocation;
double cur_latitude, cur_longitude,cur_accuracy, end_latitude, end_longitude;
float distance_cur_to_end;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_page);
locationProvider = new LocationProvider(this,this);
Spinner spinner = (Spinner) findViewById(R.id.spinner_schools);
ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
R.array.school_array, android.R.layout.simple_spinner_item);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
spinner.setAdapter(adapter);
spinner.setOnItemSelectedListener(this);
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show();
}
});
}
public void CurrentLocationClicked(View view) {
if (cur_latitude!=0&&cur_longitude != 0 ){
Spinner spinner = (Spinner) findViewById(R.id.spinner_schools);
spinner.setClickable(true);
String message= String.format("Current Location\n" + "Latitude :" + cur_latitude + "\nLongitude :" + cur_longitude+"\nAccuracy :"+cur_accuracy+"\nProvider :"+Provider);
Toast.makeText(HomePage.this, message, Toast.LENGTH_LONG).show();
}
else{
String message= String.format("Location services disconnected.\nSwich ON Location Service to work App.");
Toast.makeText(HomePage.this, message, Toast.LENGTH_LONG).show();
//System.exit(0);
}
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(position!= 0) {
Str_endLocation=String.valueOf(parent.getItemAtPosition(position));
Toast.makeText(getApplicationContext(), Str_endLocation + " Selected", Toast.LENGTH_SHORT).show();
switch (Str_endLocation){
//end location latitude and longitude will be taken from here.
case "Kahagolla National School":
end_latitude = 6.816703;end_longitude = 80.9637076;
break;
}
String message3= String.format("End Location\n"+"Latitude :"+end_latitude+"\nLongitude :"+end_longitude);//For Testing
Toast.makeText(HomePage.this, message3, Toast.LENGTH_LONG).show();//For Testing
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
public void FindDistanceClicked (View view){
Location curlocation = new Location("");
curlocation.setLatitude(cur_latitude);
curlocation.setLongitude(cur_longitude);
Location endlocation = new Location("");
endlocation.setLatitude(end_latitude);
endlocation.setLongitude(end_longitude);
distance_cur_to_end = curlocation.distanceTo(endlocation)/1000;
String message5= String.format("Distance from current location to\n" + Str_endLocation + " :" + String.format("%.3g%n", distance_cur_to_end)+"km");//For testing
Toast.makeText(HomePage.this, message5, Toast.LENGTH_LONG).show();//For testing
}
public void handleNewLocation(Location location) {
Log.d(TAG, location.toString());
cur_latitude = location.getLatitude();
cur_longitude = location.getLongitude();
cur_accuracy = location.getAccuracy();
Provider = location.getProvider();
LatLng cur_latLng = new LatLng(cur_latitude, cur_longitude);
}
@Override
protected void onResume() {
super.onResume();
locationProvider.connect();
}
@Override
protected void onPause() {
super.onPause();
locationProvider.disconnect();
}
public void ClearClicked (View view){
}
}
Try reversing the location code in your onConnected method.
First try to get the current location, and if that is not available, then try getting the last known location.
Something like this:
if (location == null) {
// MIN_TIME_BW_UPDATES and MIN_DISTANCE_CHANGE_FOR_UPDATES are constants you define yourself
locationManager.requestLocationUpdates(
LocationManager.GPS_PROVIDER,
MIN_TIME_BW_UPDATES,
MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
if(locationManager != null) {
location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
if(location != null) {
latitude = location.getLatitude();
longitude = location.getLongitude();
}
}
}
I finally corrected my issue.
With this method I got locations with 3 m accuracy, which is what I wanted.
Here's the code:
DistanceFinderActivity.java
private static final long MINIMUM_DISTANCE_CHANGE_FOR_UPDATES = 1; // in Meters
private static final long MINIMUM_TIME_BETWEEN_UPDATES = 5*1000; // in Milliseconds
protected LocationManager locationManager;
double cur_latitude, cur_longitude,cur_accuracy, end_latitude, end_longitude;
float distance_cur_to_end;
public LatLng cur_latLng,end_latLng;
MyLocationListener myLocationListener =new MyLocationListener();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main_distance_finder);
locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
locationManager.requestLocationUpdates(
LocationManager.GPS_PROVIDER,
MINIMUM_TIME_BETWEEN_UPDATES,
MINIMUM_DISTANCE_CHANGE_FOR_UPDATES,
myLocationListener
);
//......
}
protected void showCurrentLocation() {
Location location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
if (location != null) {
cur_latitude = location.getLatitude();
cur_longitude = location.getLongitude();
cur_accuracy = location.getAccuracy();
Provider = location.getProvider();
DecimalFormat df = new DecimalFormat("#.###");
df.setMinimumFractionDigits(2);
String message = String.format(
"Current Location \nLatitude: %1$s\nLongitude: %2$s\nAccuracy: %3$s\n" +
" Provider: %4$s\nWait until new location capture",
cur_latitude, cur_longitude,String.valueOf(df.format(cur_accuracy)),Provider.toUpperCase()
);
Toast.makeText(MainDistanceFinderActivity.this, message,
Toast.LENGTH_LONG).show();
}
}
private class MyLocationListener implements LocationListener {
public void onLocationChanged(Location location) {
cur_latitude = location.getLatitude();
cur_longitude = location.getLongitude();
cur_accuracy = location.getAccuracy();
Provider = location.getProvider();
DecimalFormat df = new DecimalFormat("#.###");
df.setMinimumFractionDigits(2);
String message = String.format(
"New Location Captured. \nLatitude: %1$s\nLongitude: %2$s\nAccuracy: %3$s\n" +
" Provider: %4$s",
cur_latitude, cur_longitude,String.valueOf(df.format(cur_accuracy)), Provider.toUpperCase()
);
Toast.makeText(MainDistanceFinderActivity.this, message, Toast.LENGTH_LONG).show();
}
public void onStatusChanged(String s, int i, Bundle b) {
Toast.makeText(MainDistanceFinderActivity.this, "Provider status changed",
Toast.LENGTH_LONG).show();
}
public void onProviderDisabled(String s) {
Toast.makeText(MainDistanceFinderActivity.this,
"Provider disabled by the user. GPS turned off",
Toast.LENGTH_LONG).show();
}
public void onProviderEnabled(String s) {
Toast.makeText(MainDistanceFinderActivity.this,
"Provider enabled by the user. GPS turned on",
Toast.LENGTH_LONG).show();
}
}
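A common refinement of this approach (a sketch, not part of the original answer) is to keep listening until a fix meets the roughly 10 m requirement from the question, then stop updates, e.g. inside MyLocationListener.onLocationChanged:
if (location.getAccuracy() <= 10f) { // good enough: about 10 m
locationManager.removeUpdates(this); // one-time capture: stop listening
// use this high-accuracy location
}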

How to move the graph viewport when adding time-based items to a series at runtime?

I'm trying to create a graph which displays the last n seconds of data arriving over Bluetooth. I want to add to the series from within a timer by adding a DataPoint with a Date as the X value. All I get is a white screen. I think the problem lies with the values used in graph.getViewport().setMinX().
What values should I set for minX and maxX if I want to display the last n seconds of data?
public class MainActivity extends ActionBarActivity {
private Handler mHandler;
private GraphView graph;
LineGraphSeries<DataPoint> serFl = new LineGraphSeries<DataPoint>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initGraph();
mHandler = new Handler();
startRepeatingTask();
}
private void initGraph() {
try {
graph = (GraphView) findViewById(R.id.graph);
serFl.setColor(Color.BLUE);
graph.addSeries(serFl);
graph.getViewport().setXAxisBoundsManual(true);
graph.getViewport().setMinX(System.currentTimeMillis() - 10000);
graph.getViewport().setMaxX(System.currentTimeMillis() + 1000);
graph.getViewport().setYAxisBoundsManual(true);
graph.getViewport().setMinY(0);
graph.getViewport().setMaxY(20);
final SimpleDateFormat sdf = new SimpleDateFormat("mm:ss");
graph.getGridLabelRenderer().setLabelFormatter(
new DefaultLabelFormatter() {
@Override
public String formatLabel(double value, boolean isValueX) {
if (isValueX) {
Date d = new Date((long) value);
return sdf.format(d);
} else {
// show currency for y values
return super.formatLabel(value, isValueX);
}
}
});
} catch (Exception x) {
System.err.println(x);
}
}
Runnable mStatusChecker = new Runnable() {
@Override
public void run() {
updateStatus();
mHandler.postDelayed(mStatusChecker, mInterval);
}
};
void startRepeatingTask() {
mStatusChecker.run();
}
protected void updateStatus() {
DataMessage msg = source.fetch();
long d = msg.getTimestamp();
serFl.appendData(new DataPoint(d, msg.getFl()), true,
settings.getMaxSeriesSize());
}
void stopRepeatingTask() {
mHandler.removeCallbacks(mStatusChecker);
}
}
You did not call
graph.onDataChanged(false, false);
after setting graph.getViewport().setMaxY(20);.
This helped in my case.
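Putting that together with the question's timer, a minimal sketch (all names are from the question's code) that slides the manual X bounds each time a point arrives, so the viewport always shows the last 10 seconds:
protected void updateStatus() {
DataMessage msg = source.fetch();
long d = msg.getTimestamp();
serFl.appendData(new DataPoint(d, msg.getFl()), true, settings.getMaxSeriesSize());
// move the window to [now - 10 s, now] and redraw
graph.getViewport().setMinX(d - 10000);
graph.getViewport().setMaxX(d);
graph.onDataChanged(false, false);
}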