iceCandidates) {
+ this.iceServers = iceServers;
+ this.initiator = initiator;
+ this.clientId = clientId;
+ this.wssUrl = wssUrl;
+ this.wssPostUrl = wssPostUrl;
+ this.offerSdp = offerSdp;
+ this.iceCandidates = iceCandidates;
+ }
+ }
+
+ /**
+ * Callback interface for messages delivered on signaling channel.
+ *
+ * Methods are guaranteed to be invoked on the UI thread of |activity|.
+ */
+ interface SignalingEvents {
+ /**
+ * Callback fired once the room's signaling parameters
+ * SignalingParameters are extracted.
+ */
+ void onConnectedToRoom(final SignalingParameters params);
+
+ /**
+ * Callback fired once remote SDP is received.
+ */
+ void onRemoteDescription(final SessionDescription sdp);
+
+ /**
+ * Callback fired once remote Ice candidate is received.
+ */
+ void onRemoteIceCandidate(final IceCandidate candidate);
+
+ /**
+ * Callback fired once remote Ice candidate removals are received.
+ */
+ void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);
+
+ /**
+ * Callback fired once channel is closed.
+ */
+ void onChannelClose();
+
+ /**
+ * Callback fired once channel error happened.
+ */
+ void onChannelError(final String description);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/AppRTCProximitySensor.java b/app/src/main/java/com/myhexaville/androidwebrtc/AppRTCProximitySensor.java
new file mode 100644
index 0000000..aee3934
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/AppRTCProximitySensor.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.os.Build;
+import android.util.Log;
+
+import com.myhexaville.androidwebrtc.util.AppRTCUtils;
+
+import org.webrtc.ThreadUtils;
+
+/**
+ * AppRTCProximitySensor manages functions related to the proximity sensor in
+ * the AppRTC demo.
+ * On most devices, the proximity sensor is implemented as a boolean-sensor.
+ * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
+ * value i.e. the LUX value of the light sensor is compared with a threshold.
+ * A LUX-value more than the threshold means the proximity sensor returns "FAR".
+ * Anything less than the threshold value and the sensor returns "NEAR".
+ */
+public class AppRTCProximitySensor implements SensorEventListener {
+ private static final String TAG = "AppRTCProximitySensor";
+
+ // This class should be created, started and stopped on one thread
+ // (e.g. the main thread). We use |threadChecker| to ensure that this is
+ // the case; each public method asserts the calling thread at runtime.
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private final Runnable onSensorStateListener;
+ private final SensorManager sensorManager;
+ private Sensor proximitySensor = null;
+ private boolean lastStateReportIsNear = false;
+
+ /** Static factory; constructs an AppRTCProximitySensor. */
+ static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
+ return new AppRTCProximitySensor(context, sensorStateListener);
+ }
+
+ private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
+ Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
+ onSensorStateListener = sensorStateListener;
+ sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
+ }
+
+ /**
+ * Activate the proximity sensor. Also do initialization if called for the
+ * first time.
+ */
+ public boolean start() {
+ threadChecker.checkIsOnValidThread();
+ Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
+ if (!initDefaultSensor()) {
+ // Proximity sensor is not supported on this device.
+ return false;
+ }
+ sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
+ return true;
+ }
+
+ /** Deactivate the proximity sensor. */
+ public void stop() {
+ threadChecker.checkIsOnValidThread();
+ Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
+ if (proximitySensor == null) {
+ return;
+ }
+ sensorManager.unregisterListener(this, proximitySensor);
+ }
+
+ /** Getter for last reported state. Set to true if "near" is reported. */
+ public boolean sensorReportsNearState() {
+ threadChecker.checkIsOnValidThread();
+ return lastStateReportIsNear;
+ }
+
+ @Override
+ public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+ threadChecker.checkIsOnValidThread();
+ AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
+ if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
+ Log.e(TAG, "The values returned by this sensor cannot be trusted");
+ }
+ }
+
+ @Override
+ public final void onSensorChanged(SensorEvent event) {
+ threadChecker.checkIsOnValidThread();
+ AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
+ // As a best practice, do as little as possible within this method and
+ // avoid blocking.
+ float distanceInCentimeters = event.values[0];
+ if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
+ Log.d(TAG, "Proximity sensor => NEAR state");
+ lastStateReportIsNear = true;
+ } else {
+ Log.d(TAG, "Proximity sensor => FAR state");
+ lastStateReportIsNear = false;
+ }
+
+ // Report about new state to listening client. Client can then call
+ // sensorReportsNearState() to query the current state (NEAR or FAR).
+ if (onSensorStateListener != null) {
+ onSensorStateListener.run();
+ }
+
+ Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+ + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+ + event.values[0]);
+ }
+
+ /**
+ * Get the default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
+ * do not support this type of sensor, and false will be returned in such
+ * cases.
+ */
+ private boolean initDefaultSensor() {
+ if (proximitySensor != null) {
+ return true;
+ }
+ proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
+ if (proximitySensor == null) {
+ return false;
+ }
+ logProximitySensorInfo();
+ return true;
+ }
+
+ /** Helper method for logging information about the proximity sensor. */
+ private void logProximitySensorInfo() {
+ if (proximitySensor == null) {
+ return;
+ }
+ StringBuilder info = new StringBuilder("Proximity sensor: ");
+ info.append("name=").append(proximitySensor.getName());
+ info.append(", vendor: ").append(proximitySensor.getVendor());
+ info.append(", power: ").append(proximitySensor.getPower());
+ info.append(", resolution: ").append(proximitySensor.getResolution());
+ info.append(", max range: ").append(proximitySensor.getMaximumRange());
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
+ // Added in API level 9.
+ info.append(", min delay: ").append(proximitySensor.getMinDelay());
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
+ // Added in API level 20.
+ info.append(", type: ").append(proximitySensor.getStringType());
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ // Added in API level 21.
+ info.append(", max delay: ").append(proximitySensor.getMaxDelay());
+ info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
+ info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
+ }
+ Log.d(TAG, info.toString());
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/CallActivity.java b/app/src/main/java/com/myhexaville/androidwebrtc/CallActivity.java
new file mode 100644
index 0000000..9f61476
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/CallActivity.java
@@ -0,0 +1,903 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.FragmentTransaction;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.support.v7.app.AppCompatActivity;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+import android.view.WindowManager.LayoutParams;
+import android.widget.Toast;
+
+import com.myhexaville.androidwebrtc.AppRTCClient.RoomConnectionParameters;
+import com.myhexaville.androidwebrtc.AppRTCClient.SignalingParameters;
+import com.myhexaville.androidwebrtc.PeerConnectionClient.DataChannelParameters;
+import com.myhexaville.androidwebrtc.PeerConnectionClient.PeerConnectionParameters;
+
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
+import org.webrtc.EglBase;
+import org.webrtc.FileVideoCapturer;
+import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RendererCommon.ScalingType;
+import org.webrtc.ScreenCapturerAndroid;
+import org.webrtc.SessionDescription;
+import org.webrtc.StatsReport;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoFileRenderer;
+import org.webrtc.VideoRenderer;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Activity for peer connection call setup, call waiting
+ * and call view.
+ */
+public class CallActivity extends AppCompatActivity implements AppRTCClient.SignalingEvents,
+ PeerConnectionClient.PeerConnectionEvents,
+ CallFragment.OnCallEvents {
+ private static final String LOG_TAG = "CallActivity";
+ public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
+ public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
+ public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
+ public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
+ public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
+ public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
+ public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
+ public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
+ public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
+ "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
+ public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
+ public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
+ public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
+ public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
+ public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
+ public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
+ public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
+ public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
+ "org.appspot.apprtc.NOAUDIOPROCESSING";
+ public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
+ public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
+ public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
+ public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
+ public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
+ public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
+ public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
+ public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
+ public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
+ public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
+ public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
+ public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
+ "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
+ public static final String EXTRA_USE_VALUES_FROM_INTENT =
+ "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
+ public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
+ public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
+ public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
+ public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
+ public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
+ public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
+ public static final String EXTRA_ID = "org.appspot.apprtc.ID";
+
+ private static final String TAG = "CallRTCClient";
+ private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
+
+ // List of mandatory application permissions.
+ private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
+ "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
+
+ // Peer connection statistics callback period in ms.
+ private static final int STAT_CALLBACK_PERIOD = 1000;
+ // Local preview screen position before call is connected.
+ private static final int LOCAL_X_CONNECTING = 0;
+ private static final int LOCAL_Y_CONNECTING = 0;
+ private static final int LOCAL_WIDTH_CONNECTING = 100;
+ private static final int LOCAL_HEIGHT_CONNECTING = 100;
+ // Local preview screen position after call is connected.
+ private static final int LOCAL_X_CONNECTED = 72;
+ private static final int LOCAL_Y_CONNECTED = 72;
+ private static final int LOCAL_WIDTH_CONNECTED = 25;
+ private static final int LOCAL_HEIGHT_CONNECTED = 25;
+ // Remote video screen position
+ private static final int REMOTE_X = 0;
+ private static final int REMOTE_Y = 0;
+ private static final int REMOTE_WIDTH = 100;
+ private static final int REMOTE_HEIGHT = 100;
+ private PeerConnectionClient peerConnectionClient = null;
+ private AppRTCClient appRtcClient;
+ private SignalingParameters signalingParameters;
+ private AppRTCAudioManager audioManager = null;
+ private EglBase rootEglBase;
+ private SurfaceViewRenderer localRender;
+ private SurfaceViewRenderer remoteRenderScreen;
+ private VideoFileRenderer videoFileRenderer;
+ private final List remoteRenderers =
+ new ArrayList();
+ private PercentFrameLayout localRenderLayout;
+ private PercentFrameLayout remoteRenderLayout;
+ private ScalingType scalingType;
+ private Toast logToast;
+ private boolean commandLineRun;
+ private int runTimeMs;
+ private boolean activityRunning;
+ private RoomConnectionParameters roomConnectionParameters;
+ private PeerConnectionParameters peerConnectionParameters;
+ private boolean iceConnected;
+ private boolean isError;
+ private boolean callControlFragmentVisible = true;
+ private long callStartedTimeMs = 0;
+ private boolean micEnabled = true;
+ private boolean screencaptureEnabled = false;
+ private static Intent mediaProjectionPermissionResultData;
+ private static int mediaProjectionPermissionResultCode;
+
+ // Controls
+ private CallFragment callFragment;
+ private HudFragment hudFragment;
+ private CpuMonitor cpuMonitor;
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
+
+ Log.d(LOG_TAG, "onCreate: ");
+
+ // Set window styles for fullscreen-window size. Needs to be done before
+ // adding content.
+ requestWindowFeature(Window.FEATURE_NO_TITLE);
+ getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
+ | LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
+ | LayoutParams.FLAG_TURN_SCREEN_ON);
+ getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+ setContentView(R.layout.activity_call);
+
+ iceConnected = false;
+ signalingParameters = null;
+ scalingType = ScalingType.SCALE_ASPECT_FILL;
+
+ // Create UI controls.
+ localRender = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
+ remoteRenderScreen = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
+ localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
+ remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
+ callFragment = new CallFragment();
+ hudFragment = new HudFragment();
+
+ // Show/hide call control fragment on view click.
+ View.OnClickListener listener = new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ toggleCallControlFragmentVisibility();
+ }
+ };
+
+ localRender.setOnClickListener(listener);
+ remoteRenderScreen.setOnClickListener(listener);
+ remoteRenderers.add(remoteRenderScreen);
+
+ final Intent intent = getIntent();
+
+ // Create video renderers.
+ rootEglBase = EglBase.create();
+ localRender.init(rootEglBase.getEglBaseContext(), null);
+ String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+
+ // When saveRemoteVideoToFile is set we save the video from the remote to a file.
+ if (saveRemoteVideoToFile != null) {
+ int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+ int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+ try {
+ videoFileRenderer = new VideoFileRenderer(
+ saveRemoteVideoToFile, videoOutWidth, videoOutHeight, rootEglBase.getEglBaseContext());
+ remoteRenderers.add(videoFileRenderer);
+ } catch (IOException e) {
+ throw new RuntimeException(
+ "Failed to open video file for output: " + saveRemoteVideoToFile, e);
+ }
+ }
+ remoteRenderScreen.init(rootEglBase.getEglBaseContext(), null);
+
+ localRender.setZOrderMediaOverlay(true);
+ localRender.setEnableHardwareScaler(true /* enabled */);
+ remoteRenderScreen.setEnableHardwareScaler(true /* enabled */);
+ updateVideoView();
+
+ // Check for mandatory permissions.
+ for (String permission : MANDATORY_PERMISSIONS) {
+ if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
+ logAndToast("Permission " + permission + " is not granted");
+ setResult(RESULT_CANCELED);
+ finish();
+ return;
+ }
+ }
+
+ Uri roomUri = intent.getData();
+ if (roomUri == null) {
+ logAndToast(getString(R.string.missing_url));
+ Log.e(TAG, "Didn't get any URL in intent!");
+ setResult(RESULT_CANCELED);
+ finish();
+ return;
+ }
+
+ // Get Intent parameters.
+ String roomId = intent.getStringExtra(EXTRA_ROOMID);
+ Log.d(TAG, "Room ID: " + roomId);
+ if (roomId == null || roomId.length() == 0) {
+ logAndToast(getString(R.string.missing_url));
+ Log.e(TAG, "Incorrect room ID in intent!");
+ setResult(RESULT_CANCELED);
+ finish();
+ return;
+ }
+
+ boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
+ boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
+
+ int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
+ int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);
+
+ screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
+ // If capturing format is not specified for screencapture, use screen resolution.
+ if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
+ DisplayMetrics displayMetrics = new DisplayMetrics();
+ WindowManager windowManager =
+ (WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
+ windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
+ videoWidth = displayMetrics.widthPixels;
+ videoHeight = displayMetrics.heightPixels;
+ }
+ DataChannelParameters dataChannelParameters = null;
+ if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, true)) {
+ dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
+ intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
+ intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
+ intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
+ }
+ peerConnectionParameters =
+ new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
+ tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
+ intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
+ intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+ intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
+ intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
+ intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
+ intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+ intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
+ intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
+ intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
+ intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
+ intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false), dataChannelParameters);
+ commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
+ runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
+
+ Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
+
+ // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
+ // standard WebSocketRTCClient.
+ if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
+ appRtcClient = new WebSocketRTCClient(this);
+ } else {
+ Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
+ appRtcClient = new DirectRTCClient(this);
+ }
+ // Create connection parameters.
+ roomConnectionParameters = new RoomConnectionParameters(roomUri.toString(), roomId, loopback);
+
+ // Create CPU monitor
+ cpuMonitor = new CpuMonitor(this);
+ hudFragment.setCpuMonitor(cpuMonitor);
+
+ // Send intent arguments to fragments.
+ callFragment.setArguments(intent.getExtras());
+ hudFragment.setArguments(intent.getExtras());
+ // Activate call and HUD fragments and start the call.
+ FragmentTransaction ft = getFragmentManager().beginTransaction();
+ ft.add(R.id.call_fragment_container, callFragment);
+ ft.add(R.id.hud_fragment_container, hudFragment);
+ ft.commit();
+
+ // For command line execution run connection for |runTimeMs| and exit.
+ if (commandLineRun && runTimeMs > 0) {
+ (new Handler()).postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ disconnect();
+ }
+ }, runTimeMs);
+ }
+
+ peerConnectionClient = PeerConnectionClient.getInstance();
+ if (loopback) {
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ peerConnectionClient.setPeerConnectionFactoryOptions(options);
+ }
+ peerConnectionClient.createPeerConnectionFactory(
+ CallActivity.this, peerConnectionParameters, CallActivity.this);
+
+ if (screencaptureEnabled) {
+ MediaProjectionManager mediaProjectionManager =
+ (MediaProjectionManager) getApplication().getSystemService(
+ Context.MEDIA_PROJECTION_SERVICE);
+ startActivityForResult(
+ mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
+ } else {
+ startCall();
+ }
+ }
+
+ @Override
+ public void onActivityResult(int requestCode, int resultCode, Intent data) {
+ if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
+ return;
+ mediaProjectionPermissionResultCode = resultCode;
+ mediaProjectionPermissionResultData = data;
+ startCall();
+ }
+
+ private boolean useCamera2() {
+ return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
+ }
+
+ private boolean captureToTexture() {
+ return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
+ }
+
+ private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+ final String[] deviceNames = enumerator.getDeviceNames();
+
+ // First, try to find front facing camera
+ Logging.d(TAG, "Looking for front facing cameras.");
+ for (String deviceName : deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ Logging.d(TAG, "Creating front facing camera capturer.");
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ // Front facing camera not found, try something else
+ Logging.d(TAG, "Looking for other cameras.");
+ for (String deviceName : deviceNames) {
+ if (!enumerator.isFrontFacing(deviceName)) {
+ Logging.d(TAG, "Creating other camera capturer.");
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ // Activity interfaces
+ @Override
+ public void onPause() {
+ super.onPause();
+ activityRunning = false;
+ // Don't stop the video when using screencapture to allow user to show other apps to the remote
+ // end.
+ if (peerConnectionClient != null && !screencaptureEnabled) {
+ peerConnectionClient.stopVideoSource();
+ }
+ cpuMonitor.pause();
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ activityRunning = true;
+ // Video is not paused for screencapture. See onPause.
+ if (peerConnectionClient != null && !screencaptureEnabled) {
+ peerConnectionClient.startVideoSource();
+ }
+ cpuMonitor.resume();
+ }
+
+ @Override
+ protected void onDestroy() {
+ disconnect();
+ if (logToast != null) {
+ logToast.cancel();
+ }
+ activityRunning = false;
+ rootEglBase.release();
+ super.onDestroy();
+ }
+
+ // CallFragment.OnCallEvents interface implementation.
+ @Override
+ public void onCallHangUp() {
+ disconnect();
+ }
+
+ @Override
+ public void onCameraSwitch() {
+ if (peerConnectionClient != null) {
+ peerConnectionClient.switchCamera();
+ }
+ }
+
+ @Override
+ public void onVideoScalingSwitch(ScalingType scalingType) {
+ this.scalingType = scalingType;
+ updateVideoView();
+ }
+
+ @Override
+ public void onCaptureFormatChange(int width, int height, int framerate) {
+ if (peerConnectionClient != null) {
+ peerConnectionClient.changeCaptureFormat(width, height, framerate);
+ }
+ }
+
+ @Override
+ public boolean onToggleMic() {
+ if (peerConnectionClient != null) {
+ micEnabled = !micEnabled;
+ peerConnectionClient.setAudioEnabled(micEnabled);
+ }
+ return micEnabled;
+ }
+
+ // Helper functions.
+ private void toggleCallControlFragmentVisibility() {
+ if (!iceConnected || !callFragment.isAdded()) {
+ return;
+ }
+ // Show/hide call control fragment
+ callControlFragmentVisible = !callControlFragmentVisible;
+ FragmentTransaction ft = getFragmentManager().beginTransaction();
+ if (callControlFragmentVisible) {
+ ft.show(callFragment);
+ ft.show(hudFragment);
+ } else {
+ ft.hide(callFragment);
+ ft.hide(hudFragment);
+ }
+ ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
+ ft.commit();
+ }
+
+ private void updateVideoView() {
+ remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
+ remoteRenderScreen.setScalingType(scalingType);
+ remoteRenderScreen.setMirror(false);
+
+ if (iceConnected) {
+ localRenderLayout.setPosition(
+ LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED, LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED);
+ localRender.setScalingType(ScalingType.SCALE_ASPECT_FIT);
+ } else {
+ localRenderLayout.setPosition(
+ LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING, LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING);
+ localRender.setScalingType(scalingType);
+ }
+ localRender.setMirror(true);
+
+ localRender.requestLayout();
+ remoteRenderScreen.requestLayout();
+ }
+
+ private void startCall() {
+ if (appRtcClient == null) {
+ Log.e(TAG, "AppRTC client is not allocated for a call.");
+ return;
+ }
+ callStartedTimeMs = System.currentTimeMillis();
+
+ // Start room connection.
+ logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
+ appRtcClient.connectToRoom(roomConnectionParameters);
+
+ // Create an audio manager that will take care of audio routing,
+ // audio modes, audio device enumeration etc.
+ audioManager = AppRTCAudioManager.create(this);
+ // Store existing audio settings and change audio mode to
+ // MODE_IN_COMMUNICATION for best possible VoIP performance.
+ Log.d(TAG, "Starting the audio manager...");
+ audioManager.start(new AppRTCAudioManager.AudioManagerEvents() {
+ // This method will be called each time the number of available audio
+ // devices has changed.
+ @Override
+ public void onAudioDeviceChanged(
+ AppRTCAudioManager.AudioDevice audioDevice, Set availableAudioDevices) {
+ onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
+ }
+ });
+ }
+
+ // Should be called from UI thread
+ private void callConnected() {
+ final long delta = System.currentTimeMillis() - callStartedTimeMs;
+ Log.i(TAG, "Call connected: delay=" + delta + "ms");
+ if (peerConnectionClient == null || isError) {
+ Log.w(TAG, "Call is connected in closed or error state");
+ return;
+ }
+ // Update video view.
+ updateVideoView();
+ // Enable statistics callback.
+ peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
+ }
+
+ // This method is called when the audio manager reports audio device change,
+ // e.g. from wired headset to speakerphone.
+ private void onAudioManagerDevicesChanged(
+ final AppRTCAudioManager.AudioDevice device, final Set availableDevices) {
+ Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
+ + "selected: " + device);
+ // TODO(henrika): add callback handler.
+ }
+
+ // Disconnect from remote resources, dispose of local resources, and exit.
+ private void disconnect() {
+ activityRunning = false;
+ if (appRtcClient != null) {
+ appRtcClient.disconnectFromRoom();
+ appRtcClient = null;
+ }
+ if (peerConnectionClient != null) {
+ peerConnectionClient.close();
+ peerConnectionClient = null;
+ }
+ if (localRender != null) {
+ localRender.release();
+ localRender = null;
+ }
+ if (videoFileRenderer != null) {
+ videoFileRenderer.release();
+ videoFileRenderer = null;
+ }
+ if (remoteRenderScreen != null) {
+ remoteRenderScreen.release();
+ remoteRenderScreen = null;
+ }
+ if (audioManager != null) {
+ audioManager.stop();
+ audioManager = null;
+ }
+ if (iceConnected && !isError) {
+ setResult(RESULT_OK);
+ } else {
+ setResult(RESULT_CANCELED);
+ }
+ finish();
+ }
+
+ /**
+  * Reports a fatal error to the user. When the activity is visible (and not a
+  * command-line run) a modal dialog is shown and teardown happens on dismiss;
+  * otherwise the error is logged and teardown happens immediately.
+  */
+ private void disconnectWithErrorMessage(final String errorMessage) {
+   final boolean canShowDialog = activityRunning && !commandLineRun;
+   if (!canShowDialog) {
+     Log.e(TAG, "Critical error: " + errorMessage);
+     disconnect();
+     return;
+   }
+   new AlertDialog.Builder(this)
+       .setTitle(getText(R.string.channel_error_title))
+       .setMessage(errorMessage)
+       .setCancelable(false)
+       .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
+         @Override
+         public void onClick(DialogInterface dialog, int id) {
+           dialog.cancel();
+           disconnect();
+         }
+       })
+       .create()
+       .show();
+ }
+
+ // Log |msg| and Toast about it. Any toast still on screen is cancelled first
+ // so messages never queue up behind each other.
+ private void logAndToast(String msg) {
+   Log.d(TAG, msg);
+   Toast previous = logToast;
+   if (previous != null) {
+     previous.cancel();
+   }
+   logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
+   logToast.show();
+ }
+
+ private void reportError(final String description) {
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ if (!isError) {
+ isError = true;
+ disconnectWithErrorMessage(description);
+ }
+ }
+ });
+ }
+
+ // Creates the local video capturer, choosing in priority order: a video file
+ // passed via intent (emulated camera), the screen (when screencapture is
+ // enabled), Camera2, or Camera1. Returns null after reporting an error if no
+ // capturer could be created.
+ private VideoCapturer createVideoCapturer() {
+ VideoCapturer videoCapturer = null;
+ String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
+ if (videoFileAsCamera != null) {
+ try {
+ videoCapturer = new FileVideoCapturer(videoFileAsCamera);
+ } catch (IOException e) {
+ reportError("Failed to open video file for emulated camera");
+ return null;
+ }
+ } else if (screencaptureEnabled) {
+ // Screen capture requires the MediaProjection permission result captured earlier.
+ if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
+ reportError("User didn't give permission to capture the screen.");
+ return null;
+ }
+ return new ScreenCapturerAndroid(
+ mediaProjectionPermissionResultData, new MediaProjection.Callback() {
+ @Override
+ public void onStop() {
+ reportError("User revoked permission to capture the screen.");
+ }
+ });
+ } else if (useCamera2()) {
+ // Camera2 in this app requires capture-to-texture (see error string).
+ if (!captureToTexture()) {
+ reportError(getString(R.string.camera2_texture_only_error));
+ return null;
+ }
+
+ Logging.d(TAG, "Creating capturer using camera2 API.");
+ videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
+ } else {
+ Logging.d(TAG, "Creating capturer using camera1 API.");
+ videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
+ }
+ if (videoCapturer == null) {
+ reportError("Failed to open camera");
+ return null;
+ }
+ return videoCapturer;
+ }
+
+ // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
+ // All callbacks are invoked from websocket signaling looper thread and
+ // are routed to UI thread.
+ // Runs on the UI thread (see onConnectedToRoom). Creates the peer connection
+ // and either starts the offer (initiator) or consumes the room's offer/ICE
+ // candidates (callee).
+ private void onConnectedToRoomInternal(final SignalingParameters params) {
+ final long delta = System.currentTimeMillis() - callStartedTimeMs;
+
+ signalingParameters = params;
+ logAndToast("Creating peer connection, delay=" + delta + "ms");
+ // A capturer is only needed for video calls; audio-only calls pass null.
+ VideoCapturer videoCapturer = null;
+ if (peerConnectionParameters.videoCallEnabled) {
+ videoCapturer = createVideoCapturer();
+ }
+ peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
+ remoteRenderers, videoCapturer, signalingParameters);
+
+ if (signalingParameters.initiator) {
+ logAndToast("Creating OFFER...");
+ // Create offer. Offer SDP will be sent to answering client in
+ // PeerConnectionEvents.onLocalDescription event.
+ peerConnectionClient.createOffer();
+ } else {
+ if (params.offerSdp != null) {
+ peerConnectionClient.setRemoteDescription(params.offerSdp);
+ logAndToast("Creating ANSWER...");
+ // Create answer. Answer SDP will be sent to offering client in
+ // PeerConnectionEvents.onLocalDescription event.
+ peerConnectionClient.createAnswer();
+ }
+ if (params.iceCandidates != null) {
+ // Add remote ICE candidates from room.
+ for (IceCandidate iceCandidate : params.iceCandidates) {
+ peerConnectionClient.addRemoteIceCandidate(iceCandidate);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void onConnectedToRoom(final SignalingParameters params) {
+   // Signaling callbacks arrive on the websocket thread; hop to the UI thread.
+   final Runnable handler = new Runnable() {
+     @Override
+     public void run() {
+       onConnectedToRoomInternal(params);
+     }
+   };
+   runOnUiThread(handler);
+ }
+
+ /**
+  * Applies the remote session description on the UI thread and, for the
+  * non-initiating side, starts answer creation. (Fixed typo in the error log
+  * message: "non-initilized" -> "non-initialized".)
+  */
+ @Override
+ public void onRemoteDescription(final SessionDescription sdp) {
+   final long delta = System.currentTimeMillis() - callStartedTimeMs;
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       if (peerConnectionClient == null) {
+         Log.e(TAG, "Received remote SDP for non-initialized peer connection.");
+         return;
+       }
+       logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms");
+       peerConnectionClient.setRemoteDescription(sdp);
+       if (!signalingParameters.initiator) {
+         logAndToast("Creating ANSWER...");
+         // Create answer. Answer SDP will be sent to offering client in
+         // PeerConnectionEvents.onLocalDescription event.
+         peerConnectionClient.createAnswer();
+       }
+     }
+   });
+ }
+
+ @Override
+ public void onRemoteIceCandidate(final IceCandidate candidate) {
+   // Deliver the remote candidate to the peer connection on the UI thread.
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       if (peerConnectionClient != null) {
+         peerConnectionClient.addRemoteIceCandidate(candidate);
+       } else {
+         Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
+       }
+     }
+   });
+ }
+
+ @Override
+ public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
+   // Apply remote candidate removals to the peer connection on the UI thread.
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       if (peerConnectionClient != null) {
+         peerConnectionClient.removeRemoteIceCandidates(candidates);
+       } else {
+         Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
+       }
+     }
+   });
+ }
+
+ @Override
+ public void onChannelClose() {
+   // Remote side closed the signaling channel: tear the call down from the UI thread.
+   final Runnable hangUp = new Runnable() {
+     @Override
+     public void run() {
+       logAndToast("Remote end hung up; dropping PeerConnection");
+       disconnect();
+     }
+   };
+   runOnUiThread(hangUp);
+ }
+
+ @Override
+ public void onChannelError(final String description) {
+ // Signaling-channel errors share the one-shot error path (reportError routes
+ // to the UI thread and guards with isError).
+ reportError(description);
+ }
+
+ // -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
+ // Send local peer connection SDP and ICE candidates to remote party.
+ // All callbacks are invoked from peer connection client looper thread and
+ // are routed to UI thread.
+ @Override
+ public void onLocalDescription(final SessionDescription sdp) {
+ final long delta = System.currentTimeMillis() - callStartedTimeMs;
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ if (appRtcClient != null) {
+ logAndToast("Sending " + sdp.type + ", delay=" + delta + "ms");
+ // The initiator's local SDP is the offer; the callee's is the answer.
+ if (signalingParameters.initiator) {
+ appRtcClient.sendOfferSdp(sdp);
+ } else {
+ appRtcClient.sendAnswerSdp(sdp);
+ }
+ }
+ // Apply the configured video bitrate cap once a local description exists.
+ if (peerConnectionParameters.videoMaxBitrate > 0) {
+ Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
+ peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onIceCandidate(final IceCandidate candidate) {
+   // Forward each locally gathered candidate to the signaling server.
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       if (appRtcClient == null) {
+         return; // Already disconnected from the room.
+       }
+       appRtcClient.sendLocalIceCandidate(candidate);
+     }
+   });
+ }
+
+ @Override
+ public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+   // Forward local candidate removals to the signaling server.
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       if (appRtcClient == null) {
+         return; // Already disconnected from the room.
+       }
+       appRtcClient.sendLocalIceCandidateRemovals(candidates);
+     }
+   });
+ }
+
+ @Override
+ public void onIceConnected() {
+ final long delta = System.currentTimeMillis() - callStartedTimeMs;
+ runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ logAndToast("ICE connected, delay=" + delta + "ms");
+ // Mark the call live; callConnected() updates the video view and enables
+ // periodic statistics callbacks.
+ iceConnected = true;
+ callConnected();
+ }
+ });
+ }
+
+ @Override
+ public void onIceDisconnected() {
+   // ICE dropped: end the call from the UI thread.
+   final Runnable task = new Runnable() {
+     @Override
+     public void run() {
+       logAndToast("ICE disconnected");
+       iceConnected = false;
+       disconnect();
+     }
+   };
+   runOnUiThread(task);
+ }
+
+ @Override
+ public void onPeerConnectionClosed() {
+ // Intentionally a no-op: cleanup is driven by disconnect(), which closes the
+ // peer connection itself.
+ }
+
+ @Override
+ public void onPeerConnectionStatsReady(final StatsReport[] reports) {
+   runOnUiThread(new Runnable() {
+     @Override
+     public void run() {
+       // Only refresh the HUD while the call is connected and healthy.
+       if (iceConnected && !isError) {
+         hudFragment.updateEncoderStatistics(reports);
+       }
+     }
+   });
+ }
+
+ @Override
+ public void onPeerConnectionError(final String description) {
+ // Peer-connection errors share the same one-shot error path as channel errors.
+ reportError(description);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/CallFragment.java b/app/src/main/java/com/myhexaville/androidwebrtc/CallFragment.java
new file mode 100644
index 0000000..74ee932
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/CallFragment.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.SeekBar;
+import android.widget.TextView;
+
+import org.webrtc.RendererCommon.ScalingType;
+
+/**
+ * Fragment for call control.
+ *
+ * Shows the room name plus hang-up, camera-switch, scaling-mode and mute
+ * buttons, and (optionally) a capture-quality slider. The host activity must
+ * implement {@link OnCallEvents} (enforced by the cast in onAttach).
+ */
+public class CallFragment extends Fragment {
+ // Root view inflated from R.layout.fragment_call.
+ private View controlView;
+ private TextView contactView;
+ private ImageButton disconnectButton;
+ private ImageButton cameraSwitchButton;
+ private ImageButton videoScalingButton;
+ private ImageButton toggleMuteButton;
+ private TextView captureFormatText;
+ private SeekBar captureFormatSlider;
+ // Host activity callback, bound in onAttach().
+ private OnCallEvents callEvents;
+ // Current remote-video scaling mode, toggled by videoScalingButton.
+ private ScalingType scalingType;
+ private boolean videoCallEnabled = true;
+
+ /**
+ * Call control interface for container activity.
+ */
+ public interface OnCallEvents {
+ void onCallHangUp();
+ void onCameraSwitch();
+ void onVideoScalingSwitch(ScalingType scalingType);
+ void onCaptureFormatChange(int width, int height, int framerate);
+ boolean onToggleMic();
+ }
+
+ @Override
+ public View onCreateView(
+ LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+ controlView = inflater.inflate(R.layout.fragment_call, container, false);
+
+ // Create UI controls.
+ contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
+ disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
+ cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
+ videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
+ toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
+ captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
+ captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
+
+ // Add buttons click events.
+ disconnectButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ callEvents.onCallHangUp();
+ }
+ });
+
+ cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ callEvents.onCameraSwitch();
+ }
+ });
+
+ // Toggles between FIT and FILL scaling; the button icon always shows the
+ // action that a subsequent tap would perform.
+ videoScalingButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
+ videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
+ scalingType = ScalingType.SCALE_ASPECT_FIT;
+ } else {
+ videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
+ scalingType = ScalingType.SCALE_ASPECT_FILL;
+ }
+ callEvents.onVideoScalingSwitch(scalingType);
+ }
+ });
+ scalingType = ScalingType.SCALE_ASPECT_FILL;
+
+ toggleMuteButton.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ boolean enabled = callEvents.onToggleMic();
+ // Dim the button while the mic is muted.
+ toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
+ }
+ });
+
+ return controlView;
+ }
+
+ @Override
+ public void onStart() {
+ super.onStart();
+
+ // Read the room name and video-call flags from the fragment arguments.
+ boolean captureSliderEnabled = false;
+ Bundle args = getArguments();
+ if (args != null) {
+ String contactName = args.getString(CallActivity.EXTRA_ROOMID);
+ contactView.setText(contactName);
+ videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
+ captureSliderEnabled = videoCallEnabled
+ && args.getBoolean(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, false);
+ }
+ if (!videoCallEnabled) {
+ cameraSwitchButton.setVisibility(View.INVISIBLE);
+ }
+ // The capture-quality slider only makes sense for video calls with the
+ // slider preference enabled; otherwise hide both slider and label.
+ if (captureSliderEnabled) {
+ captureFormatSlider.setOnSeekBarChangeListener(
+ new CaptureQualityController(captureFormatText, callEvents));
+ } else {
+ captureFormatText.setVisibility(View.GONE);
+ captureFormatSlider.setVisibility(View.GONE);
+ }
+ }
+
+ // TODO(sakal): Replace with onAttach(Context) once we only support API level 23+.
+ @SuppressWarnings("deprecation")
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+ callEvents = (OnCallEvents) activity;
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/CaptureQualityController.java b/app/src/main/java/com/myhexaville/androidwebrtc/CaptureQualityController.java
new file mode 100644
index 0000000..5fea126
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/CaptureQualityController.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.widget.SeekBar;
+import android.widget.TextView;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+/**
+ * Control capture format based on a seekbar listener.
+ *
+ * Maps slider progress (0-100) onto a log-scaled target bandwidth, picks the
+ * best matching {@link CaptureFormat}, and notifies the host via
+ * {@link CallFragment.OnCallEvents#onCaptureFormatChange} when the user
+ * releases the slider. Generic type parameters (stripped in the original
+ * patch text) are restored, and comparisons use Integer.compare instead of
+ * the overflow-prone subtraction idiom.
+ */
+public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
+  // Prioritize framerate below this threshold and resolution above the threshold.
+  private static final int FRAMERATE_THRESHOLD = 15;
+
+  // Candidate formats, highest resolution first. The framerate range is in
+  // millihertz, hence 30000 == 30 fps.
+  private final List<CaptureFormat> formats =
+      Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
+          new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
+          new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
+  private final TextView captureFormatText;
+  private final CallFragment.OnCallEvents callEvents;
+  private int width = 0;
+  private int height = 0;
+  private int framerate = 0;
+  private double targetBandwidth = 0;
+
+  public CaptureQualityController(
+      TextView captureFormatText, CallFragment.OnCallEvents callEvents) {
+    this.captureFormatText = captureFormatText;
+    this.callEvents = callEvents;
+  }
+
+  // Orders formats under the current target bandwidth: when both formats reach
+  // FRAMERATE_THRESHOLD fps (or tie on fps), prefer higher resolution;
+  // otherwise prefer higher framerate.
+  private final Comparator<CaptureFormat> compareFormats = new Comparator<CaptureFormat>() {
+    @Override
+    public int compare(CaptureFormat first, CaptureFormat second) {
+      int firstFps = calculateFramerate(targetBandwidth, first);
+      int secondFps = calculateFramerate(targetBandwidth, second);
+
+      if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
+          || firstFps == secondFps) {
+        // Compare resolution. Integer.compare avoids overflow of the
+        // subtraction idiom.
+        return Integer.compare(first.width * first.height, second.width * second.height);
+      } else {
+        // Compare fps.
+        return Integer.compare(firstFps, secondFps);
+      }
+    }
+  };
+
+  @Override
+  public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+    if (progress == 0) {
+      // Leftmost slider position mutes video entirely.
+      width = 0;
+      height = 0;
+      framerate = 0;
+      captureFormatText.setText(R.string.muted);
+      return;
+    }
+
+    // Extract max bandwidth (in millipixels / second).
+    long maxCaptureBandwidth = Long.MIN_VALUE;
+    for (CaptureFormat format : formats) {
+      maxCaptureBandwidth =
+          Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
+    }
+
+    // Fraction between 0 and 1.
+    double bandwidthFraction = (double) progress / 100.0;
+    // Make a log-scale transformation, still between 0 and 1.
+    final double kExpConstant = 3.0;
+    bandwidthFraction =
+        (Math.exp(kExpConstant * bandwidthFraction) - 1) / (Math.exp(kExpConstant) - 1);
+    targetBandwidth = bandwidthFraction * maxCaptureBandwidth;
+
+    // Choose the best format given a target bandwidth.
+    final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
+    width = bestFormat.width;
+    height = bestFormat.height;
+    framerate = calculateFramerate(targetBandwidth, bestFormat);
+    captureFormatText.setText(
+        String.format(captureFormatText.getContext().getString(R.string.format_description), width,
+            height, framerate));
+  }
+
+  @Override
+  public void onStartTrackingTouch(SeekBar seekBar) {}
+
+  // Notify the host only when the user releases the slider, avoiding a capture
+  // restart on every intermediate progress value.
+  @Override
+  public void onStopTrackingTouch(SeekBar seekBar) {
+    callEvents.onCaptureFormatChange(width, height, framerate);
+  }
+
+  // Return the highest frame rate possible based on bandwidth and format.
+  private int calculateFramerate(double bandwidth, CaptureFormat format) {
+    return (int) Math.round(
+        Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
+        / 1000.0);
+  }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/ConnectActivity.java b/app/src/main/java/com/myhexaville/androidwebrtc/ConnectActivity.java
new file mode 100644
index 0000000..2c63948
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/ConnectActivity.java
@@ -0,0 +1,643 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.Manifest;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.net.Uri;
+import android.os.Bundle;
+import android.preference.PreferenceManager;
+import android.support.v7.app.AppCompatActivity;
+import android.util.Log;
+import android.view.ContextMenu;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.view.inputmethod.EditorInfo;
+import android.webkit.URLUtil;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.EditText;
+import android.widget.ImageButton;
+import android.widget.ImageView;
+import android.widget.ListView;
+import android.widget.TextView;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+
+import java.util.ArrayList;
+import java.util.Random;
+
+import pub.devrel.easypermissions.AfterPermissionGranted;
+import pub.devrel.easypermissions.EasyPermissions;
+
+/**
+ * Handles the initial setup where the user selects which room to join.
+ */
+public class ConnectActivity extends AppCompatActivity {
+ private static final String LOG_TAG = "ConnectActivity";
+ private static final int CONNECTION_REQUEST = 1;
+ private static final int REMOVE_FAVORITE_INDEX = 0;
+ private static final int RC_CALL = 12312;
+ private static boolean commandLineRun = false;
+
+ private ImageView connectButton;
+ private ImageView addFavoriteButton;
+ private EditText roomEditText;
+ private ListView roomListView;
+ private SharedPreferences sharedPref;
+ private String keyprefVideoCallEnabled;
+ private String keyprefScreencapture;
+ private String keyprefCamera2;
+ private String keyprefResolution;
+ private String keyprefFps;
+ private String keyprefCaptureQualitySlider;
+ private String keyprefVideoBitrateType;
+ private String keyprefVideoBitrateValue;
+ private String keyprefVideoCodec;
+ private String keyprefAudioBitrateType;
+ private String keyprefAudioBitrateValue;
+ private String keyprefAudioCodec;
+ private String keyprefHwCodecAcceleration;
+ private String keyprefCaptureToTexture;
+ private String keyprefFlexfec;
+ private String keyprefNoAudioProcessingPipeline;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
+ private String keyprefDisableBuiltInAec;
+ private String keyprefDisableBuiltInAgc;
+ private String keyprefDisableBuiltInNs;
+ private String keyprefEnableLevelControl;
+ private String keyprefDisplayHud;
+ private String keyprefTracing;
+ private String keyprefRoomServerUrl;
+ private String keyprefRoom;
+ private String keyprefRoomList;
+ private ArrayList roomList;
+ private ArrayAdapter adapter;
+ private String keyprefEnableDataChannel;
+ private String keyprefOrdered;
+ private String keyprefMaxRetransmitTimeMs;
+ private String keyprefMaxRetransmits;
+ private String keyprefDataProtocol;
+ private String keyprefNegotiated;
+ private String keyprefDataId;
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ // Caches every preference key string, builds the room-selection UI, and
+ // honors implicit VIEW intents by connecting immediately.
+
+ // Get setting keys.
+ PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
+ sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
+ keyprefVideoCallEnabled = getString(R.string.pref_videocall_key);
+ keyprefScreencapture = getString(R.string.pref_screencapture_key);
+ keyprefCamera2 = getString(R.string.pref_camera2_key);
+ keyprefResolution = getString(R.string.pref_resolution_key);
+ keyprefFps = getString(R.string.pref_fps_key);
+ keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
+ keyprefVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
+ keyprefVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
+ keyprefVideoCodec = getString(R.string.pref_videocodec_key);
+ keyprefHwCodecAcceleration = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
+ keyprefFlexfec = getString(R.string.pref_flexfec_key);
+ keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
+ keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
+ keyprefAudioCodec = getString(R.string.pref_audiocodec_key);
+ keyprefNoAudioProcessingPipeline = getString(R.string.pref_noaudioprocessing_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
+ keyprefDisableBuiltInAec = getString(R.string.pref_disable_built_in_aec_key);
+ keyprefDisableBuiltInAgc = getString(R.string.pref_disable_built_in_agc_key);
+ keyprefDisableBuiltInNs = getString(R.string.pref_disable_built_in_ns_key);
+ keyprefEnableLevelControl = getString(R.string.pref_enable_level_control_key);
+ keyprefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyprefTracing = getString(R.string.pref_tracing_key);
+ keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
+ keyprefRoom = getString(R.string.pref_room_key);
+ keyprefRoomList = getString(R.string.pref_room_list_key);
+ keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
+ keyprefOrdered = getString(R.string.pref_ordered_key);
+ keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
+ keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
+ keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
+ keyprefNegotiated = getString(R.string.pref_negotiated_key);
+ keyprefDataId = getString(R.string.pref_data_id_key);
+
+ setContentView(R.layout.activity_connect);
+
+ // Wire up the room-name field; IME "done" acts like tapping the favorite button.
+ roomEditText = (EditText) findViewById(R.id.room_edittext);
+ roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
+ @Override
+ public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
+ if (i == EditorInfo.IME_ACTION_DONE) {
+ addFavoriteButton.performClick();
+ return true;
+ }
+ return false;
+ }
+ });
+ roomEditText.requestFocus();
+
+ // Favorites list with a context menu for removal.
+ roomListView = (ListView) findViewById(R.id.room_listview);
+ roomListView.setEmptyView(findViewById(android.R.id.empty));
+ roomListView.setOnItemClickListener(roomListClickListener);
+ registerForContextMenu(roomListView);
+ connectButton = (ImageView) findViewById(R.id.connect_button);
+ connectButton.setOnClickListener(connectListener);
+ addFavoriteButton = (ImageView) findViewById(R.id.add_favorite_button);
+ addFavoriteButton.setOnClickListener(addFavoriteListener);
+
+ // If an implicit VIEW intent is launching the app, go directly to that URL.
+ final Intent intent = getIntent();
+ if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
+ boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
+ int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
+ boolean useValuesFromIntent =
+ intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
+ String room = sharedPref.getString(keyprefRoom, "");
+ connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
+ }
+ }
+
+
+ @Override
+ public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
+ super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ // Forward the result to EasyPermissions so its permission callbacks fire.
+ EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
+ }
+
+
+ @Override
+ public boolean onCreateOptionsMenu(Menu menu) {
+ // Inflate the action-bar menu; item taps are handled in onOptionsItemSelected.
+ getMenuInflater().inflate(R.menu.connect_menu, menu);
+ return true;
+ }
+
+ /** Builds the long-press context menu for entries in the favorites list. */
+ @Override
+ public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
+   if (v.getId() != R.id.room_listview) {
+     super.onCreateContextMenu(menu, v, menuInfo);
+     return;
+   }
+   AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
+   menu.setHeaderTitle(roomList.get(info.position));
+   String[] labels = getResources().getStringArray(R.array.roomListContextMenu);
+   for (int index = 0; index < labels.length; index++) {
+     menu.add(Menu.NONE, index, index, labels[index]);
+   }
+ }
+
+ /** Handles the "remove favorite" context-menu action for the room list. */
+ @Override
+ public boolean onContextItemSelected(MenuItem item) {
+   if (item.getItemId() != REMOVE_FAVORITE_INDEX) {
+     return super.onContextItemSelected(item);
+   }
+   AdapterView.AdapterContextMenuInfo info =
+       (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
+   roomList.remove(info.position);
+   adapter.notifyDataSetChanged();
+   return true;
+ }
+
+ @Override
+ public boolean onOptionsItemSelected(MenuItem item) {
+   // Handle presses on the action bar items.
+   final int id = item.getItemId();
+   if (id == R.id.action_settings) {
+     startActivity(new Intent(this, SettingsActivity.class));
+     return true;
+   }
+   if (id == R.id.action_loopback) {
+     connectToRoom(null, false, true, false, 0);
+     return true;
+   }
+   return super.onOptionsItemSelected(item);
+ }
+
+ @Override
+ public void onPause() {
+   super.onPause();
+   // Persist the current room name and the favorites list so they survive restarts.
+   String room = roomEditText.getText().toString();
+   String roomListJson = new JSONArray(roomList).toString();
+   SharedPreferences.Editor editor = sharedPref.edit();
+   editor.putString(keyprefRoom, room);
+   editor.putString(keyprefRoomList, roomListJson);
+   // apply() writes asynchronously: unlike commit() it never blocks the UI
+   // thread during the pause transition, while the in-memory preferences are
+   // still updated immediately for subsequent reads.
+   editor.apply();
+ }
+
+ @Override
+ public void onResume() {
+   super.onResume();
+   // Restore the last room name and the persisted favorites list.
+   String room = sharedPref.getString(keyprefRoom, "");
+   roomEditText.setText(room);
+   // Type parameter restored: the list holds room-name strings.
+   roomList = new ArrayList<String>();
+   String roomListJson = sharedPref.getString(keyprefRoomList, null);
+   if (roomListJson != null) {
+     try {
+       JSONArray jsonArray = new JSONArray(roomListJson);
+       for (int i = 0; i < jsonArray.length(); i++) {
+         roomList.add(jsonArray.get(i).toString());
+       }
+     } catch (JSONException e) {
+       // Corrupt stored list: log and fall back to an empty favorites list.
+       Log.e(LOG_TAG, "Failed to load room list: " + e.toString());
+     }
+   }
+   adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
+   roomListView.setAdapter(adapter);
+   if (adapter.getCount() > 0) {
+     roomListView.requestFocus();
+     roomListView.setItemChecked(0, true);
+   }
+ }
+
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ if (requestCode == CONNECTION_REQUEST && commandLineRun) {
+ Log.d(LOG_TAG, "Return: " + resultCode);
+ setResult(resultCode);
+ commandLineRun = false;
+ finish();
+ }
+ }
+
+ /**
+ * Get a value from the shared preference or from the intent, if it does not
+ * exist the default is used.
+ */
+ private String sharedPrefGetString(
+ int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+ String defaultValue = getString(defaultId);
+ if (useFromIntent) {
+ String value = getIntent().getStringExtra(intentName);
+ if (value != null) {
+ return value;
+ }
+ return defaultValue;
+ } else {
+ String attributeName = getString(attributeId);
+ return sharedPref.getString(attributeName, defaultValue);
+ }
+ }
+
+ /**
+ * Get a value from the shared preference or from the intent, if it does not
+ * exist the default is used.
+ */
+ private boolean sharedPrefGetBoolean(
+ int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+ boolean defaultValue = Boolean.valueOf(getString(defaultId));
+ if (useFromIntent) {
+ return getIntent().getBooleanExtra(intentName, defaultValue);
+ } else {
+ String attributeName = getString(attributeId);
+ return sharedPref.getBoolean(attributeName, defaultValue);
+ }
+ }
+
+ /**
+ * Get a value from the shared preference or from the intent, if it does not
+ * exist the default is used.
+ */
+ private int sharedPrefGetInteger(
+ int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+ String defaultString = getString(defaultId);
+ int defaultValue = Integer.parseInt(defaultString);
+ if (useFromIntent) {
+ return getIntent().getIntExtra(intentName, defaultValue);
+ } else {
+ String attributeName = getString(attributeId);
+ String value = sharedPref.getString(attributeName, defaultString);
+ try {
+ return Integer.parseInt(value);
+ } catch (NumberFormatException e) {
+ Log.e(LOG_TAG, "Wrong setting for: " + attributeName + ":" + value);
+ return defaultValue;
+ }
+ }
+ }
+
+ private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
+ boolean useValuesFromIntent, int runTimeMs) {
+ this.commandLineRun = commandLineRun;
+
+ // roomId is random for loopback.
+ if (loopback) {
+ roomId = Integer.toString((new Random()).nextInt(100000000));
+ }
+
+ String roomUrl = sharedPref.getString(
+ keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
+
+ // Video call enabled flag.
+ boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
+ CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
+
+ // Use screencapture option.
+ boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
+ CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);
+
+ // Use Camera2 option.
+ boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
+ R.string.pref_camera2_default, useValuesFromIntent);
+
+ // Get default codecs.
+ String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
+ CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
+ String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
+ CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
+
+ // Check HW codec flag.
+ boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
+ CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
+
+ // Check Capture to texture.
+ boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
+ CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
+ useValuesFromIntent);
+
+ // Check FlexFEC.
+ boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
+ CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);
+
+ // Check Disable Audio Processing flag.
+ boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
+ CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
+ useValuesFromIntent);
+
+ // Check AEC dump flag.
+ boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
+ CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
+
+ // Check OpenSL ES enabled flag.
+ boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
+ CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
+
+ // Check Disable built-in AEC flag.
+ boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
+ CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
+ useValuesFromIntent);
+
+ // Check Disable built-in AGC flag.
+ boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
+ CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
+ useValuesFromIntent);
+
+ // Check Disable built-in NS flag.
+ boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
+ CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
+ useValuesFromIntent);
+
+ // Check Enable level control.
+ boolean enableLevelControl = sharedPrefGetBoolean(R.string.pref_enable_level_control_key,
+ CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, R.string.pref_enable_level_control_default,
+ useValuesFromIntent);
+
+ // Get video resolution from settings.
+ int videoWidth = 0;
+ int videoHeight = 0;
+ if (useValuesFromIntent) {
+ videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
+ videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
+ }
+ if (videoWidth == 0 && videoHeight == 0) {
+ String resolution =
+ sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
+ String[] dimensions = resolution.split("[ x]+");
+ if (dimensions.length == 2) {
+ try {
+ videoWidth = Integer.parseInt(dimensions[0]);
+ videoHeight = Integer.parseInt(dimensions[1]);
+ } catch (NumberFormatException e) {
+ videoWidth = 0;
+ videoHeight = 0;
+ Log.e(LOG_TAG, "Wrong video resolution setting: " + resolution);
+ }
+ }
+ }
+
+ // Get camera fps from settings.
+ int cameraFps = 0;
+ if (useValuesFromIntent) {
+ cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
+ }
+ if (cameraFps == 0) {
+ String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
+ String[] fpsValues = fps.split("[ x]+");
+ if (fpsValues.length == 2) {
+ try {
+ cameraFps = Integer.parseInt(fpsValues[0]);
+ } catch (NumberFormatException e) {
+ cameraFps = 0;
+ Log.e(LOG_TAG, "Wrong camera fps setting: " + fps);
+ }
+ }
+ }
+
+ // Check capture quality slider flag.
+ boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
+ CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
+ R.string.pref_capturequalityslider_default, useValuesFromIntent);
+
+ // Get video and audio start bitrate.
+ int videoStartBitrate = 0;
+ if (useValuesFromIntent) {
+ videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
+ }
+ if (videoStartBitrate == 0) {
+ String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+ String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
+ if (!bitrateType.equals(bitrateTypeDefault)) {
+ String bitrateValue = sharedPref.getString(
+ keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
+ videoStartBitrate = Integer.parseInt(bitrateValue);
+ }
+ }
+
+ int audioStartBitrate = 0;
+ if (useValuesFromIntent) {
+ audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
+ }
+ if (audioStartBitrate == 0) {
+ String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+ String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
+ if (!bitrateType.equals(bitrateTypeDefault)) {
+ String bitrateValue = sharedPref.getString(
+ keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
+ audioStartBitrate = Integer.parseInt(bitrateValue);
+ }
+ }
+
+ // Check statistics display option.
+ boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
+ CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
+
+ boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
+ R.string.pref_tracing_default, useValuesFromIntent);
+
+ // Get datachannel options
+ boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
+ CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
+ useValuesFromIntent);
+ boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
+ R.string.pref_ordered_default, useValuesFromIntent);
+ boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
+ CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
+ int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
+ CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
+ useValuesFromIntent);
+ int maxRetr =
+ sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
+ R.string.pref_max_retransmits_default, useValuesFromIntent);
+ int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
+ R.string.pref_data_id_default, useValuesFromIntent);
+ String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
+ CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);
+
+ // Start AppRTCMobile activity.
+ Log.d(LOG_TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
+ if (validateUrl(roomUrl)) {
+ Uri uri = Uri.parse(roomUrl);
+ Intent intent = new Intent(this, CallActivity.class);
+ intent.setData(uri);
+ intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
+ intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
+ intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
+ intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
+ intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
+ intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
+ intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
+ intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
+ intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
+ intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
+ intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
+ intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
+ intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
+ intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
+ intent.putExtra(CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, enableLevelControl);
+ intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
+ intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
+ intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
+ intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
+ intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
+ intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
+
+ intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);
+
+ if (dataChannelEnabled) {
+ intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
+ intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
+ intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
+ intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
+ intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
+ intent.putExtra(CallActivity.EXTRA_ID, id);
+ }
+
+ if (useValuesFromIntent) {
+ if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
+ String videoFileAsCamera =
+ getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
+ intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
+ }
+
+ if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
+ String saveRemoteVideoToFile =
+ getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+ intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
+ }
+
+ if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
+ int videoOutWidth =
+ getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+ intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
+ }
+
+ if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
+ int videoOutHeight =
+ getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+ intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
+ }
+ }
+
+ startActivityForResult(intent, CONNECTION_REQUEST);
+ }
+ }
+
+ /**
+ * Returns true when |url| is a well-formed http or https URL; otherwise
+ * shows a modal error dialog describing the invalid URL and returns false.
+ */
+ private boolean validateUrl(String url) {
+ boolean wellFormed = URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url);
+ if (!wellFormed) {
+ // Non-cancelable dialog: the user must acknowledge via the neutral button.
+ new AlertDialog.Builder(this)
+ .setTitle(getText(R.string.invalid_url_title))
+ .setMessage(getString(R.string.invalid_url_text, url))
+ .setCancelable(false)
+ .setNeutralButton(R.string.ok, (dialog, id) -> dialog.cancel())
+ .create()
+ .show();
+ }
+ return wellFormed;
+ }
+
+ // Item-click handler for the favorites list: connects to the room whose id
+ // is displayed in the tapped row. (Restored the '<?>' wildcard that was
+ // stripped from the AdapterView parameter; without it this does not compile.)
+ private final AdapterView.OnItemClickListener roomListClickListener =
+ new AdapterView.OnItemClickListener() {
+ @Override
+ public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
+ String roomId = ((TextView) view).getText().toString();
+ connectToRoom(roomId, false, false, false, 0);
+ }
+ };
+
+ private final OnClickListener addFavoriteListener = new OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ String newRoom = roomEditText.getText().toString();
+ if (newRoom.length() > 0 && !roomList.contains(newRoom)) {
+ adapter.add(newRoom);
+ adapter.notifyDataSetChanged();
+ }
+ }
+ };
+
+ // Starts the permission-check / room-connection flow (see connect()).
+ private final OnClickListener connectListener = view -> connect();
+
+ /**
+ * Connects to the room typed into |roomEditText| once CAMERA and
+ * RECORD_AUDIO permissions are held. If they are missing, requests them via
+ * EasyPermissions; the @AfterPermissionGranted(RC_CALL) annotation makes
+ * EasyPermissions re-invoke this method after the user grants them.
+ */
+ @AfterPermissionGranted(RC_CALL)
+ private void connect() {
+ Log.d(LOG_TAG, "connect: 1");
+ String[] perms = {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
+ if (EasyPermissions.hasPermissions(this, perms)) {
+ Log.d(LOG_TAG, "connect: 2");
+ // All boolean flags off, zero runtime limit — a plain interactive call.
+ connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
+ } else {
+ Log.d(LOG_TAG, "connect: 3");
+ EasyPermissions.requestPermissions(this, "Need some permissions", RC_CALL, perms);
+ }
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/CpuMonitor.java b/app/src/main/java/com/myhexaville/androidwebrtc/CpuMonitor.java
new file mode 100644
index 0000000..2ef1f5b
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/CpuMonitor.java
@@ -0,0 +1,510 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.BatteryManager;
+import android.os.SystemClock;
+import android.util.Log;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Scanner;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Simple CPU monitor. The caller creates a CpuMonitor object which can then
+ * be used via sampleCpuUtilization() to collect the percentual use of the
+ * cumulative CPU capacity for all CPUs running at their nominal frequency. 3
+ * values are generated: (1) getCpuCurrent() returns the use since the last
+ * sampleCpuUtilization(), (2) getCpuAvg3() returns the use since 3 prior
+ * calls, and (3) getCpuAvgAll() returns the use over all SAMPLE_SAVE_NUMBER
+ * calls.
+ *
+ * CPUs in Android are often "offline", and while this of course means 0 Hz
+ * as current frequency, in this state we cannot even get their nominal
+ * frequency. We therefore tread carefully, and allow any CPU to be missing.
+ * Missing CPUs are assumed to have the same nominal frequency as any close
+ * lower-numbered CPU, but as soon as it is online, we'll get their proper
+ * frequency and remember it. (Since CPU 0 in practice always seem to be
+ * online, this unidirectional frequency inheritance should be no problem in
+ * practice.)
+ *
+ *
+ * <p>Caveats:
+ * o No provision made for zany "turbo" mode, common in the x86 world.
+ * o No provision made for ARM big.LITTLE; if CPU n can switch behind our
+ * back, we might get incorrect estimates.
+ * o This is not thread-safe. To call asynchronously, create different
+ * CpuMonitor objects.
+ *
+ *
+ * <p>If we can gather enough info to generate a sensible result,
+ * sampleCpuUtilization returns true. It is designed to never throw an
+ * exception.
+ *
+ *
+ * <p>sampleCpuUtilization should not be called too often in its present form,
+ * since then deltas would be small and the percent values would fluctuate and
+ * be unreadable. If it is desirable to call it more often than say once per
+ * second, one would need to increase SAMPLE_SAVE_NUMBER and probably use
+ * Queue to avoid copying overhead.
+ *
+ * Known problems:
+ * 1. Nexus 7 devices running Kitkat have a kernel which often output an
+ * incorrect 'idle' field in /proc/stat. The value is close to twice the
+ * correct value, and then returns to back to correct reading. Both when
+ * jumping up and back down we might create faulty CPU load readings.
+ */
+
+class CpuMonitor {
+ private static final String TAG = "CpuMonitor";
+ // Number of samples folded into each moving average below.
+ private static final int MOVING_AVERAGE_SAMPLES = 5;
+
+ // Sampling period for /proc/stat and the cpufreq sysfs files.
+ private static final int CPU_STAT_SAMPLE_PERIOD_MS = 2000;
+ // Minimum interval between stat summary lines logged by cpuUtilizationTask().
+ private static final int CPU_STAT_LOG_PERIOD_MS = 6000;
+
+ private final Context appContext;
+ // User CPU usage at current frequency.
+ private final MovingAverage userCpuUsage;
+ // System CPU usage at current frequency.
+ private final MovingAverage systemCpuUsage;
+ // Total CPU usage relative to maximum frequency.
+ private final MovingAverage totalCpuUsage;
+ // CPU frequency in percentage from maximum.
+ private final MovingAverage frequencyScale;
+
+ // Single-threaded executor driving periodic sampling; null while paused.
+ private ScheduledExecutorService executor;
+ private long lastStatLogTimeMs;
+ // Per-core max frequency; 0 means "not yet determined" (core was offline).
+ private long[] cpuFreqMax;
+ private int cpusPresent;
+ // Cores that reported a non-zero current frequency in the last sample.
+ private int actualCpusPresent;
+ private boolean initialized;
+ private boolean cpuOveruse;
+ // sysfs paths per core; maxPath[i] is nulled once its value has been read.
+ private String[] maxPath;
+ private String[] curPath;
+ private double[] curFreqScales;
+ private ProcStat lastProcStat;
+
+ // Immutable snapshot of the cumulative jiffy counters from /proc/stat line 1.
+ private static class ProcStat {
+ final long userTime;
+ final long systemTime;
+ final long idleTime;
+
+ ProcStat(long userTime, long systemTime, long idleTime) {
+ this.userTime = userTime;
+ this.systemTime = systemTime;
+ this.idleTime = idleTime;
+ }
+ }
+
+ // Fixed-size circular-buffer moving average over the last |size| values.
+ private static class MovingAverage {
+ private final int size;
+ private double sum;
+ private double currentValue;
+ private double circBuffer[];
+ private int circBufferIndex;
+
+ public MovingAverage(int size) {
+ if (size <= 0) {
+ throw new AssertionError("Size value in MovingAverage ctor should be positive.");
+ }
+ this.size = size;
+ circBuffer = new double[size];
+ }
+
+ public void reset() {
+ Arrays.fill(circBuffer, 0);
+ circBufferIndex = 0;
+ sum = 0;
+ currentValue = 0;
+ }
+
+ public void addValue(double value) {
+ // Evict the oldest sample so |sum| always equals the buffer total.
+ sum -= circBuffer[circBufferIndex];
+ circBuffer[circBufferIndex++] = value;
+ currentValue = value;
+ sum += value;
+ if (circBufferIndex >= size) {
+ circBufferIndex = 0;
+ }
+ }
+
+ public double getCurrent() {
+ return currentValue;
+ }
+
+ // NOTE(review): divides by |size| even before the buffer has filled, so the
+ // average is biased toward 0 for the first |size| samples (after reset too).
+ public double getAverage() {
+ return sum / (double) size;
+ }
+ }
+
+ // Starts periodic sampling immediately on construction.
+ public CpuMonitor(Context context) {
+ Log.d(TAG, "CpuMonitor ctor.");
+ appContext = context.getApplicationContext();
+ userCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+ systemCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+ totalCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+ frequencyScale = new MovingAverage(MOVING_AVERAGE_SAMPLES);
+ lastStatLogTimeMs = SystemClock.elapsedRealtime();
+
+ scheduleCpuUtilizationTask();
+ }
+
+ // Stops the sampling executor. Safe to call when already paused.
+ public void pause() {
+ if (executor != null) {
+ Log.d(TAG, "pause");
+ executor.shutdownNow();
+ executor = null;
+ }
+ }
+
+ // Clears accumulated statistics and restarts periodic sampling.
+ public void resume() {
+ Log.d(TAG, "resume");
+ resetStat();
+ scheduleCpuUtilizationTask();
+ }
+
+ // Clears accumulated statistics; a no-op while paused (executor == null).
+ public synchronized void reset() {
+ if (executor != null) {
+ Log.d(TAG, "reset");
+ resetStat();
+ cpuOveruse = false;
+ }
+ }
+
+ // Latest user+system CPU usage as a rounded percentage.
+ public synchronized int getCpuUsageCurrent() {
+ return doubleToPercent(userCpuUsage.getCurrent() + systemCpuUsage.getCurrent());
+ }
+
+ // Moving-average user+system CPU usage as a rounded percentage.
+ public synchronized int getCpuUsageAverage() {
+ return doubleToPercent(userCpuUsage.getAverage() + systemCpuUsage.getAverage());
+ }
+
+ // Moving-average CPU frequency relative to maximum, as a rounded percentage.
+ public synchronized int getFrequencyScaleAverage() {
+ return doubleToPercent(frequencyScale.getAverage());
+ }
+
+ // (Re)creates the executor and schedules cpuUtilizationTask() at a fixed rate.
+ private void scheduleCpuUtilizationTask() {
+ if (executor != null) {
+ executor.shutdownNow();
+ executor = null;
+ }
+
+ executor = Executors.newSingleThreadScheduledExecutor();
+ executor.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ cpuUtilizationTask();
+ }
+ }, 0, CPU_STAT_SAMPLE_PERIOD_MS, TimeUnit.MILLISECONDS);
+ }
+
+ // Executor callback: takes one sample and logs the stat summary at most
+ // once per CPU_STAT_LOG_PERIOD_MS.
+ private void cpuUtilizationTask() {
+ boolean cpuMonitorAvailable = sampleCpuUtilization();
+ if (cpuMonitorAvailable
+ && SystemClock.elapsedRealtime() - lastStatLogTimeMs >= CPU_STAT_LOG_PERIOD_MS) {
+ lastStatLogTimeMs = SystemClock.elapsedRealtime();
+ String statString = getStatString();
+ Log.d(TAG, statString);
+ }
+ }
+
+ // Lazily discovers the CPU count from /sys/devices/system/cpu/present and
+ // builds the per-core cpufreq sysfs paths. On parse failure cpusPresent
+ // stays 0 and sampleCpuUtilization() reports unavailable.
+ private void init() {
+ try {
+ FileReader fin = new FileReader("/sys/devices/system/cpu/present");
+ try {
+ BufferedReader reader = new BufferedReader(fin);
+ // File looks like "0-3"; the delimiter splits on '-' and newline.
+ Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");
+ scanner.nextInt(); // Skip leading number 0.
+ cpusPresent = 1 + scanner.nextInt();
+ scanner.close();
+ } catch (Exception e) {
+ Log.e(TAG, "Cannot do CPU stats due to /sys/devices/system/cpu/present parsing problem");
+ } finally {
+ fin.close();
+ }
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Cannot do CPU stats since /sys/devices/system/cpu/present is missing");
+ } catch (IOException e) {
+ Log.e(TAG, "Error closing file");
+ }
+
+ cpuFreqMax = new long[cpusPresent];
+ maxPath = new String[cpusPresent];
+ curPath = new String[cpusPresent];
+ curFreqScales = new double[cpusPresent];
+ for (int i = 0; i < cpusPresent; i++) {
+ cpuFreqMax[i] = 0; // Frequency "not yet determined".
+ curFreqScales[i] = 0;
+ maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
+ curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
+ }
+
+ lastProcStat = new ProcStat(0, 0, 0);
+ resetStat();
+
+ initialized = true;
+ }
+
+ // Zeroes all moving averages and resets the log-rate-limit timestamp.
+ private synchronized void resetStat() {
+ userCpuUsage.reset();
+ systemCpuUsage.reset();
+ totalCpuUsage.reset();
+ frequencyScale.reset();
+ lastStatLogTimeMs = SystemClock.elapsedRealtime();
+ }
+
+ // Returns the battery level as a 0-100 percentage.
+ // NOTE(review): registerReceiver may return null if the sticky
+ // ACTION_BATTERY_CHANGED broadcast is absent — TODO confirm on target devices.
+ private int getBatteryLevel() {
+ // Use sticky broadcast with null receiver to read battery level once only.
+ Intent intent = appContext.registerReceiver(
+ null /* receiver */, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
+
+ int batteryLevel = 0;
+ int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
+ if (batteryScale > 0) {
+ batteryLevel =
+ (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
+ }
+ return batteryLevel;
+ }
+
+ /**
+ * Re-measure CPU use. Call this method at an interval of around 1/s.
+ * This method returns true on success. The fields
+ * cpuCurrent, cpuAvg3, and cpuAvgAll are updated on success, and represents:
+ * cpuCurrent: The CPU use since the last sampleCpuUtilization call.
+ * cpuAvg3: The average CPU over the last 3 calls.
+ * cpuAvgAll: The average CPU over the last SAMPLE_SAVE_NUMBER calls.
+ */
+ private synchronized boolean sampleCpuUtilization() {
+ long lastSeenMaxFreq = 0;
+ long cpuFreqCurSum = 0;
+ long cpuFreqMaxSum = 0;
+
+ if (!initialized) {
+ init();
+ }
+ if (cpusPresent == 0) {
+ return false;
+ }
+
+ actualCpusPresent = 0;
+ for (int i = 0; i < cpusPresent; i++) {
+ /*
+ * For each CPU, attempt to first read its max frequency, then its
+ * current frequency. Once as the max frequency for a CPU is found,
+ * save it in cpuFreqMax[].
+ */
+
+ curFreqScales[i] = 0;
+ if (cpuFreqMax[i] == 0) {
+ // We have never found this CPU's max frequency. Attempt to read it.
+ long cpufreqMax = readFreqFromFile(maxPath[i]);
+ if (cpufreqMax > 0) {
+ Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
+ lastSeenMaxFreq = cpufreqMax;
+ cpuFreqMax[i] = cpufreqMax;
+ maxPath[i] = null; // Kill path to free its memory.
+ }
+ } else {
+ lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
+ }
+
+ long cpuFreqCur = readFreqFromFile(curPath[i]);
+ if (cpuFreqCur == 0 && lastSeenMaxFreq == 0) {
+ // No current frequency information for this CPU core - ignore it.
+ continue;
+ }
+ if (cpuFreqCur > 0) {
+ actualCpusPresent++;
+ }
+ cpuFreqCurSum += cpuFreqCur;
+
+ /* Here, lastSeenMaxFreq might come from
+ * 1. cpuFreq[i], or
+ * 2. a previous iteration, or
+ * 3. a newly read value, or
+ * 4. hypothetically from the pre-loop dummy.
+ */
+ cpuFreqMaxSum += lastSeenMaxFreq;
+ if (lastSeenMaxFreq > 0) {
+ curFreqScales[i] = (double) cpuFreqCur / lastSeenMaxFreq;
+ }
+ }
+
+ if (cpuFreqCurSum == 0 || cpuFreqMaxSum == 0) {
+ Log.e(TAG, "Could not read max or current frequency for any CPU");
+ return false;
+ }
+
+ /*
+ * Since the cycle counts are for the period between the last invocation
+ * and this present one, we average the percentual CPU frequencies between
+ * now and the beginning of the measurement period. This is significantly
+ * incorrect only if the frequencies have peeked or dropped in between the
+ * invocations.
+ */
+ double currentFrequencyScale = cpuFreqCurSum / (double) cpuFreqMaxSum;
+ if (frequencyScale.getCurrent() > 0) {
+ currentFrequencyScale = (frequencyScale.getCurrent() + currentFrequencyScale) * 0.5;
+ }
+
+ ProcStat procStat = readProcStat();
+ if (procStat == null) {
+ return false;
+ }
+
+ // Deltas of the cumulative jiffy counters since the previous sample.
+ long diffUserTime = procStat.userTime - lastProcStat.userTime;
+ long diffSystemTime = procStat.systemTime - lastProcStat.systemTime;
+ long diffIdleTime = procStat.idleTime - lastProcStat.idleTime;
+ long allTime = diffUserTime + diffSystemTime + diffIdleTime;
+
+ if (currentFrequencyScale == 0 || allTime == 0) {
+ return false;
+ }
+
+ // Update statistics.
+ frequencyScale.addValue(currentFrequencyScale);
+
+ double currentUserCpuUsage = diffUserTime / (double) allTime;
+ userCpuUsage.addValue(currentUserCpuUsage);
+
+ double currentSystemCpuUsage = diffSystemTime / (double) allTime;
+ systemCpuUsage.addValue(currentSystemCpuUsage);
+
+ double currentTotalCpuUsage =
+ (currentUserCpuUsage + currentSystemCpuUsage) * currentFrequencyScale;
+ totalCpuUsage.addValue(currentTotalCpuUsage);
+
+ // Save new measurements for next round's deltas.
+ lastProcStat = procStat;
+
+ return true;
+ }
+
+ // Rounds a 0..1 fraction to the nearest integer percentage.
+ private int doubleToPercent(double d) {
+ return (int) (d * 100 + 0.5);
+ }
+
+ // Builds the one-line "current/average" stat summary logged periodically.
+ private synchronized String getStatString() {
+ StringBuilder stat = new StringBuilder();
+ stat.append("CPU User: ")
+ .append(doubleToPercent(userCpuUsage.getCurrent()))
+ .append("/")
+ .append(doubleToPercent(userCpuUsage.getAverage()))
+ .append(". System: ")
+ .append(doubleToPercent(systemCpuUsage.getCurrent()))
+ .append("/")
+ .append(doubleToPercent(systemCpuUsage.getAverage()))
+ .append(". Freq: ")
+ .append(doubleToPercent(frequencyScale.getCurrent()))
+ .append("/")
+ .append(doubleToPercent(frequencyScale.getAverage()))
+ .append(". Total usage: ")
+ .append(doubleToPercent(totalCpuUsage.getCurrent()))
+ .append("/")
+ .append(doubleToPercent(totalCpuUsage.getAverage()))
+ .append(". Cores: ")
+ .append(actualCpusPresent);
+ stat.append("( ");
+ for (int i = 0; i < cpusPresent; i++) {
+ stat.append(doubleToPercent(curFreqScales[i])).append(" ");
+ }
+ stat.append("). Battery: ").append(getBatteryLevel());
+ if (cpuOveruse) {
+ stat.append(". Overuse.");
+ }
+ return stat.toString();
+ }
+
+ /**
+ * Read a single integer value from the named file. Return the read value
+ * or if an error occurs return 0.
+ */
+ private long readFreqFromFile(String fileName) {
+ long number = 0;
+ try {
+ BufferedReader reader = new BufferedReader(new FileReader(fileName));
+ try {
+ String line = reader.readLine();
+ number = parseLong(line);
+ } finally {
+ reader.close();
+ }
+ } catch (FileNotFoundException e) {
+ // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+ // is not present. This is not an error.
+ } catch (IOException e) {
+ // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
+ // is empty. This is not an error.
+ }
+ return number;
+ }
+
+ // Parses |value| as a long; logs and returns 0 on malformed input.
+ private static long parseLong(String value) {
+ long number = 0;
+ try {
+ number = Long.parseLong(value);
+ } catch (NumberFormatException e) {
+ Log.e(TAG, "parseLong error.", e);
+ }
+ return number;
+ }
+
+ /*
+ * Read the current utilization of all CPUs using the cumulative first line
+ * of /proc/stat.
+ */
+ private ProcStat readProcStat() {
+ long userTime = 0;
+ long systemTime = 0;
+ long idleTime = 0;
+ try {
+ BufferedReader reader = new BufferedReader(new FileReader("/proc/stat"));
+ try {
+ // line should contain something like this:
+ // cpu 5093818 271838 3512830 165934119 101374 447076 272086 0 0 0
+ // user nice system idle iowait irq softirq
+ String line = reader.readLine();
+ String lines[] = line.split("\\s+");
+ int length = lines.length;
+ if (length >= 5) {
+ userTime = parseLong(lines[1]); // user
+ userTime += parseLong(lines[2]); // nice
+ systemTime = parseLong(lines[3]); // system
+ idleTime = parseLong(lines[4]); // idle
+ }
+ if (length >= 8) {
+ userTime += parseLong(lines[5]); // iowait
+ systemTime += parseLong(lines[6]); // irq
+ systemTime += parseLong(lines[7]); // softirq
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Problems parsing /proc/stat", e);
+ return null;
+ } finally {
+ reader.close();
+ }
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Cannot open /proc/stat for reading", e);
+ return null;
+ } catch (IOException e) {
+ Log.e(TAG, "Problems reading /proc/stat", e);
+ return null;
+ }
+ return new ProcStat(userTime, systemTime, idleTime);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/DirectRTCClient.java b/app/src/main/java/com/myhexaville/androidwebrtc/DirectRTCClient.java
new file mode 100644
index 0000000..86b66c8
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/DirectRTCClient.java
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.util.Log;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+import java.util.LinkedList;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Implementation of AppRTCClient that uses direct TCP connection as the signaling channel.
+ * This eliminates the need for an external server. This class does not support loopback
+ * connections.
+ */
+public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChannelEvents {
+ private static final String TAG = "DirectRTCClient";
+ private static final int DEFAULT_PORT = 8888;
+
+ // Regex pattern used for checking if room id looks like an IP.
+ static final Pattern IP_PATTERN = Pattern.compile("("
+ // IPv4
+ + "((\\d+\\.){3}\\d+)|"
+ // IPv6
+ + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
+ + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
+ + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
+ // IPv6 without []
+ + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
+ + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
+ // Literals
+ + "localhost"
+ + ")"
+ // Optional port number
+ + "(:(\\d+))?");
+
+ private final ExecutorService executor;
+ private final SignalingEvents events;
+ private TCPChannelClient tcpClient;
+ private RoomConnectionParameters connectionParameters;
+
+ private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+ // All alterations of the room state should be done from inside the looper thread.
+ private ConnectionState roomState;
+
+ public DirectRTCClient(SignalingEvents events) {
+ this.events = events;
+
+ executor = Executors.newSingleThreadExecutor();
+ roomState = ConnectionState.NEW;
+ }
+
+ /**
+ * Connects to the room, roomId in connectionsParameters is required. roomId must be a valid
+ * IP address matching IP_PATTERN.
+ */
+ @Override
+ public void connectToRoom(RoomConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+
+ if (connectionParameters.loopback) {
+ reportError("Loopback connections aren't supported by DirectRTCClient.");
+ }
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ connectToRoomInternal();
+ }
+ });
+ }
+
+ @Override
+ public void disconnectFromRoom() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ disconnectFromRoomInternal();
+ }
+ });
+ }
+
+ /**
+ * Connects to the room.
+ *
+ * Runs on the looper thread.
+ */
+ private void connectToRoomInternal() {
+ this.roomState = ConnectionState.NEW;
+
+ String endpoint = connectionParameters.roomId;
+
+ Matcher matcher = IP_PATTERN.matcher(endpoint);
+ if (!matcher.matches()) {
+ reportError("roomId must match IP_PATTERN for DirectRTCClient.");
+ return;
+ }
+
+ String ip = matcher.group(1);
+ String portStr = matcher.group(matcher.groupCount());
+ int port;
+
+ if (portStr != null) {
+ try {
+ port = Integer.parseInt(portStr);
+ } catch (NumberFormatException e) {
+ reportError("Invalid port number: " + portStr);
+ return;
+ }
+ } else {
+ port = DEFAULT_PORT;
+ }
+
+ tcpClient = new TCPChannelClient(executor, this, ip, port);
+ }
+
+ /**
+ * Disconnects from the room.
+ *
+ * Runs on the looper thread.
+ */
+ private void disconnectFromRoomInternal() {
+ roomState = ConnectionState.CLOSED;
+
+ if (tcpClient != null) {
+ tcpClient.disconnect();
+ tcpClient = null;
+ }
+ executor.shutdown();
+ }
+
+ @Override
+ public void sendOfferSdp(final SessionDescription sdp) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending offer SDP in non connected state.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "offer");
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ @Override
+ public void sendAnswerSdp(final SessionDescription sdp) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "answer");
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ @Override
+ public void sendLocalIceCandidate(final IceCandidate candidate) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "candidate");
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate in non connected state.");
+ return;
+ }
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ /** Send removed Ice candidates to the other participant. */
+ @Override
+ public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "remove-candidates");
+ JSONArray jsonArray = new JSONArray();
+ for (final IceCandidate candidate : candidates) {
+ jsonArray.put(toJsonCandidate(candidate));
+ }
+ jsonPut(json, "candidates", jsonArray);
+
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate removals in non connected state.");
+ return;
+ }
+ sendMessage(json.toString());
+ }
+ });
+ }
+
+ // -------------------------------------------------------------------
+ // TCPChannelClient event handlers
+
+ /**
+ * If the client is the server side, this will trigger onConnectedToRoom.
+ */
+ @Override
+ public void onTCPConnected(boolean isServer) {
+ if (isServer) {
+ roomState = ConnectionState.CONNECTED;
+
+ SignalingParameters parameters = new SignalingParameters(
+ // Ice servers are not needed for direct connections.
+ new LinkedList(),
+ isServer, // Server side acts as the initiator on direct connections.
+ null, // clientId
+ null, // wssUrl
+ null, // wwsPostUrl
+ null, // offerSdp
+ null // iceCandidates
+ );
+ events.onConnectedToRoom(parameters);
+ }
+ }
+
+ @Override
+ public void onTCPMessage(String msg) {
+ try {
+ JSONObject json = new JSONObject(msg);
+ String type = json.optString("type");
+ if (type.equals("candidate")) {
+ events.onRemoteIceCandidate(toJavaCandidate(json));
+ } else if (type.equals("remove-candidates")) {
+ JSONArray candidateArray = json.getJSONArray("candidates");
+ IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+ for (int i = 0; i < candidateArray.length(); ++i) {
+ candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+ }
+ events.onRemoteIceCandidatesRemoved(candidates);
+ } else if (type.equals("answer")) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else if (type.equals("offer")) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+
+ SignalingParameters parameters = new SignalingParameters(
+ // Ice servers are not needed for direct connections.
+ new LinkedList(),
+ false, // This code will only be run on the client side. So, we are not the initiator.
+ null, // clientId
+ null, // wssUrl
+ null, // wssPostUrl
+ sdp, // offerSdp
+ null // iceCandidates
+ );
+ roomState = ConnectionState.CONNECTED;
+ events.onConnectedToRoom(parameters);
+ } else {
+ reportError("Unexpected TCP message: " + msg);
+ }
+ } catch (JSONException e) {
+ reportError("TCP message JSON parsing error: " + e.toString());
+ }
+ }
+
+ @Override
+ public void onTCPError(String description) {
+ reportError("TCP connection error: " + description);
+ }
+
+ @Override
+ public void onTCPClose() {
+ events.onChannelClose();
+ }
+
+ // --------------------------------------------------------------------
+ // Helper functions.
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.ERROR) {
+ roomState = ConnectionState.ERROR;
+ events.onChannelError(errorMessage);
+ }
+ }
+ });
+ }
+
+ private void sendMessage(final String message) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ tcpClient.send(message);
+ }
+ });
+ }
+
+ // Put a |key|->|value| mapping in |json|.
+ private static void jsonPut(JSONObject json, String key, Object value) {
+ try {
+ json.put(key, value);
+ } catch (JSONException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ // Converts a Java candidate to a JSONObject.
+ private static JSONObject toJsonCandidate(final IceCandidate candidate) {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ return json;
+ }
+
+ // Converts a JSON candidate to a Java object.
+ private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+ return new IceCandidate(
+ json.getString("id"), json.getInt("label"), json.getString("candidate"));
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/HudFragment.java b/app/src/main/java/com/myhexaville/androidwebrtc/HudFragment.java
new file mode 100644
index 0000000..37505ad
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/HudFragment.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.TypedValue;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.TextView;
+
+import org.webrtc.StatsReport;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Fragment for HUD (heads-up display) statistics: bandwidth estimation, active connection,
+ * and per-direction video stats, toggled via the debug button.
+ */
+public class HudFragment extends Fragment {
+  private View controlView;
+  private TextView encoderStatView;
+  private TextView hudViewBwe;
+  private TextView hudViewConnection;
+  private TextView hudViewVideoSend;
+  private TextView hudViewVideoRecv;
+  private ImageButton toggleDebugButton;
+  private boolean videoCallEnabled;
+  private boolean displayHud;
+  // volatile: flipped on the UI thread in onStart/onStop, read by updateEncoderStatistics.
+  private volatile boolean isRunning;
+  private CpuMonitor cpuMonitor;
+
+  @Override
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    controlView = inflater.inflate(R.layout.fragment_hud, container, false);
+
+    // Create UI controls.
+    encoderStatView = (TextView) controlView.findViewById(R.id.encoder_stat_call);
+    hudViewBwe = (TextView) controlView.findViewById(R.id.hud_stat_bwe);
+    hudViewConnection = (TextView) controlView.findViewById(R.id.hud_stat_connection);
+    hudViewVideoSend = (TextView) controlView.findViewById(R.id.hud_stat_video_send);
+    hudViewVideoRecv = (TextView) controlView.findViewById(R.id.hud_stat_video_recv);
+    toggleDebugButton = (ImageButton) controlView.findViewById(R.id.button_toggle_debug);
+
+    // The debug button toggles the detailed stat views on and off.
+    toggleDebugButton.setOnClickListener(new View.OnClickListener() {
+      @Override
+      public void onClick(View view) {
+        if (displayHud) {
+          int visibility =
+              (hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
+          hudViewsSetProperties(visibility);
+        }
+      }
+    });
+
+    return controlView;
+  }
+
+  @Override
+  public void onStart() {
+    super.onStart();
+
+    Bundle args = getArguments();
+    if (args != null) {
+      videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
+      displayHud = args.getBoolean(CallActivity.EXTRA_DISPLAY_HUD, false);
+    }
+    int visibility = displayHud ? View.VISIBLE : View.INVISIBLE;
+    encoderStatView.setVisibility(visibility);
+    toggleDebugButton.setVisibility(visibility);
+    // Detailed stat views start hidden until the debug button is pressed.
+    hudViewsSetProperties(View.INVISIBLE);
+    isRunning = true;
+  }
+
+  @Override
+  public void onStop() {
+    isRunning = false;
+    super.onStop();
+  }
+
+  public void setCpuMonitor(CpuMonitor cpuMonitor) {
+    this.cpuMonitor = cpuMonitor;
+  }
+
+  private void hudViewsSetProperties(int visibility) {
+    hudViewBwe.setVisibility(visibility);
+    hudViewConnection.setVisibility(visibility);
+    hudViewVideoSend.setVisibility(visibility);
+    hudViewVideoRecv.setVisibility(visibility);
+    hudViewBwe.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
+    hudViewConnection.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
+    hudViewVideoSend.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
+    hudViewVideoRecv.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
+  }
+
+  // Flattens a StatsReport's values into a name -> value map for keyed lookups.
+  private Map<String, String> getReportMap(StatsReport report) {
+    Map<String, String> reportMap = new HashMap<String, String>();
+    for (StatsReport.Value value : report.values) {
+      reportMap.put(value.name, value.value);
+    }
+    return reportMap;
+  }
+
+  /** Renders the given stats reports into the HUD views. No-op unless running with HUD on. */
+  public void updateEncoderStatistics(final StatsReport[] reports) {
+    if (!isRunning || !displayHud) {
+      return;
+    }
+    StringBuilder encoderStat = new StringBuilder(128);
+    StringBuilder bweStat = new StringBuilder();
+    StringBuilder connectionStat = new StringBuilder();
+    StringBuilder videoSendStat = new StringBuilder();
+    StringBuilder videoRecvStat = new StringBuilder();
+    String fps = null;
+    String targetBitrate = null;
+    String actualBitrate = null;
+
+    for (StatsReport report : reports) {
+      if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
+        // Send video statistics.
+        Map<String, String> reportMap = getReportMap(report);
+        String trackId = reportMap.get("googTrackId");
+        if (trackId != null && trackId.contains(PeerConnectionClient.VIDEO_TRACK_ID)) {
+          fps = reportMap.get("googFrameRateSent");
+          videoSendStat.append(report.id).append("\n");
+          for (StatsReport.Value value : report.values) {
+            String name = value.name.replace("goog", "");
+            videoSendStat.append(name).append("=").append(value.value).append("\n");
+          }
+        }
+      } else if (report.type.equals("ssrc") && report.id.contains("ssrc")
+          && report.id.contains("recv")) {
+        // Receive video statistics.
+        Map<String, String> reportMap = getReportMap(report);
+        // Check if this stat is for video track.
+        String frameWidth = reportMap.get("googFrameWidthReceived");
+        if (frameWidth != null) {
+          videoRecvStat.append(report.id).append("\n");
+          for (StatsReport.Value value : report.values) {
+            String name = value.name.replace("goog", "");
+            videoRecvStat.append(name).append("=").append(value.value).append("\n");
+          }
+        }
+      } else if (report.id.equals("bweforvideo")) {
+        // BWE statistics.
+        Map<String, String> reportMap = getReportMap(report);
+        targetBitrate = reportMap.get("googTargetEncBitrate");
+        actualBitrate = reportMap.get("googActualEncBitrate");
+
+        bweStat.append(report.id).append("\n");
+        for (StatsReport.Value value : report.values) {
+          String name = value.name.replace("goog", "").replace("Available", "");
+          bweStat.append(name).append("=").append(value.value).append("\n");
+        }
+      } else if (report.type.equals("googCandidatePair")) {
+        // Connection statistics.
+        Map<String, String> reportMap = getReportMap(report);
+        String activeConnection = reportMap.get("googActiveConnection");
+        if (activeConnection != null && activeConnection.equals("true")) {
+          connectionStat.append(report.id).append("\n");
+          for (StatsReport.Value value : report.values) {
+            String name = value.name.replace("goog", "");
+            connectionStat.append(name).append("=").append(value.value).append("\n");
+          }
+        }
+      }
+    }
+    hudViewBwe.setText(bweStat.toString());
+    hudViewConnection.setText(connectionStat.toString());
+    hudViewVideoSend.setText(videoSendStat.toString());
+    hudViewVideoRecv.setText(videoRecvStat.toString());
+
+    if (videoCallEnabled) {
+      if (fps != null) {
+        encoderStat.append("Fps: ").append(fps).append("\n");
+      }
+      if (targetBitrate != null) {
+        encoderStat.append("Target BR: ").append(targetBitrate).append("\n");
+      }
+      if (actualBitrate != null) {
+        encoderStat.append("Actual BR: ").append(actualBitrate).append("\n");
+      }
+    }
+
+    if (cpuMonitor != null) {
+      encoderStat.append("CPU%: ")
+          .append(cpuMonitor.getCpuUsageCurrent())
+          .append("/")
+          .append(cpuMonitor.getCpuUsageAverage())
+          .append(". Freq: ")
+          .append(cpuMonitor.getFrequencyScaleAverage());
+    }
+    encoderStatView.setText(encoderStat.toString());
+  }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/PeerConnectionClient.java b/app/src/main/java/com/myhexaville/androidwebrtc/PeerConnectionClient.java
new file mode 100644
index 0000000..6779730
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/PeerConnectionClient.java
@@ -0,0 +1,1261 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.content.Context;
+import android.os.Environment;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+import com.myhexaville.androidwebrtc.AppRTCClient.SignalingParameters;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.webrtc.AudioSource;
+import org.webrtc.AudioTrack;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.DataChannel;
+import org.webrtc.EglBase;
+import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
+import org.webrtc.MediaConstraints;
+import org.webrtc.MediaStream;
+import org.webrtc.PeerConnection;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RtpParameters;
+import org.webrtc.RtpSender;
+import org.webrtc.SdpObserver;
+import org.webrtc.SessionDescription;
+import org.webrtc.StatsObserver;
+import org.webrtc.StatsReport;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoRenderer;
+import org.webrtc.VideoSource;
+import org.webrtc.VideoTrack;
+import org.webrtc.voiceengine.WebRtcAudioManager;
+import org.webrtc.voiceengine.WebRtcAudioUtils;
+
+/**
+ * Peer connection client implementation.
+ *
+ * All public methods are routed to local looper thread.
+ * All PeerConnectionEvents callbacks are invoked from the same looper thread.
+ * This class is a singleton.
+ */
+public class PeerConnectionClient {
+ // Track ids / media stream labels used for the local tracks.
+ public static final String VIDEO_TRACK_ID = "ARDAMSv0";
+ public static final String AUDIO_TRACK_ID = "ARDAMSa0";
+ public static final String VIDEO_TRACK_TYPE = "video";
+ private static final String TAG = "PCRTCClient";
+ // Codec names as they appear in SDP.
+ private static final String VIDEO_CODEC_VP8 = "VP8";
+ private static final String VIDEO_CODEC_VP9 = "VP9";
+ private static final String VIDEO_CODEC_H264 = "H264";
+ private static final String AUDIO_CODEC_OPUS = "opus";
+ private static final String AUDIO_CODEC_ISAC = "ISAC";
+ // SDP parameter / media-constraint keys understood by WebRTC.
+ private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
+ private static final String VIDEO_FLEXFEC_FIELDTRIAL = "WebRTC-FlexFEC-03/Enabled/";
+ private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
+ private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
+ private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
+ private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
+ private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
+ private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
+ private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
+ private static final int HD_VIDEO_WIDTH = 1280;
+ private static final int HD_VIDEO_HEIGHT = 720;
+ private static final int BPS_IN_KBPS = 1000;
+
+ // Eagerly-created singleton; all API calls are serialized on |executor|.
+ private static final PeerConnectionClient instance = new PeerConnectionClient();
+ private final PCObserver pcObserver = new PCObserver();
+ private final SDPObserver sdpObserver = new SDPObserver();
+ private final ScheduledExecutorService executor;
+
+ private Context context;
+ private PeerConnectionFactory factory;
+ private PeerConnection peerConnection;
+ PeerConnectionFactory.Options options = null;
+ private AudioSource audioSource;
+ private VideoSource videoSource;
+ private boolean videoCallEnabled;
+ private boolean preferIsac;
+ private String preferredVideoCodec;
+ private boolean videoCapturerStopped;
+ private boolean isError;
+ private Timer statsTimer;
+ private VideoRenderer.Callbacks localRender;
+ private List remoteRenders;
+ private SignalingParameters signalingParameters;
+ private MediaConstraints pcConstraints;
+ private int videoWidth;
+ private int videoHeight;
+ private int videoFps;
+ private MediaConstraints audioConstraints;
+ private ParcelFileDescriptor aecDumpFileDescriptor;
+ private MediaConstraints sdpMediaConstraints;
+ private PeerConnectionParameters peerConnectionParameters;
+ // Queued remote ICE candidates are consumed only after both local and
+ // remote descriptions are set. Similarly local ICE candidates are sent to
+ // remote peer after both local and remote description are set.
+ private LinkedList queuedRemoteCandidates;
+ private PeerConnectionEvents events;
+ private boolean isInitiator;
+ private SessionDescription localSdp; // either offer or answer SDP
+ private MediaStream mediaStream;
+ private VideoCapturer videoCapturer;
+ // enableVideo is set to true if video should be rendered and sent.
+ private boolean renderVideo;
+ private VideoTrack localVideoTrack;
+ private VideoTrack remoteVideoTrack;
+ private RtpSender localVideoSender;
+ // enableAudio is set to true if audio should be sent.
+ private boolean enableAudio;
+ private AudioTrack localAudioTrack;
+ private DataChannel dataChannel;
+ // True when PeerConnectionParameters carried a non-null DataChannelParameters.
+ private boolean dataChannelEnabled;
+
+ /**
+  * Configuration for the optional RTCDataChannel: ordering, retransmission limits,
+  * subprotocol, negotiation mode and channel id.
+  */
+ public static class DataChannelParameters {
+ public final boolean ordered;
+ public final int maxRetransmitTimeMs;
+ public final int maxRetransmits;
+ public final String protocol;
+ public final boolean negotiated;
+ public final int id;
+
+ public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /**
+  * Peer connection parameters: immutable configuration for a call (video/audio codecs,
+  * resolution, bitrates, audio-processing flags and optional data channel settings).
+  */
+ public static class PeerConnectionParameters {
+ public final boolean videoCallEnabled;
+ public final boolean loopback;
+ public final boolean tracing;
+ public final int videoWidth;
+ public final int videoHeight;
+ public final int videoFps;
+ public final int videoMaxBitrate;
+ public final String videoCodec;
+ public final boolean videoCodecHwAcceleration;
+ public final boolean videoFlexfecEnabled;
+ public final int audioStartBitrate;
+ public final String audioCodec;
+ public final boolean noAudioProcessing;
+ public final boolean aecDump;
+ public final boolean useOpenSLES;
+ public final boolean disableBuiltInAEC;
+ public final boolean disableBuiltInAGC;
+ public final boolean disableBuiltInNS;
+ public final boolean enableLevelControl;
+ // null disables the data channel (see dataChannelEnabled in the client).
+ private final DataChannelParameters dataChannelParameters;
+
+ // Convenience constructor without data channel support.
+ public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+ int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
+ boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
+ String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
+ boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
+ boolean enableLevelControl) {
+ this(videoCallEnabled, loopback, tracing, videoWidth, videoHeight, videoFps, videoMaxBitrate,
+ videoCodec, videoCodecHwAcceleration, videoFlexfecEnabled, audioStartBitrate, audioCodec,
+ noAudioProcessing, aecDump, useOpenSLES, disableBuiltInAEC, disableBuiltInAGC,
+ disableBuiltInNS, enableLevelControl, null);
+ }
+
+ public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+ int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
+ boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
+ String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
+ boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
+ boolean enableLevelControl, DataChannelParameters dataChannelParameters) {
+ this.videoCallEnabled = videoCallEnabled;
+ this.loopback = loopback;
+ this.tracing = tracing;
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ this.videoFps = videoFps;
+ this.videoMaxBitrate = videoMaxBitrate;
+ this.videoCodec = videoCodec;
+ this.videoFlexfecEnabled = videoFlexfecEnabled;
+ this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+ this.audioStartBitrate = audioStartBitrate;
+ this.audioCodec = audioCodec;
+ this.noAudioProcessing = noAudioProcessing;
+ this.aecDump = aecDump;
+ this.useOpenSLES = useOpenSLES;
+ this.disableBuiltInAEC = disableBuiltInAEC;
+ this.disableBuiltInAGC = disableBuiltInAGC;
+ this.disableBuiltInNS = disableBuiltInNS;
+ this.enableLevelControl = enableLevelControl;
+ this.dataChannelParameters = dataChannelParameters;
+ }
+ }
+
+ /**
+  * Peer connection events.
+  *
+  * Per the class contract above, all callbacks are invoked from the same looper thread.
+  */
+ public interface PeerConnectionEvents {
+ /**
+ * Callback fired once local SDP is created and set.
+ */
+ void onLocalDescription(final SessionDescription sdp);
+
+ /**
+ * Callback fired once local Ice candidate is generated.
+ */
+ void onIceCandidate(final IceCandidate candidate);
+
+ /**
+ * Callback fired once local ICE candidates are removed.
+ */
+ void onIceCandidatesRemoved(final IceCandidate[] candidates);
+
+ /**
+ * Callback fired once connection is established (IceConnectionState is
+ * CONNECTED).
+ */
+ void onIceConnected();
+
+ /**
+ * Callback fired once connection is closed (IceConnectionState is
+ * DISCONNECTED).
+ */
+ void onIceDisconnected();
+
+ /**
+ * Callback fired once peer connection is closed.
+ */
+ void onPeerConnectionClosed();
+
+ /**
+ * Callback fired once peer connection statistics is ready.
+ */
+ void onPeerConnectionStatsReady(final StatsReport[] reports);
+
+ /**
+ * Callback fired once peer connection error happened.
+ */
+ void onPeerConnectionError(final String description);
+ }
+
+ private PeerConnectionClient() {
+ // Executor thread is started once in private ctor and is used for all
+ // peer connection API calls to ensure new peer connection factory is
+ // created on the same thread as previously destroyed factory.
+ executor = Executors.newSingleThreadScheduledExecutor();
+ }
+
+ /** Returns the process-wide singleton instance. */
+ public static PeerConnectionClient getInstance() {
+ return instance;
+ }
+
+ /** Stores factory options; they are read when the factory is (re)created. */
+ public void setPeerConnectionFactoryOptions(PeerConnectionFactory.Options options) {
+ this.options = options;
+ }
+
+ /**
+  * Resets all per-call state (synchronously, on the caller's thread) and then creates the
+  * PeerConnectionFactory asynchronously on the executor thread. Must be called before
+  * createPeerConnection(), which logs an error if the parameters were never set.
+  */
+ public void createPeerConnectionFactory(final Context context,
+ final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
+ this.peerConnectionParameters = peerConnectionParameters;
+ this.events = events;
+ videoCallEnabled = peerConnectionParameters.videoCallEnabled;
+ dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
+ // Reset variables to initial states.
+ // The context field is re-assigned from the |context| parameter once the factory is
+ // successfully created on the executor thread.
+ this.context = null;
+ factory = null;
+ peerConnection = null;
+ preferIsac = false;
+ videoCapturerStopped = false;
+ isError = false;
+ queuedRemoteCandidates = null;
+ localSdp = null; // either offer or answer SDP
+ mediaStream = null;
+ videoCapturer = null;
+ renderVideo = true;
+ localVideoTrack = null;
+ remoteVideoTrack = null;
+ localVideoSender = null;
+ enableAudio = true;
+ localAudioTrack = null;
+ statsTimer = new Timer();
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ createPeerConnectionFactoryInternal(context);
+ }
+ });
+ }
+
+ /** Convenience overload for a single remote renderer. */
+ public void createPeerConnection(final EglBase.Context renderEGLContext,
+ final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
+ final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+ createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
+ videoCapturer, signalingParameters);
+ }
+ /**
+  * Creates the peer connection asynchronously on the executor thread. Requires that
+  * createPeerConnectionFactory() has been called first.
+  */
+ public void createPeerConnection(final EglBase.Context renderEGLContext,
+ final VideoRenderer.Callbacks localRender, final List remoteRenders,
+ final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+ if (peerConnectionParameters == null) {
+ Log.e(TAG, "Creating peer connection without initializing factory.");
+ return;
+ }
+ this.localRender = localRender;
+ this.remoteRenders = remoteRenders;
+ this.videoCapturer = videoCapturer;
+ this.signalingParameters = signalingParameters;
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ createMediaConstraintsInternal();
+ createPeerConnectionInternal(renderEGLContext);
+ } catch (Exception e) {
+ // Report the failure to |events| first, then rethrow so it is not silently swallowed.
+ reportError("Failed to create peer connection: " + e.getMessage());
+ throw e;
+ }
+ }
+ });
+ }
+
+ /** Closes the peer connection asynchronously on the executor thread. */
+ public void close() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ closeInternal();
+ }
+ });
+ }
+
+ /** Returns whether this call was configured with video enabled. */
+ public boolean isVideoCallEnabled() {
+ return videoCallEnabled;
+ }
+
+ /**
+  * Creates the PeerConnectionFactory: sets up tracing and field trials, selects the
+  * preferred codecs, configures WebRTC audio-processing flags, and initializes Android
+  * globals.
+  *
+  * Runs on the executor thread.
+  */
+ private void createPeerConnectionFactoryInternal(Context context) {
+ PeerConnectionFactory.initializeInternalTracer();
+ if (peerConnectionParameters.tracing) {
+ PeerConnectionFactory.startInternalTracingCapture(
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ + "webrtc-trace.txt");
+ }
+ Log.d(TAG,
+ "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
+ isError = false;
+
+ // Initialize field trials.
+ if (peerConnectionParameters.videoFlexfecEnabled) {
+ PeerConnectionFactory.initializeFieldTrials(VIDEO_FLEXFEC_FIELDTRIAL);
+ Log.d(TAG, "Enable FlexFEC field trial.");
+ } else {
+ PeerConnectionFactory.initializeFieldTrials("");
+ }
+
+ // Check preferred video codec. Defaults to VP8 unless VP9/H264 was requested.
+ preferredVideoCodec = VIDEO_CODEC_VP8;
+ if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
+ if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
+ preferredVideoCodec = VIDEO_CODEC_VP9;
+ } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
+ preferredVideoCodec = VIDEO_CODEC_H264;
+ }
+ }
+ Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
+
+ // Check if ISAC is used by default.
+ preferIsac = peerConnectionParameters.audioCodec != null
+ && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
+
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
+ } else {
+ Log.d(TAG, "Allow OpenSL ES audio if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
+ }
+
+ // Note: passing true to the setWebRtcBased...() calls below disables the device's
+ // built-in effect in favor of the WebRTC software implementation.
+ if (peerConnectionParameters.disableBuiltInAEC) {
+ Log.d(TAG, "Disable built-in AEC even if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
+ } else {
+ Log.d(TAG, "Enable built-in AEC if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
+ }
+
+ if (peerConnectionParameters.disableBuiltInAGC) {
+ Log.d(TAG, "Disable built-in AGC even if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
+ } else {
+ Log.d(TAG, "Enable built-in AGC if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
+ }
+
+ if (peerConnectionParameters.disableBuiltInNS) {
+ Log.d(TAG, "Disable built-in NS even if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
+ } else {
+ Log.d(TAG, "Enable built-in NS if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
+ }
+
+ // Create peer connection factory.
+ if (!PeerConnectionFactory.initializeAndroidGlobals(
+ context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
+ events.onPeerConnectionError("Failed to initializeAndroidGlobals");
+ }
+ if (options != null) {
+ Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
+ }
+ this.context = context;
+ factory = new PeerConnectionFactory(options);
+ Log.d(TAG, "Peer connection factory created.");
+ }
+
+ // Builds the three MediaConstraints sets (peer connection, audio, SDP) and
+ // resolves the effective capture resolution/fps. Must run before
+ // createPeerConnectionInternal(), which consumes these fields.
+ private void createMediaConstraintsInternal() {
+ // Create peer connection constraints.
+ pcConstraints = new MediaConstraints();
+ // Enable DTLS for normal calls and disable for loopback calls.
+ if (peerConnectionParameters.loopback) {
+ pcConstraints.optional.add(
+ new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
+ } else {
+ pcConstraints.optional.add(
+ new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
+ }
+
+ // Check if there is a camera on device and disable video call if not.
+ if (videoCapturer == null) {
+ Log.w(TAG, "No camera on device. Switch to audio only call.");
+ videoCallEnabled = false;
+ }
+ // Create video constraints if video call is enabled.
+ if (videoCallEnabled) {
+ videoWidth = peerConnectionParameters.videoWidth;
+ videoHeight = peerConnectionParameters.videoHeight;
+ videoFps = peerConnectionParameters.videoFps;
+
+ // If video resolution is not specified, default to HD.
+ if (videoWidth == 0 || videoHeight == 0) {
+ videoWidth = HD_VIDEO_WIDTH;
+ videoHeight = HD_VIDEO_HEIGHT;
+ }
+
+ // If fps is not specified, default to 30.
+ if (videoFps == 0) {
+ videoFps = 30;
+ }
+ Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
+ }
+
+ // Create audio constraints.
+ audioConstraints = new MediaConstraints();
+ // Added for audio performance measurements: turn off all software audio
+ // processing so the raw microphone signal is sent.
+ if (peerConnectionParameters.noAudioProcessing) {
+ Log.d(TAG, "Disabling audio processing");
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
+ }
+ if (peerConnectionParameters.enableLevelControl) {
+ Log.d(TAG, "Enabling level control.");
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
+ }
+ // Create SDP constraints. Audio is always received; video reception is
+ // offered only for video calls (or loopback, which needs both directions).
+ sdpMediaConstraints = new MediaConstraints();
+ sdpMediaConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+ if (videoCallEnabled || peerConnectionParameters.loopback) {
+ sdpMediaConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
+ } else {
+ sdpMediaConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
+ }
+ }
+
+ // Creates the PeerConnection, the local media stream/tracks and, when
+ // enabled, a data channel and an aecdump recording. Runs on the executor
+ // thread after createMediaConstraintsInternal() has populated the
+ // constraint fields. No-op if the factory is missing or an error occurred.
+ private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
+ if (factory == null || isError) {
+ Log.e(TAG, "Peerconnection factory is not created");
+ return;
+ }
+ Log.d(TAG, "Create peer connection.");
+
+ Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
+ // Remote candidates that arrive before the remote description is set are
+ // queued here and drained later. Fix: use the diamond operator instead of
+ // a raw LinkedList, which defeated generic type checking.
+ queuedRemoteCandidates = new LinkedList<>();
+
+ if (videoCallEnabled) {
+ Log.d(TAG, "EGLContext: " + renderEGLContext);
+ factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
+ }
+
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
+ // TCP candidates are only useful when connecting to a server that supports
+ // ICE-TCP.
+ rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
+ rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
+ rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
+ rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+ // Use ECDSA encryption.
+ rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
+
+ peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
+
+ if (dataChannelEnabled) {
+ DataChannel.Init init = new DataChannel.Init();
+ init.ordered = peerConnectionParameters.dataChannelParameters.ordered;
+ init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated;
+ init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits;
+ init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs;
+ init.id = peerConnectionParameters.dataChannelParameters.id;
+ init.protocol = peerConnectionParameters.dataChannelParameters.protocol;
+ dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
+ }
+ isInitiator = false;
+
+ // Set default WebRTC tracing and INFO libjingle logging.
+ // NOTE: this _must_ happen while |factory| is alive!
+ Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
+ Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
+
+ mediaStream = factory.createLocalMediaStream("ARDAMS");
+ if (videoCallEnabled) {
+ mediaStream.addTrack(createVideoTrack(videoCapturer));
+ }
+
+ mediaStream.addTrack(createAudioTrack());
+ peerConnection.addStream(mediaStream);
+ if (videoCallEnabled) {
+ findVideoSender();
+ }
+
+ // Optionally record an AEC dump to external storage for audio debugging.
+ if (peerConnectionParameters.aecDump) {
+ try {
+ aecDumpFileDescriptor =
+ ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+ + File.separator + "Download/audio.aecdump"),
+ ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ | ParcelFileDescriptor.MODE_TRUNCATE);
+ factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
+ } catch (IOException e) {
+ Log.e(TAG, "Can not open aecdump file", e);
+ }
+ }
+
+ Log.d(TAG, "Peer connection created.");
+ }
+
+ // Disposes all WebRTC objects. Disposal order matters: data channel and
+ // peer connection first, then sources/capturer, and the factory last,
+ // since the factory owns native resources the others depend on.
+ private void closeInternal() {
+ // Stop the aecdump before the factory goes away.
+ if (factory != null && peerConnectionParameters.aecDump) {
+ factory.stopAecDump();
+ }
+ Log.d(TAG, "Closing peer connection.");
+ statsTimer.cancel();
+ if (dataChannel != null) {
+ dataChannel.dispose();
+ dataChannel = null;
+ }
+ if (peerConnection != null) {
+ peerConnection.dispose();
+ peerConnection = null;
+ }
+ Log.d(TAG, "Closing audio source.");
+ if (audioSource != null) {
+ audioSource.dispose();
+ audioSource = null;
+ }
+ Log.d(TAG, "Stopping capture.");
+ if (videoCapturer != null) {
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ // Interruption during shutdown is treated as fatal here.
+ throw new RuntimeException(e);
+ }
+ videoCapturerStopped = true;
+ videoCapturer.dispose();
+ videoCapturer = null;
+ }
+ Log.d(TAG, "Closing video source.");
+ if (videoSource != null) {
+ videoSource.dispose();
+ videoSource = null;
+ }
+ Log.d(TAG, "Closing peer connection factory.");
+ if (factory != null) {
+ factory.dispose();
+ factory = null;
+ }
+ options = null;
+ Log.d(TAG, "Closing peer connection done.");
+ events.onPeerConnectionClosed();
+ // Tear down process-wide tracing last.
+ PeerConnectionFactory.stopInternalTracingCapture();
+ PeerConnectionFactory.shutdownInternalTracer();
+ }
+
+ // A call counts as HD when video is enabled and the configured capture
+ // resolution has at least as many pixels as 720p (1280x720).
+ public boolean isHDVideo() {
+ return videoCallEnabled && (videoWidth * videoHeight >= 1280 * 720);
+ }
+
+ // Requests a stats snapshot and forwards the reports to the events callback.
+ // Silently skipped when the connection is gone or an error occurred.
+ private void getStats() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ StatsObserver statsObserver = new StatsObserver() {
+ @Override
+ public void onComplete(final StatsReport[] reports) {
+ events.onPeerConnectionStatsReady(reports);
+ }
+ };
+ if (!peerConnection.getStats(statsObserver, null)) {
+ Log.e(TAG, "getStats() returns false!");
+ }
+ }
+
+ // Starts or stops the periodic stats callbacks.
+ // NOTE(review): java.util.Timer cannot be rescheduled after cancel(), so
+ // re-enabling stats after a disable would throw; the catch below masks
+ // that as a logged error — confirm callers never toggle off then on.
+ public void enableStatsEvents(boolean enable, int periodMs) {
+ if (enable) {
+ try {
+ statsTimer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ // Hop to the executor thread; getStats must not run on the timer thread.
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ getStats();
+ }
+ });
+ }
+ }, 0, periodMs);
+ } catch (Exception e) {
+ Log.e(TAG, "Can not schedule statistics timer", e);
+ }
+ } else {
+ statsTimer.cancel();
+ }
+ }
+
+ // Mutes/unmutes the local audio track. The flag is remembered even when
+ // the track does not exist yet, so it applies once the track is created.
+ public void setAudioEnabled(final boolean enable) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ enableAudio = enable;
+ if (localAudioTrack == null) {
+ return;
+ }
+ localAudioTrack.setEnabled(enableAudio);
+ }
+ });
+ }
+
+ // Enables/disables rendering of both the local and the remote video track.
+ // The flag is remembered and applied to tracks created later as well.
+ public void setVideoEnabled(final boolean enable) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ renderVideo = enable;
+ if (localVideoTrack != null) {
+ localVideoTrack.setEnabled(renderVideo);
+ }
+ if (remoteVideoTrack == null) {
+ return;
+ }
+ remoteVideoTrack.setEnabled(renderVideo);
+ }
+ });
+ }
+
+ // Kicks off offer creation on the executor thread; the resulting SDP is
+ // delivered to sdpObserver. Marks this side as the initiator.
+ public void createOffer() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ Log.d(TAG, "PC Create OFFER");
+ isInitiator = true;
+ peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ // Kicks off answer creation on the executor thread; the resulting SDP is
+ // delivered to sdpObserver. Marks this side as the non-initiator.
+ public void createAnswer() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ Log.d(TAG, "PC create ANSWER");
+ isInitiator = false;
+ peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ // Hands a remote ICE candidate to the peer connection. Candidates that
+ // arrive before the remote description is applied are queued and drained
+ // later by drainCandidates().
+ public void addRemoteIceCandidate(final IceCandidate candidate) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ if (queuedRemoteCandidates == null) {
+ peerConnection.addIceCandidate(candidate);
+ } else {
+ queuedRemoteCandidates.add(candidate);
+ }
+ }
+ });
+ }
+
+ public void removeRemoteIceCandidates(final IceCandidate[] candidates) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ // Drain the queued remote candidates if there is any so that
+ // they are processed in the proper order.
+ drainCandidates();
+ peerConnection.removeIceCandidates(candidates);
+ }
+ });
+ }
+
+ // Applies the remote SDP after munging it: preferred audio/video codecs are
+ // moved to the front of the m-line, and an audio start bitrate is injected
+ // when configured. Munging order matters — codec preference first, then
+ // the fmtp bitrate edit.
+ public void setRemoteDescription(final SessionDescription sdp) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ String sdpDescription = sdp.description;
+ if (preferIsac) {
+ sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+ }
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
+ }
+ if (peerConnectionParameters.audioStartBitrate > 0) {
+ sdpDescription = setStartBitrate(
+ AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
+ }
+ Log.d(TAG, "Set remote SDP.");
+ SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
+ peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
+ }
+ });
+ }
+
+ // Pauses local video capture (e.g. when the app goes to the background).
+ // Safe to call repeatedly; does nothing when capture is already stopped.
+ public void stopVideoSource() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (videoCapturer != null && !videoCapturerStopped) {
+ Log.d(TAG, "Stop video source.");
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ // Fix: do not swallow the interrupt — restore the thread's
+ // interrupt status so callers up the stack can observe it.
+ Thread.currentThread().interrupt();
+ }
+ videoCapturerStopped = true;
+ }
+ }
+ });
+ }
+
+ // Resumes local video capture at the previously negotiated resolution/fps.
+ // No-op unless capture was previously stopped.
+ public void startVideoSource() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (videoCapturer == null || !videoCapturerStopped) {
+ return;
+ }
+ Log.d(TAG, "Restart video source.");
+ videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
+ videoCapturerStopped = false;
+ }
+ });
+ }
+
+ // Caps the outgoing video bitrate on every encoding of the local video
+ // sender. Passing null removes the cap.
+ public void setVideoMaxBitrate(final Integer maxBitrateKbps) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ // Fix: the original repeated the localVideoSender null check below;
+ // this single guard covers all preconditions.
+ if (peerConnection == null || localVideoSender == null || isError) {
+ return;
+ }
+ Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
+
+ RtpParameters parameters = localVideoSender.getParameters();
+ if (parameters.encodings.size() == 0) {
+ Log.w(TAG, "RtpParameters are not ready.");
+ return;
+ }
+
+ for (RtpParameters.Encoding encoding : parameters.encodings) {
+ // Null value means no limit.
+ encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
+ }
+ if (!localVideoSender.setParameters(parameters)) {
+ Log.e(TAG, "RtpSender.setParameters failed.");
+ }
+ Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
+ }
+ });
+ }
+
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, "Peerconnection error: " + errorMessage);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (!isError) {
+ events.onPeerConnectionError(errorMessage);
+ isError = true;
+ }
+ }
+ });
+ }
+
+ // Creates the local audio source/track from the prepared audio constraints
+ // and applies the current mute state. Stores both in fields for later
+ // disposal; returns the track for adding to the media stream.
+ private AudioTrack createAudioTrack() {
+ audioSource = factory.createAudioSource(audioConstraints);
+ localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
+ localAudioTrack.setEnabled(enableAudio);
+ return localAudioTrack;
+ }
+
+ // Creates the local video source/track from the given capturer, starts
+ // capture at the resolved resolution/fps, and attaches the local renderer.
+ // Stores source and track in fields for later disposal.
+ private VideoTrack createVideoTrack(VideoCapturer capturer) {
+ videoSource = factory.createVideoSource(capturer);
+ capturer.startCapture(videoWidth, videoHeight, videoFps);
+
+ localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
+ localVideoTrack.setEnabled(renderVideo);
+ localVideoTrack.addRenderer(new VideoRenderer(localRender));
+ return localVideoTrack;
+ }
+
+ // Scans the peer connection's senders and remembers the one carrying the
+ // video track, so setVideoMaxBitrate() can adjust its RtpParameters later.
+ private void findVideoSender() {
+ for (RtpSender sender : peerConnection.getSenders()) {
+ if (sender.track() == null) {
+ continue;
+ }
+ if (VIDEO_TRACK_TYPE.equals(sender.track().kind())) {
+ Log.d(TAG, "Found video sender.");
+ localVideoSender = sender;
+ }
+ }
+ }
+
+ /**
+ * Injects a start/max bitrate for |codec| into an SDP blob.
+ *
+ * Finds the codec's rtpmap line to learn its payload type, then either
+ * appends the bitrate parameter to an existing a=fmtp line for that payload
+ * type, or inserts a brand-new a=fmtp line directly after the rtpmap line.
+ * Video uses VIDEO_CODEC_PARAM_START_BITRATE in kbps; audio uses
+ * AUDIO_CODEC_PARAM_BITRATE in bps (hence the * 1000). Returns the SDP
+ * unchanged if the codec is not present.
+ */
+ private static String setStartBitrate(
+ String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
+ String[] lines = sdpDescription.split("\r\n");
+ int rtpmapLineIndex = -1;
+ boolean sdpFormatUpdated = false;
+ String codecRtpMap = null;
+ // Search for codec rtpmap in format
+ // a=rtpmap:<payload type> <codec>/<clock rate>[/<channels>]
+ String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+ Pattern codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ codecRtpMap = codecMatcher.group(1);
+ rtpmapLineIndex = i;
+ break;
+ }
+ }
+ if (codecRtpMap == null) {
+ Log.w(TAG, "No rtpmap for " + codec + " codec");
+ return sdpDescription;
+ }
+ Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
+
+ // Check if a=fmtp string already exist in remote SDP for this codec and
+ // update it with new bitrate parameter.
+ regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
+ codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ Log.d(TAG, "Found " + codec + " " + lines[i]);
+ if (isVideoCodec) {
+ lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ // Audio bitrate parameter is expressed in bps.
+ lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Update remote SDP line: " + lines[i]);
+ sdpFormatUpdated = true;
+ break;
+ }
+ }
+
+ // Rebuild the SDP, inserting a new a=fmtp line after the rtpmap line if no
+ // existing fmtp line was updated above.
+ StringBuilder newSdpDescription = new StringBuilder();
+ for (int i = 0; i < lines.length; i++) {
+ newSdpDescription.append(lines[i]).append("\r\n");
+ // Append new a=fmtp line if no such line exist for a codec.
+ if (!sdpFormatUpdated && i == rtpmapLineIndex) {
+ String bitrateSet;
+ if (isVideoCodec) {
+ bitrateSet =
+ "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+ + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Add remote SDP line: " + bitrateSet);
+ newSdpDescription.append(bitrateSet).append("\r\n");
+ }
+ }
+ return newSdpDescription.toString();
+ }
+
+ /**
+ * Reorders the payload-type list of the audio or video m-line so that
+ * |codec|'s payload type comes first, making it the preferred codec.
+ *
+ * Looks up the codec's payload type via its a=rtpmap line, then rewrites
+ * the matching "m=audio ..." / "m=video ..." line with that payload type
+ * moved to the front. Returns the SDP unchanged when either the m-line or
+ * the codec's rtpmap is missing.
+ */
+ private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
+ String[] lines = sdpDescription.split("\r\n");
+ int mLineIndex = -1;
+ String codecRtpMap = null;
+ // a=rtpmap:<payload type> <codec>/<clock rate>[/<channels>]
+ String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+ Pattern codecPattern = Pattern.compile(regex);
+ String mediaDescription = "m=video ";
+ if (isAudio) {
+ mediaDescription = "m=audio ";
+ }
+ // Single pass: stop once both the m-line and the rtpmap have been found.
+ for (int i = 0; (i < lines.length) && (mLineIndex == -1 || codecRtpMap == null); i++) {
+ if (lines[i].startsWith(mediaDescription)) {
+ mLineIndex = i;
+ continue;
+ }
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ codecRtpMap = codecMatcher.group(1);
+ }
+ }
+ if (mLineIndex == -1) {
+ Log.w(TAG, "No " + mediaDescription + " line, so can't prefer " + codec);
+ return sdpDescription;
+ }
+ if (codecRtpMap == null) {
+ Log.w(TAG, "No rtpmap for " + codec);
+ return sdpDescription;
+ }
+ Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at " + lines[mLineIndex]);
+ String[] origMLineParts = lines[mLineIndex].split(" ");
+ if (origMLineParts.length > 3) {
+ StringBuilder newMLine = new StringBuilder();
+ int origPartIndex = 0;
+ // Format is: m=<media> <port> <proto> <payload types...>
+ // Copy the three fixed fields, then put the preferred payload type
+ // first and append the remaining payload types in original order.
+ newMLine.append(origMLineParts[origPartIndex++]).append(" ");
+ newMLine.append(origMLineParts[origPartIndex++]).append(" ");
+ newMLine.append(origMLineParts[origPartIndex++]).append(" ");
+ newMLine.append(codecRtpMap);
+ for (; origPartIndex < origMLineParts.length; origPartIndex++) {
+ if (!origMLineParts[origPartIndex].equals(codecRtpMap)) {
+ newMLine.append(" ").append(origMLineParts[origPartIndex]);
+ }
+ }
+ lines[mLineIndex] = newMLine.toString();
+ Log.d(TAG, "Change media description: " + lines[mLineIndex]);
+ } else {
+ Log.e(TAG, "Wrong SDP media description format: " + lines[mLineIndex]);
+ }
+ StringBuilder newSdpDescription = new StringBuilder();
+ for (String line : lines) {
+ newSdpDescription.append(line).append("\r\n");
+ }
+ return newSdpDescription.toString();
+ }
+
+ // Applies all remote ICE candidates that were queued while waiting for the
+ // remote description, then switches to pass-through mode by nulling the
+ // queue (addRemoteIceCandidate checks for null).
+ private void drainCandidates() {
+ if (queuedRemoteCandidates == null) {
+ return;
+ }
+ Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
+ for (IceCandidate candidate : queuedRemoteCandidates) {
+ peerConnection.addIceCandidate(candidate);
+ }
+ queuedRemoteCandidates = null;
+ }
+
+ // Switches between front and back camera when the capturer supports it.
+ private void switchCameraInternal() {
+ // Fix: the original also tested videoCapturer == null inside the
+ // instanceof branch, which is unreachable (instanceof is false for null).
+ if (!(videoCapturer instanceof CameraVideoCapturer)) {
+ Log.d(TAG, "Will not switch camera, video capturer is not a camera");
+ return;
+ }
+ if (!videoCallEnabled || isError) {
+ Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError);
+ return; // No video is sent or only one camera is available or error happened.
+ }
+ Log.d(TAG, "Switch camera");
+ CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
+ cameraVideoCapturer.switchCamera(null);
+ }
+
+ public void switchCamera() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraInternal();
+ }
+ });
+ }
+
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ changeCaptureFormatInternal(width, height, framerate);
+ }
+ });
+ }
+
+ // Asks the video source to adapt its output to the given resolution and
+ // frame rate. Runs on the executor thread; no-op (with an error log) when
+ // video is disabled, an error occurred, or there is no capturer.
+ private void changeCaptureFormatInternal(int width, int height, int framerate) {
+ if (!videoCallEnabled || isError || videoCapturer == null) {
+ Log.e(TAG,
+ "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
+ return;
+ }
+ Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ videoSource.adaptOutputFormat(width, height, framerate);
+ }
+
+ // Implementation detail: observe ICE & stream changes and react accordingly.
+ // All WebRTC callbacks arrive on native signaling threads and are re-posted
+ // onto the executor before touching client state or the events callback.
+ private class PCObserver implements PeerConnection.Observer {
+ @Override
+ public void onIceCandidate(final IceCandidate candidate) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ events.onIceCandidate(candidate);
+ }
+ });
+ }
+
+ @Override
+ public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ events.onIceCandidatesRemoved(candidates);
+ }
+ });
+ }
+
+ @Override
+ public void onSignalingChange(PeerConnection.SignalingState newState) {
+ Log.d(TAG, "SignalingState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionChange(final IceConnectionState newState) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ Log.d(TAG, "IceConnectionState: " + newState);
+ if (newState == IceConnectionState.CONNECTED) {
+ events.onIceConnected();
+ } else if (newState == IceConnectionState.DISCONNECTED) {
+ events.onIceDisconnected();
+ } else if (newState == IceConnectionState.FAILED) {
+ reportError("ICE connection failed.");
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
+ Log.d(TAG, "IceGatheringState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionReceivingChange(boolean receiving) {
+ Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
+ }
+
+ @Override
+ public void onAddStream(final MediaStream stream) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) {
+ reportError("Weird-looking stream: " + stream);
+ return;
+ }
+ // Attach every configured remote renderer to the incoming video track.
+ if (stream.videoTracks.size() == 1) {
+ remoteVideoTrack = stream.videoTracks.get(0);
+ remoteVideoTrack.setEnabled(renderVideo);
+ for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
+ remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
+ }
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRemoveStream(final MediaStream stream) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ remoteVideoTrack = null;
+ }
+ });
+ }
+
+ @Override
+ public void onDataChannel(final DataChannel dc) {
+ Log.d(TAG, "New Data channel " + dc.label());
+
+ if (!dataChannelEnabled)
+ return;
+
+ dc.registerObserver(new DataChannel.Observer() {
+ // Fix: annotation was missing on this interface method.
+ @Override
+ public void onBufferedAmountChange(long previousAmount) {
+ // Fix: log the previous amount that this callback reports instead
+ // of only the channel state.
+ Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state()
+ + ", previous amount: " + previousAmount);
+ }
+
+ @Override
+ public void onStateChange() {
+ Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
+ }
+
+ @Override
+ public void onMessage(final DataChannel.Buffer buffer) {
+ if (buffer.binary) {
+ Log.d(TAG, "Received binary msg over " + dc);
+ return;
+ }
+ ByteBuffer data = buffer.data;
+ final byte[] bytes = new byte[data.capacity()];
+ data.get(bytes);
+ String strData = new String(bytes);
+ Log.d(TAG, "Got msg: " + strData + " over " + dc);
+ }
+ });
+ }
+
+ @Override
+ public void onRenegotiationNeeded() {
+ // No need to do anything; AppRTC follows a pre-agreed-upon
+ // signaling/negotiation protocol.
+ }
+ }
+
+ // Implementation detail: handle offer creation/signaling and answer setting,
+ // as well as adding remote ICE candidates once the answer SDP is set.
+ private class SDPObserver implements SdpObserver {
+ @Override
+ public void onCreateSuccess(final SessionDescription origSdp) {
+ if (localSdp != null) {
+ reportError("Multiple SDP create.");
+ return;
+ }
+ // Munge the generated SDP to move the preferred audio/video codecs to
+ // the front before it is set locally and signaled to the peer.
+ String sdpDescription = origSdp.description;
+ if (preferIsac) {
+ sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+ }
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
+ }
+ final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
+ localSdp = sdp;
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "Set local SDP from " + sdp.type);
+ peerConnection.setLocalDescription(sdpObserver, sdp);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onSetSuccess() {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ if (isInitiator) {
+ // For offering peer connection we first create offer and set
+ // local SDP, then after receiving answer set remote SDP.
+ if (peerConnection.getRemoteDescription() == null) {
+ // We've just set our local SDP so time to send it.
+ // (Fix: corrected "succesfully" typo in the log messages below.)
+ Log.d(TAG, "Local SDP set successfully");
+ events.onLocalDescription(localSdp);
+ } else {
+ // We've just set remote description, so drain remote
+ // and send local ICE candidates.
+ Log.d(TAG, "Remote SDP set successfully");
+ drainCandidates();
+ }
+ } else {
+ // For answering peer connection we set remote SDP and then
+ // create answer and set local SDP.
+ if (peerConnection.getLocalDescription() != null) {
+ // We've just set our local SDP so time to send it, drain
+ // remote and send local ICE candidates.
+ Log.d(TAG, "Local SDP set successfully");
+ events.onLocalDescription(localSdp);
+ drainCandidates();
+ } else {
+ // We've just set remote SDP - do nothing for now -
+ // answer will be created soon.
+ Log.d(TAG, "Remote SDP set successfully");
+ }
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onCreateFailure(final String error) {
+ reportError("createSDP error: " + error);
+ }
+
+ @Override
+ public void onSetFailure(final String error) {
+ reportError("setSDP error: " + error);
+ }
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/PercentFrameLayout.java b/app/src/main/java/com/myhexaville/androidwebrtc/PercentFrameLayout.java
new file mode 100644
index 0000000..d510654
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/PercentFrameLayout.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.View;
+import android.view.ViewGroup;
+
+/**
+ * Simple container that confines the children to a subrectangle specified as percentage values of
+ * the container size. The children are centered horizontally and vertically inside the confined
+ * space.
+ */
+public class PercentFrameLayout extends ViewGroup {
+  // Sub-rectangle expressed as percentages of this view's size:
+  // (xPercent, yPercent) is the top-left offset, widthPercent/heightPercent
+  // the extent. Defaults cover the whole container.
+  private int xPercent = 0;
+  private int yPercent = 0;
+  private int widthPercent = 100;
+  private int heightPercent = 100;
+
+  public PercentFrameLayout(Context context) {
+    super(context);
+  }
+
+  public PercentFrameLayout(Context context, AttributeSet attrs) {
+    super(context, attrs);
+  }
+
+  public PercentFrameLayout(Context context, AttributeSet attrs, int defStyleAttr) {
+    super(context, attrs, defStyleAttr);
+  }
+
+  /**
+   * Sets the sub-rectangle (all values in percent of the container size).
+   * NOTE(review): does not call requestLayout(); presumably callers trigger
+   * a relayout themselves — confirm at call sites.
+   */
+  public void setPosition(int xPercent, int yPercent, int widthPercent, int heightPercent) {
+    this.xPercent = xPercent;
+    this.yPercent = yPercent;
+    this.widthPercent = widthPercent;
+    this.heightPercent = heightPercent;
+  }
+
+  @Override
+  public boolean shouldDelayChildPressedState() {
+    return false;
+  }
+
+  @Override
+  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+    // Take as much space as offered; children are then measured against the
+    // percentage sub-rectangle with AT_MOST so they can shrink to fit.
+    final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
+    final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
+    setMeasuredDimension(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
+        MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
+
+    final int childWidthMeasureSpec =
+        MeasureSpec.makeMeasureSpec(width * widthPercent / 100, MeasureSpec.AT_MOST);
+    final int childHeightMeasureSpec =
+        MeasureSpec.makeMeasureSpec(height * heightPercent / 100, MeasureSpec.AT_MOST);
+    for (int i = 0; i < getChildCount(); ++i) {
+      final View child = getChildAt(i);
+      if (child.getVisibility() != GONE) {
+        child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
+      }
+    }
+  }
+
+  @Override
+  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+    final int width = right - left;
+    final int height = bottom - top;
+    // Sub-rectangle specified by percentage values.
+    final int subWidth = width * widthPercent / 100;
+    final int subHeight = height * heightPercent / 100;
+    final int subLeft = left + width * xPercent / 100;
+    final int subTop = top + height * yPercent / 100;
+
+    for (int i = 0; i < getChildCount(); ++i) {
+      final View child = getChildAt(i);
+      if (child.getVisibility() != GONE) {
+        final int childWidth = child.getMeasuredWidth();
+        final int childHeight = child.getMeasuredHeight();
+        // Center child both vertically and horizontally inside the
+        // sub-rectangle.
+        final int childLeft = subLeft + (subWidth - childWidth) / 2;
+        final int childTop = subTop + (subHeight - childHeight) / 2;
+        child.layout(childLeft, childTop, childLeft + childWidth, childTop + childHeight);
+      }
+    }
+  }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/RoomParametersFetcher.java b/app/src/main/java/com/myhexaville/androidwebrtc/RoomParametersFetcher.java
new file mode 100644
index 0000000..53269a7
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/RoomParametersFetcher.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+
+import android.util.Log;
+
+import com.myhexaville.androidwebrtc.AppRTCClient.SignalingParameters;
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection;
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.PeerConnection;
+import org.webrtc.SessionDescription;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.LinkedList;
+import java.util.Scanner;
+
+/**
+ * Asynchronously converts an AppRTC room URL into the set of signaling
+ * parameters to use with that room.
+ */
+public class RoomParametersFetcher {
+ private static final String TAG = "RoomRTCClient";
+ private static final int TURN_HTTP_TIMEOUT_MS = 5000;
+ private final RoomParametersFetcherEvents events;
+ private final String roomUrl;
+ private final String roomMessage;
+ private AsyncHttpURLConnection httpConnection;
+
+ /**
+ * Room parameters fetcher callbacks.
+ */
+ public interface RoomParametersFetcherEvents {
+ /**
+ * Callback fired once the room's signaling parameters
+ * SignalingParameters are extracted.
+ */
+ void onSignalingParametersReady(final SignalingParameters params);
+
+ /**
+ * Callback for room parameters extraction error.
+ */
+ void onSignalingParametersError(final String description);
+ }
+
+ /**
+ * Creates a fetcher that will POST |roomMessage| to |roomUrl| and report the
+ * extracted signaling parameters (or an error) through |events|.
+ */
+ public RoomParametersFetcher(
+ String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
+ this.roomUrl = roomUrl;
+ this.roomMessage = roomMessage;
+ this.events = events;
+ }
+
+ /** Starts the asynchronous registration request to the room server. */
+ public void makeRequest() {
+ Log.d(TAG, "Connecting to room: " + roomUrl);
+ httpConnection =
+ new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ Log.e(TAG, "Room connection error: " + errorMessage);
+ events.onSignalingParametersError(errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {
+ roomHttpResponseParse(response);
+ }
+ });
+ httpConnection.send();
+ }
+
+ // Parses the room server response; for non-initiators also extracts the
+ // stored offer/candidates, then requests TURN servers if none are present.
+ private void roomHttpResponseParse(String response) {
+ Log.d(TAG, "Room response: " + response);
+ try {
+ LinkedList<IceCandidate> iceCandidates = null;
+ SessionDescription offerSdp = null;
+ JSONObject roomJson = new JSONObject(response);
+
+ String result = roomJson.getString("result");
+ if (!result.equals("SUCCESS")) {
+ events.onSignalingParametersError("Room response error: " + result);
+ return;
+ }
+ response = roomJson.getString("params");
+ roomJson = new JSONObject(response);
+ String roomId = roomJson.getString("room_id");
+ String clientId = roomJson.getString("client_id");
+ String wssUrl = roomJson.getString("wss_url");
+ String wssPostUrl = roomJson.getString("wss_post_url");
+ boolean initiator = (roomJson.getBoolean("is_initiator"));
+ if (!initiator) {
+ // The callee receives the offer and any candidates the caller has
+ // already posted to the room.
+ iceCandidates = new LinkedList<>();
+ String messagesString = roomJson.getString("messages");
+ JSONArray messages = new JSONArray(messagesString);
+ for (int i = 0; i < messages.length(); ++i) {
+ String messageString = messages.getString(i);
+ JSONObject message = new JSONObject(messageString);
+ String messageType = message.getString("type");
+ Log.d(TAG, "GAE->C #" + i + " : " + messageString);
+ if (messageType.equals("offer")) {
+ offerSdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
+ } else if (messageType.equals("candidate")) {
+ IceCandidate candidate = new IceCandidate(
+ message.getString("id"), message.getInt("label"), message.getString("candidate"));
+ iceCandidates.add(candidate);
+ } else {
+ Log.e(TAG, "Unknown message: " + messageString);
+ }
+ }
+ }
+ Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
+ Log.d(TAG, "Initiator: " + initiator);
+ Log.d(TAG, "WSS url: " + wssUrl);
+ Log.d(TAG, "WSS POST url: " + wssPostUrl);
+
+ LinkedList<PeerConnection.IceServer> iceServers =
+ iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
+ boolean isTurnPresent = false;
+ for (PeerConnection.IceServer server : iceServers) {
+ Log.d(TAG, "IceServer: " + server);
+ if (server.uri.startsWith("turn:")) {
+ isTurnPresent = true;
+ break;
+ }
+ }
+ // Request TURN servers.
+ if (!isTurnPresent) {
+ LinkedList<PeerConnection.IceServer> turnServers =
+ requestTurnServers(roomJson.getString("ice_server_url"));
+ for (PeerConnection.IceServer turnServer : turnServers) {
+ Log.d(TAG, "TurnServer: " + turnServer);
+ iceServers.add(turnServer);
+ }
+ }
+
+ SignalingParameters params = new SignalingParameters(
+ iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
+ events.onSignalingParametersReady(params);
+ } catch (JSONException e) {
+ events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
+ } catch (IOException e) {
+ events.onSignalingParametersError("Room IO error: " + e.toString());
+ }
+ }
+
+ // Requests & returns a TURN ICE Server based on a request URL. Must be run
+ // off the main thread!
+ private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
+ throws IOException, JSONException {
+ LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<>();
+ Log.d(TAG, "Request TURN from: " + url);
+ HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+ connection.setDoOutput(true);
+ connection.setRequestProperty("REFERER", "https://appr.tc");
+ connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
+ connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
+ int responseCode = connection.getResponseCode();
+ if (responseCode != 200) {
+ throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+ + connection.getHeaderField(null));
+ }
+ InputStream responseStream = connection.getInputStream();
+ String response = drainStream(responseStream);
+ connection.disconnect();
+ Log.d(TAG, "TURN response: " + response);
+ JSONObject responseJSON = new JSONObject(response);
+ JSONArray iceServers = responseJSON.getJSONArray("iceServers");
+ for (int i = 0; i < iceServers.length(); ++i) {
+ JSONObject server = iceServers.getJSONObject(i);
+ JSONArray turnUrls = server.getJSONArray("urls");
+ String username = server.has("username") ? server.getString("username") : "";
+ String credential = server.has("credential") ? server.getString("credential") : "";
+ for (int j = 0; j < turnUrls.length(); j++) {
+ String turnUrl = turnUrls.getString(j);
+ turnServers.add(new PeerConnection.IceServer(turnUrl, username, credential));
+ }
+ }
+ return turnServers;
+ }
+
+ // Return the list of ICE servers described by a WebRTCPeerConnection
+ // configuration string.
+ private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
+ throws JSONException {
+ JSONObject json = new JSONObject(pcConfig);
+ JSONArray servers = json.getJSONArray("iceServers");
+ LinkedList<PeerConnection.IceServer> ret = new LinkedList<>();
+ for (int i = 0; i < servers.length(); ++i) {
+ JSONObject server = servers.getJSONObject(i);
+ String url = server.getString("urls");
+ String credential = server.has("credential") ? server.getString("credential") : "";
+ ret.add(new PeerConnection.IceServer(url, "", credential));
+ }
+ return ret;
+ }
+
+ // Return the contents of an InputStream as a String.
+ // try-with-resources closes the Scanner (and the underlying stream) so the
+ // connection's input is not leaked.
+ private static String drainStream(InputStream in) {
+ try (Scanner s = new Scanner(in).useDelimiter("\\A")) {
+ return s.hasNext() ? s.next() : "";
+ }
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/SettingsActivity.java b/app/src/main/java/com/myhexaville/androidwebrtc/SettingsActivity.java
new file mode 100644
index 0000000..2daa356
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/SettingsActivity.java
@@ -0,0 +1,317 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.app.Activity;
+import android.content.SharedPreferences;
+import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
+import android.os.Bundle;
+import android.preference.ListPreference;
+import android.preference.Preference;
+import android.support.v7.app.AppCompatActivity;
+
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.voiceengine.WebRtcAudioUtils;
+
+/**
+ * Settings activity for AppRTC.
+ */
+public class SettingsActivity extends AppCompatActivity implements OnSharedPreferenceChangeListener {
+ private SettingsFragment settingsFragment;
+ // Video call preference keys.
+ private String keyprefVideoCall;
+ private String keyprefScreencapture;
+ private String keyprefCamera2;
+ private String keyprefResolution;
+ private String keyprefFps;
+ private String keyprefCaptureQualitySlider;
+ private String keyprefMaxVideoBitrateType;
+ private String keyprefMaxVideoBitrateValue;
+ private String keyPrefVideoCodec;
+ private String keyprefHwCodec;
+ private String keyprefCaptureToTexture;
+ private String keyprefFlexfec;
+
+ // Audio preference keys.
+ private String keyprefStartAudioBitrateType;
+ private String keyprefStartAudioBitrateValue;
+ private String keyPrefAudioCodec;
+ private String keyprefNoAudioProcessing;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
+ private String keyprefDisableBuiltInAEC;
+ private String keyprefDisableBuiltInAGC;
+ private String keyprefDisableBuiltInNS;
+ private String keyprefEnableLevelControl;
+ private String keyprefSpeakerphone;
+
+ // Miscellaneous preference keys.
+ private String keyPrefRoomServerUrl;
+ private String keyPrefDisplayHud;
+ private String keyPrefTracing;
+
+ // Data channel preference keys.
+ private String keyprefEnableDataChannel;
+ private String keyprefOrdered;
+ private String keyprefMaxRetransmitTimeMs;
+ private String keyprefMaxRetransmits;
+ private String keyprefDataProtocol;
+ private String keyprefNegotiated;
+ private String keyprefDataId;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ keyprefVideoCall = getString(R.string.pref_videocall_key);
+ keyprefScreencapture = getString(R.string.pref_screencapture_key);
+ keyprefCamera2 = getString(R.string.pref_camera2_key);
+ keyprefResolution = getString(R.string.pref_resolution_key);
+ keyprefFps = getString(R.string.pref_fps_key);
+ keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
+ keyprefMaxVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
+ keyprefMaxVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
+ keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
+ keyprefHwCodec = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
+ keyprefFlexfec = getString(R.string.pref_flexfec_key);
+
+ keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
+ keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
+ keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
+ keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
+ keyprefDisableBuiltInAEC = getString(R.string.pref_disable_built_in_aec_key);
+ keyprefDisableBuiltInAGC = getString(R.string.pref_disable_built_in_agc_key);
+ keyprefDisableBuiltInNS = getString(R.string.pref_disable_built_in_ns_key);
+ keyprefEnableLevelControl = getString(R.string.pref_enable_level_control_key);
+ keyprefSpeakerphone = getString(R.string.pref_speakerphone_key);
+
+ keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
+ keyprefOrdered = getString(R.string.pref_ordered_key);
+ keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
+ keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
+ keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
+ keyprefNegotiated = getString(R.string.pref_negotiated_key);
+ keyprefDataId = getString(R.string.pref_data_id_key);
+
+ keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
+ keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyPrefTracing = getString(R.string.pref_tracing_key);
+
+ // Display the fragment as the main content.
+ settingsFragment = new SettingsFragment();
+ getFragmentManager()
+ .beginTransaction()
+ .replace(android.R.id.content, settingsFragment)
+ .commit();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ // Set summary to be the user-description for the selected value
+ SharedPreferences sharedPreferences =
+ settingsFragment.getPreferenceScreen().getSharedPreferences();
+ sharedPreferences.registerOnSharedPreferenceChangeListener(this);
+ updateSummaryB(sharedPreferences, keyprefVideoCall);
+ updateSummaryB(sharedPreferences, keyprefScreencapture);
+ updateSummaryB(sharedPreferences, keyprefCamera2);
+ updateSummary(sharedPreferences, keyprefResolution);
+ updateSummary(sharedPreferences, keyprefFps);
+ updateSummaryB(sharedPreferences, keyprefCaptureQualitySlider);
+ updateSummary(sharedPreferences, keyprefMaxVideoBitrateType);
+ updateSummaryBitrate(sharedPreferences, keyprefMaxVideoBitrateValue);
+ setVideoBitrateEnable(sharedPreferences);
+ updateSummary(sharedPreferences, keyPrefVideoCodec);
+ updateSummaryB(sharedPreferences, keyprefHwCodec);
+ updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
+ updateSummaryB(sharedPreferences, keyprefFlexfec);
+
+ updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
+ updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
+ setAudioBitrateEnable(sharedPreferences);
+ updateSummary(sharedPreferences, keyPrefAudioCodec);
+ updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
+ updateSummaryB(sharedPreferences, keyprefAecDump);
+ updateSummaryB(sharedPreferences, keyprefOpenSLES);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInAEC);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInAGC);
+ updateSummaryB(sharedPreferences, keyprefDisableBuiltInNS);
+ updateSummaryB(sharedPreferences, keyprefEnableLevelControl);
+ updateSummaryList(sharedPreferences, keyprefSpeakerphone);
+
+ updateSummaryB(sharedPreferences, keyprefEnableDataChannel);
+ updateSummaryB(sharedPreferences, keyprefOrdered);
+ updateSummary(sharedPreferences, keyprefMaxRetransmitTimeMs);
+ updateSummary(sharedPreferences, keyprefMaxRetransmits);
+ updateSummary(sharedPreferences, keyprefDataProtocol);
+ updateSummaryB(sharedPreferences, keyprefNegotiated);
+ updateSummary(sharedPreferences, keyprefDataId);
+ setDataChannelEnable(sharedPreferences);
+
+ updateSummary(sharedPreferences, keyPrefRoomServerUrl);
+ updateSummaryB(sharedPreferences, keyPrefDisplayHud);
+ updateSummaryB(sharedPreferences, keyPrefTracing);
+
+ if (!Camera2Enumerator.isSupported(this)) {
+ Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);
+
+ camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
+ camera2Preference.setEnabled(false);
+ }
+
+ // Disable forcing WebRTC based AEC so it won't affect our value.
+ // Otherwise, if it was enabled, isAcousticEchoCancelerSupported would always return false.
+ WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
+ if (!WebRtcAudioUtils.isAcousticEchoCancelerSupported()) {
+ Preference disableBuiltInAECPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInAEC);
+
+ disableBuiltInAECPreference.setSummary(getString(R.string.pref_built_in_aec_not_available));
+ disableBuiltInAECPreference.setEnabled(false);
+ }
+
+ WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
+ if (!WebRtcAudioUtils.isAutomaticGainControlSupported()) {
+ Preference disableBuiltInAGCPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInAGC);
+
+ disableBuiltInAGCPreference.setSummary(getString(R.string.pref_built_in_agc_not_available));
+ disableBuiltInAGCPreference.setEnabled(false);
+ }
+
+ WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
+ if (!WebRtcAudioUtils.isNoiseSuppressorSupported()) {
+ Preference disableBuiltInNSPreference =
+ settingsFragment.findPreference(keyprefDisableBuiltInNS);
+
+ disableBuiltInNSPreference.setSummary(getString(R.string.pref_built_in_ns_not_available));
+ disableBuiltInNSPreference.setEnabled(false);
+ }
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ SharedPreferences sharedPreferences =
+ settingsFragment.getPreferenceScreen().getSharedPreferences();
+ sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
+ }
+
+ @Override
+ public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
+ // clang-format off
+ if (key.equals(keyprefResolution)
+ || key.equals(keyprefFps)
+ || key.equals(keyprefMaxVideoBitrateType)
+ || key.equals(keyPrefVideoCodec)
+ || key.equals(keyprefStartAudioBitrateType)
+ || key.equals(keyPrefAudioCodec)
+ || key.equals(keyPrefRoomServerUrl)
+ || key.equals(keyprefMaxRetransmitTimeMs)
+ || key.equals(keyprefMaxRetransmits)
+ || key.equals(keyprefDataProtocol)
+ || key.equals(keyprefDataId)) {
+ updateSummary(sharedPreferences, key);
+ } else if (key.equals(keyprefMaxVideoBitrateValue)
+ || key.equals(keyprefStartAudioBitrateValue)) {
+ updateSummaryBitrate(sharedPreferences, key);
+ } else if (key.equals(keyprefVideoCall)
+ || key.equals(keyprefScreencapture)
+ || key.equals(keyprefCamera2)
+ || key.equals(keyPrefTracing)
+ || key.equals(keyprefCaptureQualitySlider)
+ || key.equals(keyprefHwCodec)
+ || key.equals(keyprefCaptureToTexture)
+ || key.equals(keyprefFlexfec)
+ || key.equals(keyprefNoAudioProcessing)
+ || key.equals(keyprefAecDump)
+ || key.equals(keyprefOpenSLES)
+ || key.equals(keyprefDisableBuiltInAEC)
+ || key.equals(keyprefDisableBuiltInAGC)
+ || key.equals(keyprefDisableBuiltInNS)
+ || key.equals(keyprefEnableLevelControl)
+ || key.equals(keyPrefDisplayHud)
+ || key.equals(keyprefEnableDataChannel)
+ || key.equals(keyprefOrdered)
+ || key.equals(keyprefNegotiated)) {
+ updateSummaryB(sharedPreferences, key);
+ } else if (key.equals(keyprefSpeakerphone)) {
+ updateSummaryList(sharedPreferences, key);
+ }
+ // clang-format on
+ if (key.equals(keyprefMaxVideoBitrateType)) {
+ setVideoBitrateEnable(sharedPreferences);
+ }
+ if (key.equals(keyprefStartAudioBitrateType)) {
+ setAudioBitrateEnable(sharedPreferences);
+ }
+ if (key.equals(keyprefEnableDataChannel)) {
+ setDataChannelEnable(sharedPreferences);
+ }
+ }
+
+ /** Shows the stored string value as the preference's summary. */
+ private void updateSummary(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ // Set summary to be the user-description for the selected value
+ updatedPref.setSummary(sharedPreferences.getString(key, ""));
+ }
+
+ /** Shows the stored bitrate value with a "kbps" suffix as the summary. */
+ private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
+ }
+
+ /** Shows "enabled"/"disabled" for a boolean preference as the summary. */
+ private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
+ Preference updatedPref = settingsFragment.findPreference(key);
+ updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
+ ? getString(R.string.pref_value_enabled)
+ : getString(R.string.pref_value_disabled));
+ }
+
+ /** Shows the selected entry of a list preference as its summary. */
+ private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
+ ListPreference updatedPref = (ListPreference) settingsFragment.findPreference(key);
+ updatedPref.setSummary(updatedPref.getEntry());
+ }
+
+ // The bitrate value field is only editable when a non-default bitrate type
+ // has been selected.
+ private void setVideoBitrateEnable(SharedPreferences sharedPreferences) {
+ Preference bitratePreferenceValue =
+ settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
+ String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+ String bitrateType =
+ sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
+ bitratePreferenceValue.setEnabled(!bitrateType.equals(bitrateTypeDefault));
+ }
+
+ // Same rule as setVideoBitrateEnable, for the audio bitrate value field.
+ private void setAudioBitrateEnable(SharedPreferences sharedPreferences) {
+ Preference bitratePreferenceValue =
+ settingsFragment.findPreference(keyprefStartAudioBitrateValue);
+ String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+ String bitrateType =
+ sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
+ bitratePreferenceValue.setEnabled(!bitrateType.equals(bitrateTypeDefault));
+ }
+
+ /** Enables/disables all data-channel sub-preferences together. */
+ private void setDataChannelEnable(SharedPreferences sharedPreferences) {
+ boolean enabled = sharedPreferences.getBoolean(keyprefEnableDataChannel, true);
+ settingsFragment.findPreference(keyprefOrdered).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefMaxRetransmitTimeMs).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefMaxRetransmits).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefDataProtocol).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefNegotiated).setEnabled(enabled);
+ settingsFragment.findPreference(keyprefDataId).setEnabled(enabled);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/SettingsFragment.java b/app/src/main/java/com/myhexaville/androidwebrtc/SettingsFragment.java
new file mode 100644
index 0000000..b773399
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/SettingsFragment.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+
+/**
+ * Settings fragment for AppRTC.
+ */
+public class SettingsFragment extends PreferenceFragment {
+ /** Inflates the preference hierarchy from res/xml/preferences.xml. */
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ // Load the preferences from an XML resource
+ addPreferencesFromResource(R.xml.preferences);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/TCPChannelClient.java b/app/src/main/java/com/myhexaville/androidwebrtc/TCPChannelClient.java
new file mode 100644
index 0000000..ec70700
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/TCPChannelClient.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.util.Log;
+
+import org.webrtc.ThreadUtils;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.util.concurrent.ExecutorService;
+
+/**
+ * Replacement for WebSocketChannelClient for direct communication between two IP addresses. Handles
+ * the signaling between the two clients using a TCP connection.
+ *
+ * All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
+public class TCPChannelClient {
+ private static final String TAG = "TCPChannelClient";
+
+ // Executor on which all public methods must be called and all events fire.
+ private final ExecutorService executor;
+ // Asserts that public methods run on the executor thread.
+ private final ThreadUtils.ThreadChecker executorThreadCheck;
+ private final TCPChannelEvents eventListener;
+ // Server or client socket wrapper; left null if the IP failed to parse.
+ private TCPSocket socket;
+
+ /**
+ * Callback interface for messages delivered on TCP Connection. All callbacks are invoked from the
+ * looper executor thread.
+ */
+ public interface TCPChannelEvents {
+ void onTCPConnected(boolean server);
+ void onTCPMessage(String message);
+ void onTCPError(String description);
+ void onTCPClose();
+ }
+
+ /**
+ * Initializes the TCPChannelClient. If IP is a local IP address, starts a listening server on
+ * that IP. If not, instead connects to the IP.
+ *
+ * @param eventListener Listener that will receive events from the client.
+ * @param ip IP address to listen on or connect to.
+ * @param port Port to listen on or connect to.
+ */
+ public TCPChannelClient(
+ ExecutorService executor, TCPChannelEvents eventListener, String ip, int port) {
+ this.executor = executor;
+ executorThreadCheck = new ThreadUtils.ThreadChecker();
+ // Detach so the checker binds to whichever executor thread calls first.
+ executorThreadCheck.detachThread();
+ this.eventListener = eventListener;
+
+ InetAddress address;
+ try {
+ address = InetAddress.getByName(ip);
+ } catch (UnknownHostException e) {
+ // NOTE(review): |socket| stays null here; a later disconnect()/send()
+ // would throw NullPointerException — confirm callers stop after onTCPError.
+ reportError("Invalid IP address.");
+ return;
+ }
+
+ // A wildcard/local address means we act as the listening side.
+ if (address.isAnyLocalAddress()) {
+ socket = new TCPSocketServer(address, port);
+ } else {
+ socket = new TCPSocketClient(address, port);
+ }
+
+ // Starts the listening thread (TCPSocket extends Thread).
+ socket.start();
+ }
+
+ /**
+ * Disconnects the client if not already disconnected. This will fire the onTCPClose event.
+ */
+ public void disconnect() {
+ executorThreadCheck.checkIsOnValidThread();
+
+ socket.disconnect();
+ }
+
+ /**
+ * Sends a message on the socket.
+ *
+ * @param message Message to be sent.
+ */
+ public void send(String message) {
+ executorThreadCheck.checkIsOnValidThread();
+
+ socket.send(message);
+ }
+
+ /**
+ * Helper method for firing onTCPError events. Calls onTCPError on the executor thread.
+ */
+ private void reportError(final String message) {
+ Log.e(TAG, "TCP Error: " + message);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onTCPError(message);
+ }
+ });
+ }
+
+ /**
+ * Base class for server and client sockets. Contains a listening thread that will call
+ * eventListener.onTCPMessage on new messages.
+ */
+ private abstract class TCPSocket extends Thread {
+ // Lock for editing out and rawSocket
+ protected final Object rawSocketLock;
+ // Writer for outgoing messages; null while disconnected.
+ private PrintWriter out;
+ // Underlying connection; null while disconnected.
+ private Socket rawSocket;
+
+ /**
+ * Connect to the peer, potentially a slow operation.
+ *
+ * @return Socket connection, null if connection failed.
+ */
+ public abstract Socket connect();
+ /** Returns true if sockets is a server rawSocket. */
+ public abstract boolean isServer();
+
+ TCPSocket() {
+ rawSocketLock = new Object();
+ }
+
+ /**
+ * The listening thread.
+ */
+ @Override
+ public void run() {
+ Log.d(TAG, "Listening thread started...");
+
+ // Receive connection to temporary variable first, so we don't block.
+ Socket tempSocket = connect();
+ BufferedReader in;
+
+ Log.d(TAG, "TCP connection established.");
+
+ synchronized (rawSocketLock) {
+ if (rawSocket != null) {
+ // NOTE(review): the old socket is replaced without being closed —
+ // possible descriptor leak if this path is ever taken.
+ Log.e(TAG, "Socket already existed and will be replaced.");
+ }
+
+ rawSocket = tempSocket;
+
+ // Connecting failed, error has already been reported, just exit.
+ if (rawSocket == null) {
+ return;
+ }
+
+ try {
+ // autoFlush=true so each println/flush pushes the line immediately.
+ out = new PrintWriter(rawSocket.getOutputStream(), true);
+ in = new BufferedReader(new InputStreamReader(rawSocket.getInputStream()));
+ } catch (IOException e) {
+ reportError("Failed to open IO on rawSocket: " + e.getMessage());
+ return;
+ }
+ }
+
+ Log.v(TAG, "Execute onTCPConnected");
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ Log.v(TAG, "Run onTCPConnected");
+ eventListener.onTCPConnected(isServer());
+ }
+ });
+
+ // Blocking read loop; one signaling message per line.
+ while (true) {
+ final String message;
+ try {
+ message = in.readLine();
+ } catch (IOException e) {
+ synchronized (rawSocketLock) {
+ // If socket was closed, this is expected.
+ if (rawSocket == null) {
+ break;
+ }
+ }
+
+ reportError("Failed to read from rawSocket: " + e.getMessage());
+ break;
+ }
+
+ // No data received, rawSocket probably closed.
+ if (message == null) {
+ break;
+ }
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ Log.v(TAG, "Receive: " + message);
+ eventListener.onTCPMessage(message);
+ }
+ });
+ }
+
+ Log.d(TAG, "Receiving thread exiting...");
+
+ // Close the rawSocket if it is still open.
+ disconnect();
+ }
+
+ /**
+ * Closes the rawSocket if it is still open. Also fires the onTCPClose event.
+ * Idempotent: a second call finds rawSocket == null and does nothing.
+ */
+ public void disconnect() {
+ try {
+ synchronized (rawSocketLock) {
+ if (rawSocket != null) {
+ rawSocket.close();
+ rawSocket = null;
+ out = null;
+
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onTCPClose();
+ }
+ });
+ }
+ }
+ } catch (IOException e) {
+ reportError("Failed to close rawSocket: " + e.getMessage());
+ }
+ }
+
+ /**
+ * Sends a message on the socket. Should only be called on the executor thread.
+ */
+ public void send(String message) {
+ Log.v(TAG, "Send: " + message);
+
+ synchronized (rawSocketLock) {
+ if (out == null) {
+ reportError("Sending data on closed socket.");
+ return;
+ }
+
+ // Newline-delimited framing, matching the readLine() on the peer side.
+ out.write(message + "\n");
+ out.flush();
+ }
+ }
+ }
+
+ private class TCPSocketServer extends TCPSocket {
+ // Server socket is also guarded by rawSocketLock.
+ private ServerSocket serverSocket;
+
+ final private InetAddress address;
+ final private int port;
+
+ public TCPSocketServer(InetAddress address, int port) {
+ this.address = address;
+ this.port = port;
+ }
+
+ /** Opens a listening socket and waits for a connection. */
+ @Override
+ public Socket connect() {
+ Log.d(TAG, "Listening on [" + address.getHostAddress() + "]:" + Integer.toString(port));
+
+ final ServerSocket tempSocket;
+ try {
+ // backlog=0 requests the implementation default queue length.
+ tempSocket = new ServerSocket(port, 0, address);
+ } catch (IOException e) {
+ reportError("Failed to create server socket: " + e.getMessage());
+ return null;
+ }
+
+ synchronized (rawSocketLock) {
+ if (serverSocket != null) {
+ Log.e(TAG, "Server rawSocket was already listening and new will be opened.");
+ }
+
+ serverSocket = tempSocket;
+ }
+
+ try {
+ // Blocks until a peer connects; unblocked by disconnect() closing the socket.
+ return tempSocket.accept();
+ } catch (IOException e) {
+ reportError("Failed to receive connection: " + e.getMessage());
+ return null;
+ }
+ }
+
+ /** Closes the listening socket and calls super. */
+ @Override
+ public void disconnect() {
+ try {
+ synchronized (rawSocketLock) {
+ if (serverSocket != null) {
+ serverSocket.close();
+ serverSocket = null;
+ }
+ }
+ } catch (IOException e) {
+ reportError("Failed to close server socket: " + e.getMessage());
+ }
+
+ super.disconnect();
+ }
+
+ @Override
+ public boolean isServer() {
+ return true;
+ }
+ }
+
+ private class TCPSocketClient extends TCPSocket {
+ final private InetAddress address;
+ final private int port;
+
+ public TCPSocketClient(InetAddress address, int port) {
+ this.address = address;
+ this.port = port;
+ }
+
+ /** Connects to the peer. */
+ @Override
+ public Socket connect() {
+ Log.d(TAG, "Connecting to [" + address.getHostAddress() + "]:" + Integer.toString(port));
+
+ try {
+ return new Socket(address, port);
+ } catch (IOException e) {
+ reportError("Failed to connect: " + e.getMessage());
+ return null;
+ }
+ }
+
+ @Override
+ public boolean isServer() {
+ return false;
+ }
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/UnhandledExceptionHandler.java b/app/src/main/java/com/myhexaville/androidwebrtc/UnhandledExceptionHandler.java
new file mode 100644
index 0000000..3102217
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/UnhandledExceptionHandler.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.util.Log;
+import android.util.TypedValue;
+import android.widget.ScrollView;
+import android.widget.TextView;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+/**
+ * Singleton helper: install a default unhandled exception handler which shows
+ * an informative dialog and kills the app. Useful for apps whose
+ * error-handling consists of throwing RuntimeExceptions.
+ * NOTE: it is almost always more useful to call
+ * Thread.setDefaultUncaughtExceptionHandler() rather than
+ * Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
+ */
+public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
+ private static final String TAG = "AppRTCMobileActivity";
+ private final Activity activity;
+
+ public UnhandledExceptionHandler(final Activity activity) {
+ this.activity = activity;
+ }
+
+ public void uncaughtException(Thread unusedThread, final Throwable e) {
+ activity.runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ String title = "Fatal error: " + getTopLevelCauseMessage(e);
+ String msg = getRecursiveStackTrace(e);
+ TextView errorView = new TextView(activity);
+ errorView.setText(msg);
+ errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
+ ScrollView scrollingContainer = new ScrollView(activity);
+ scrollingContainer.addView(errorView);
+ Log.e(TAG, title + "\n\n" + msg);
+ DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
+ @Override
+ public void onClick(DialogInterface dialog, int which) {
+ dialog.dismiss();
+ System.exit(1);
+ }
+ };
+ AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+ builder.setTitle(title)
+ .setView(scrollingContainer)
+ .setPositiveButton("Exit", listener)
+ .show();
+ }
+ });
+ }
+
+ // Returns the Message attached to the original Cause of |t|.
+ private static String getTopLevelCauseMessage(Throwable t) {
+ Throwable topLevelCause = t;
+ while (topLevelCause.getCause() != null) {
+ topLevelCause = topLevelCause.getCause();
+ }
+ return topLevelCause.getMessage();
+ }
+
+ // Returns a human-readable String of the stacktrace in |t|, recursively
+ // through all Causes that led to |t|.
+ private static String getRecursiveStackTrace(Throwable t) {
+ StringWriter writer = new StringWriter();
+ t.printStackTrace(new PrintWriter(writer));
+ return writer.toString();
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketChannelClient.java b/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketChannelClient.java
new file mode 100644
index 0000000..69aecd1
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketChannelClient.java
@@ -0,0 +1,297 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+
+import android.os.Handler;
+import android.util.Log;
+
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection;
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+
+import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
+import de.tavendo.autobahn.WebSocketConnection;
+import de.tavendo.autobahn.WebSocketException;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.LinkedList;
+
+/**
+ * WebSocket client implementation.
+ *
+ *
+ * All public methods should be called from a looper executor thread
+ * passed in a constructor, otherwise exception will be thrown.
+ * All events are dispatched on the same thread.
+ */
+
+public class WebSocketChannelClient {
+ private static final String TAG = "WSChannelRTCClient";
+ private static final int CLOSE_TIMEOUT = 1000;
+ private final WebSocketChannelEvents events;
+ private final Handler handler;
+ private WebSocketConnection ws;
+ private WebSocketObserver wsObserver;
+ private String wsServerUrl;
+ private String postServerUrl;
+ private String roomID;
+ private String clientID;
+ private WebSocketConnectionState state;
+ private final Object closeEventLock = new Object();
+ private boolean closeEvent;
+ // WebSocket send queue. Messages are added to the queue when WebSocket
+ // client is not registered and are consumed in register() call.
+ private final LinkedList wsSendQueue;
+
+ /**
+ * Possible WebSocket connection states.
+ */
+ public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
+
+ /**
+ * Callback interface for messages delivered on WebSocket.
+ * All events are dispatched from a looper executor thread.
+ */
+ public interface WebSocketChannelEvents {
+ void onWebSocketMessage(final String message);
+ void onWebSocketClose();
+ void onWebSocketError(final String description);
+ }
+
+ public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
+ this.handler = handler;
+ this.events = events;
+ roomID = null;
+ clientID = null;
+ wsSendQueue = new LinkedList();
+ state = WebSocketConnectionState.NEW;
+ }
+
+ public WebSocketConnectionState getState() {
+ return state;
+ }
+
+ public void connect(final String wsUrl, final String postUrl) {
+ checkIfCalledOnValidThread();
+ if (state != WebSocketConnectionState.NEW) {
+ Log.e(TAG, "WebSocket is already connected.");
+ return;
+ }
+ wsServerUrl = wsUrl;
+ postServerUrl = postUrl;
+ closeEvent = false;
+
+ Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
+ ws = new WebSocketConnection();
+ wsObserver = new WebSocketObserver();
+ try {
+ ws.connect(new URI(wsServerUrl), wsObserver);
+ } catch (URISyntaxException e) {
+ reportError("URI error: " + e.getMessage());
+ } catch (WebSocketException e) {
+ reportError("WebSocket connection error: " + e.getMessage());
+ }
+ }
+
+ public void register(final String roomID, final String clientID) {
+ checkIfCalledOnValidThread();
+ this.roomID = roomID;
+ this.clientID = clientID;
+ if (state != WebSocketConnectionState.CONNECTED) {
+ Log.w(TAG, "WebSocket register() in state " + state);
+ return;
+ }
+ Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
+ JSONObject json = new JSONObject();
+ try {
+ json.put("cmd", "register");
+ json.put("roomid", roomID);
+ json.put("clientid", clientID);
+ Log.d(TAG, "C->WSS: " + json.toString());
+ ws.sendTextMessage(json.toString());
+ state = WebSocketConnectionState.REGISTERED;
+ // Send any previously accumulated messages.
+ for (String sendMessage : wsSendQueue) {
+ send(sendMessage);
+ }
+ wsSendQueue.clear();
+ } catch (JSONException e) {
+ reportError("WebSocket register JSON error: " + e.getMessage());
+ }
+ }
+
+ public void send(String message) {
+ checkIfCalledOnValidThread();
+ switch (state) {
+ case NEW:
+ case CONNECTED:
+ // Store outgoing messages and send them after websocket client
+ // is registered.
+ Log.d(TAG, "WS ACC: " + message);
+ wsSendQueue.add(message);
+ return;
+ case ERROR:
+ case CLOSED:
+ Log.e(TAG, "WebSocket send() in error or closed state : " + message);
+ return;
+ case REGISTERED:
+ JSONObject json = new JSONObject();
+ try {
+ json.put("cmd", "send");
+ json.put("msg", message);
+ message = json.toString();
+ Log.d(TAG, "C->WSS: " + message);
+ ws.sendTextMessage(message);
+ } catch (JSONException e) {
+ reportError("WebSocket send JSON error: " + e.getMessage());
+ }
+ break;
+ }
+ }
+
+ // This call can be used to send WebSocket messages before WebSocket
+ // connection is opened.
+ public void post(String message) {
+ checkIfCalledOnValidThread();
+ sendWSSMessage("POST", message);
+ }
+
+ public void disconnect(boolean waitForComplete) {
+ checkIfCalledOnValidThread();
+ Log.d(TAG, "Disconnect WebSocket. State: " + state);
+ if (state == WebSocketConnectionState.REGISTERED) {
+ // Send "bye" to WebSocket server.
+ send("{\"type\": \"bye\"}");
+ state = WebSocketConnectionState.CONNECTED;
+ // Send http DELETE to http WebSocket server.
+ sendWSSMessage("DELETE", "");
+ }
+ // Close WebSocket in CONNECTED or ERROR states only.
+ if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
+ ws.disconnect();
+ state = WebSocketConnectionState.CLOSED;
+
+ // Wait for websocket close event to prevent websocket library from
+ // sending any pending messages to deleted looper thread.
+ if (waitForComplete) {
+ synchronized (closeEventLock) {
+ while (!closeEvent) {
+ try {
+ closeEventLock.wait(CLOSE_TIMEOUT);
+ break;
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Wait error: " + e.toString());
+ }
+ }
+ }
+ }
+ }
+ Log.d(TAG, "Disconnecting WebSocket done.");
+ }
+
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (state != WebSocketConnectionState.ERROR) {
+ state = WebSocketConnectionState.ERROR;
+ events.onWebSocketError(errorMessage);
+ }
+ }
+ });
+ }
+
+ // Asynchronously send POST/DELETE to WebSocket server.
+ private void sendWSSMessage(final String method, final String message) {
+ String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
+ Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
+ AsyncHttpURLConnection httpConnection =
+ new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ reportError("WS " + method + " error: " + errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {}
+ });
+ httpConnection.send();
+ }
+
+ // Helper method for debugging purposes. Ensures that WebSocket method is
+ // called on a looper thread.
+ private void checkIfCalledOnValidThread() {
+ if (Thread.currentThread() != handler.getLooper().getThread()) {
+ throw new IllegalStateException("WebSocket method is not called on valid thread");
+ }
+ }
+
+ private class WebSocketObserver implements WebSocketConnectionObserver {
+ @Override
+ public void onOpen() {
+ Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ state = WebSocketConnectionState.CONNECTED;
+ // Check if we have pending register request.
+ if (roomID != null && clientID != null) {
+ register(roomID, clientID);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onClose(WebSocketCloseNotification code, String reason) {
+ Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+ + state);
+ synchronized (closeEventLock) {
+ closeEvent = true;
+ closeEventLock.notify();
+ }
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (state != WebSocketConnectionState.CLOSED) {
+ state = WebSocketConnectionState.CLOSED;
+ events.onWebSocketClose();
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onTextMessage(String payload) {
+ Log.d(TAG, "WSS->C: " + payload);
+ final String message = payload;
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (state == WebSocketConnectionState.CONNECTED
+ || state == WebSocketConnectionState.REGISTERED) {
+ events.onWebSocketMessage(message);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRawTextMessage(byte[] payload) {}
+
+ @Override
+ public void onBinaryMessage(byte[] payload) {}
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketRTCClient.java b/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketRTCClient.java
new file mode 100644
index 0000000..e97150b
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/WebSocketRTCClient.java
@@ -0,0 +1,419 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+
+import com.myhexaville.androidwebrtc.RoomParametersFetcher.RoomParametersFetcherEvents;
+import com.myhexaville.androidwebrtc.WebSocketChannelClient.WebSocketChannelEvents;
+import com.myhexaville.androidwebrtc.WebSocketChannelClient.WebSocketConnectionState;
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection;
+import com.myhexaville.androidwebrtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.webrtc.IceCandidate;
+import org.webrtc.SessionDescription;
+
+/**
+ * Negotiates signaling for chatting with https://appr.tc "rooms".
+ * Uses the client<->server specifics of the apprtc AppEngine webapp.
+ *
+ * To use: create an instance of this object (registering a message handler) and
+ * call connectToRoom(). Once room connection is established
+ * onConnectedToRoom() callback with room parameters is invoked.
+ * Messages to other party (with local Ice candidates and answer SDP) can
+ * be sent after WebSocket connection is established.
+ */
+public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
+ private static final String TAG = "WSRTCClient";
+ private static final String ROOM_JOIN = "join";
+ private static final String ROOM_MESSAGE = "message";
+ private static final String ROOM_LEAVE = "leave";
+
+ private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+ private enum MessageType { MESSAGE, LEAVE }
+
+ private final Handler handler;
+ private boolean initiator;
+ private SignalingEvents events;
+ private WebSocketChannelClient wsClient;
+ private ConnectionState roomState;
+ private RoomConnectionParameters connectionParameters;
+ private String messageUrl;
+ private String leaveUrl;
+
+ public WebSocketRTCClient(SignalingEvents events) {
+ this.events = events;
+ roomState = ConnectionState.NEW;
+ final HandlerThread handlerThread = new HandlerThread(TAG);
+ handlerThread.start();
+ handler = new Handler(handlerThread.getLooper());
+ }
+
+ // --------------------------------------------------------------------
+ // AppRTCClient interface implementation.
+ // Asynchronously connect to an AppRTC room URL using supplied connection
+ // parameters, retrieves room parameters and connect to WebSocket server.
+ @Override
+ public void connectToRoom(RoomConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ connectToRoomInternal();
+ }
+ });
+ }
+
+ @Override
+ public void disconnectFromRoom() {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ disconnectFromRoomInternal();
+ handler.getLooper().quit();
+ }
+ });
+ }
+
+ // Connects to room - function runs on a local looper thread.
+ private void connectToRoomInternal() {
+ String connectionUrl = getConnectionUrl(connectionParameters);
+ Log.d(TAG, "Connect to room: " + connectionUrl);
+ roomState = ConnectionState.NEW;
+ wsClient = new WebSocketChannelClient(handler, this);
+
+ RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
+ @Override
+ public void onSignalingParametersReady(final SignalingParameters params) {
+ WebSocketRTCClient.this.handler.post(new Runnable() {
+ @Override
+ public void run() {
+ WebSocketRTCClient.this.signalingParametersReady(params);
+ }
+ });
+ }
+
+ @Override
+ public void onSignalingParametersError(String description) {
+ WebSocketRTCClient.this.reportError(description);
+ }
+ };
+
+ new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
+ }
+
+ // Disconnect from room and send bye messages - runs on a local looper thread.
+ private void disconnectFromRoomInternal() {
+ Log.d(TAG, "Disconnect. Room state: " + roomState);
+ if (roomState == ConnectionState.CONNECTED) {
+ Log.d(TAG, "Closing room.");
+ sendPostMessage(MessageType.LEAVE, leaveUrl, null);
+ }
+ roomState = ConnectionState.CLOSED;
+ if (wsClient != null) {
+ wsClient.disconnect(true);
+ }
+ }
+
+ // Helper functions to get connection, post message and leave message URLs
+ private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId;
+ }
+
+ private String getMessageUrl(
+ RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+ + "/" + signalingParameters.clientId;
+ }
+
+ private String getLeaveUrl(
+ RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+ return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+ + signalingParameters.clientId;
+ }
+
+ // Callback issued when room parameters are extracted. Runs on local
+ // looper thread.
+ private void signalingParametersReady(final SignalingParameters signalingParameters) {
+ Log.d(TAG, "Room connection completed.");
+ if (connectionParameters.loopback
+ && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
+ reportError("Loopback room is busy.");
+ return;
+ }
+ if (!connectionParameters.loopback && !signalingParameters.initiator
+ && signalingParameters.offerSdp == null) {
+ Log.w(TAG, "No offer SDP in room response.");
+ }
+ initiator = signalingParameters.initiator;
+ messageUrl = getMessageUrl(connectionParameters, signalingParameters);
+ leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
+ Log.d(TAG, "Message URL: " + messageUrl);
+ Log.d(TAG, "Leave URL: " + leaveUrl);
+ roomState = ConnectionState.CONNECTED;
+
+ // Fire connection and signaling parameters events.
+ events.onConnectedToRoom(signalingParameters);
+
+ // Connect and register WebSocket client.
+ wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
+ wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
+ }
+
+ // Send local offer SDP to the other participant.
+ @Override
+ public void sendOfferSdp(final SessionDescription sdp) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending offer SDP in non connected state.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "offer");
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ // In loopback mode rename this offer to answer and route it back.
+ SessionDescription sdpAnswer = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
+ events.onRemoteDescription(sdpAnswer);
+ }
+ }
+ });
+ }
+
+ // Send local answer SDP to the other participant.
+ @Override
+ public void sendAnswerSdp(final SessionDescription sdp) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (connectionParameters.loopback) {
+ Log.e(TAG, "Sending answer in loopback mode.");
+ return;
+ }
+ JSONObject json = new JSONObject();
+ jsonPut(json, "sdp", sdp.description);
+ jsonPut(json, "type", "answer");
+ wsClient.send(json.toString());
+ }
+ });
+ }
+
+ // Send Ice candidate to the other participant.
+ @Override
+ public void sendLocalIceCandidate(final IceCandidate candidate) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "candidate");
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ if (initiator) {
+ // Call initiator sends ice candidates to GAE server.
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate in non connected state.");
+ return;
+ }
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ events.onRemoteIceCandidate(candidate);
+ }
+ } else {
+ // Call receiver sends ice candidates to websocket server.
+ wsClient.send(json.toString());
+ }
+ }
+ });
+ }
+
+ // Send removed Ice candidates to the other participant.
+ @Override
+ public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "type", "remove-candidates");
+ JSONArray jsonArray = new JSONArray();
+ for (final IceCandidate candidate : candidates) {
+ jsonArray.put(toJsonCandidate(candidate));
+ }
+ jsonPut(json, "candidates", jsonArray);
+ if (initiator) {
+ // Call initiator sends ice candidates to GAE server.
+ if (roomState != ConnectionState.CONNECTED) {
+ reportError("Sending ICE candidate removals in non connected state.");
+ return;
+ }
+ sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
+ if (connectionParameters.loopback) {
+ events.onRemoteIceCandidatesRemoved(candidates);
+ }
+ } else {
+ // Call receiver sends ice candidates to websocket server.
+ wsClient.send(json.toString());
+ }
+ }
+ });
+ }
+
+ // --------------------------------------------------------------------
+ // WebSocketChannelEvents interface implementation.
+ // All events are called by WebSocketChannelClient on a local looper thread
+ // (passed to WebSocket client constructor).
+ @Override
+ public void onWebSocketMessage(final String msg) {
+ if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
+ Log.e(TAG, "Got WebSocket message in non registered state.");
+ return;
+ }
+ try {
+ JSONObject json = new JSONObject(msg);
+ String msgText = json.getString("msg");
+ String errorText = json.optString("error");
+ if (msgText.length() > 0) {
+ json = new JSONObject(msgText);
+ String type = json.optString("type");
+ if (type.equals("candidate")) {
+ events.onRemoteIceCandidate(toJavaCandidate(json));
+ } else if (type.equals("remove-candidates")) {
+ JSONArray candidateArray = json.getJSONArray("candidates");
+ IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
+ for (int i = 0; i < candidateArray.length(); ++i) {
+ candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
+ }
+ events.onRemoteIceCandidatesRemoved(candidates);
+ } else if (type.equals("answer")) {
+ if (initiator) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else {
+ reportError("Received answer for call initiator: " + msg);
+ }
+ } else if (type.equals("offer")) {
+ if (!initiator) {
+ SessionDescription sdp = new SessionDescription(
+ SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
+ events.onRemoteDescription(sdp);
+ } else {
+ reportError("Received offer for call receiver: " + msg);
+ }
+ } else if (type.equals("bye")) {
+ events.onChannelClose();
+ } else {
+ reportError("Unexpected WebSocket message: " + msg);
+ }
+ } else {
+ if (errorText != null && errorText.length() > 0) {
+ reportError("WebSocket error message: " + errorText);
+ } else {
+ reportError("Unexpected WebSocket message: " + msg);
+ }
+ }
+ } catch (JSONException e) {
+ reportError("WebSocket message JSON parsing error: " + e.toString());
+ }
+ }
+
+ @Override
+ public void onWebSocketClose() {
+ events.onChannelClose();
+ }
+
+ @Override
+ public void onWebSocketError(String description) {
+ reportError("WebSocket error: " + description);
+ }
+
+ // --------------------------------------------------------------------
+ // Helper functions.
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, errorMessage);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (roomState != ConnectionState.ERROR) {
+ roomState = ConnectionState.ERROR;
+ events.onChannelError(errorMessage);
+ }
+ }
+ });
+ }
+
+ // Put a |key|->|value| mapping in |json|.
+ private static void jsonPut(JSONObject json, String key, Object value) {
+ try {
+ json.put(key, value);
+ } catch (JSONException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ // Send SDP or ICE candidate to a room server.
+ private void sendPostMessage(
+ final MessageType messageType, final String url, final String message) {
+ String logInfo = url;
+ if (message != null) {
+ logInfo += ". Message: " + message;
+ }
+ Log.d(TAG, "C->GAE: " + logInfo);
+ AsyncHttpURLConnection httpConnection =
+ new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
+ @Override
+ public void onHttpError(String errorMessage) {
+ reportError("GAE POST error: " + errorMessage);
+ }
+
+ @Override
+ public void onHttpComplete(String response) {
+ if (messageType == MessageType.MESSAGE) {
+ try {
+ JSONObject roomJson = new JSONObject(response);
+ String result = roomJson.getString("result");
+ if (!result.equals("SUCCESS")) {
+ reportError("GAE POST error: " + result);
+ }
+ } catch (JSONException e) {
+ reportError("GAE POST JSON error: " + e.toString());
+ }
+ }
+ }
+ });
+ httpConnection.send();
+ }
+
+ // Converts a Java candidate to a JSONObject.
+ private JSONObject toJsonCandidate(final IceCandidate candidate) {
+ JSONObject json = new JSONObject();
+ jsonPut(json, "label", candidate.sdpMLineIndex);
+ jsonPut(json, "id", candidate.sdpMid);
+ jsonPut(json, "candidate", candidate.sdp);
+ return json;
+ }
+
+ // Converts a JSON candidate to a Java object.
+ IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
+ return new IceCandidate(
+ json.getString("id"), json.getInt("label"), json.getString("candidate"));
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/util/AppRTCUtils.java b/app/src/main/java/com/myhexaville/androidwebrtc/util/AppRTCUtils.java
new file mode 100644
index 0000000..4bc4bd7
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/util/AppRTCUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc.util;
+
+import android.os.Build;
+import android.util.Log;
+
+/**
+ * AppRTCUtils provides helper functions for managing thread safety.
+ */
+public final class AppRTCUtils {
+ private AppRTCUtils() {}
+
+ /** Helper method which throws an exception when an assertion has failed. */
+ public static void assertIsTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ /** Helper method for building a string of thread information.*/
+ public static String getThreadInfo() {
+ return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ + "]";
+ }
+
+ /** Information about the current build, taken from system properties. */
+ public static void logDeviceInfo(String tag) {
+ Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ + "Release: " + Build.VERSION.RELEASE + ", "
+ + "Brand: " + Build.BRAND + ", "
+ + "Device: " + Build.DEVICE + ", "
+ + "Id: " + Build.ID + ", "
+ + "Hardware: " + Build.HARDWARE + ", "
+ + "Manufacturer: " + Build.MANUFACTURER + ", "
+ + "Model: " + Build.MODEL + ", "
+ + "Product: " + Build.PRODUCT);
+ }
+}
diff --git a/app/src/main/java/com/myhexaville/androidwebrtc/util/AsyncHttpURLConnection.java b/app/src/main/java/com/myhexaville/androidwebrtc/util/AsyncHttpURLConnection.java
new file mode 100644
index 0000000..32ce559
--- /dev/null
+++ b/app/src/main/java/com/myhexaville/androidwebrtc/util/AsyncHttpURLConnection.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.myhexaville.androidwebrtc.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Scanner;
+
+/**
+ * Asynchronous http requests implementation.
+ */
+public class AsyncHttpURLConnection {
+ private static final int HTTP_TIMEOUT_MS = 8000;
+ private static final String HTTP_ORIGIN = "https://appr.tc";
+ private final String method;
+ private final String url;
+ private final String message;
+ private final AsyncHttpEvents events;
+ private String contentType;
+
+ /**
+ * Http requests callbacks.
+ */
+ public interface AsyncHttpEvents {
+ void onHttpError(String errorMessage);
+ void onHttpComplete(String response);
+ }
+
+ public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
+ this.method = method;
+ this.url = url;
+ this.message = message;
+ this.events = events;
+ }
+
+ public void setContentType(String contentType) {
+ this.contentType = contentType;
+ }
+
+ public void send() {
+ Runnable runHttp = new Runnable() {
+ public void run() {
+ sendHttpMessage();
+ }
+ };
+ new Thread(runHttp).start();
+ }
+
+ private void sendHttpMessage() {
+ try {
+ HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
+ byte[] postData = new byte[0];
+ if (message != null) {
+ postData = message.getBytes("UTF-8");
+ }
+ connection.setRequestMethod(method);
+ connection.setUseCaches(false);
+ connection.setDoInput(true);
+ connection.setConnectTimeout(HTTP_TIMEOUT_MS);
+ connection.setReadTimeout(HTTP_TIMEOUT_MS);
+ // TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
+ connection.addRequestProperty("origin", HTTP_ORIGIN);
+ boolean doOutput = false;
+ if (method.equals("POST")) {
+ doOutput = true;
+ connection.setDoOutput(true);
+ connection.setFixedLengthStreamingMode(postData.length);
+ }
+ if (contentType == null) {
+ connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
+ } else {
+ connection.setRequestProperty("Content-Type", contentType);
+ }
+
+ // Send POST request.
+ if (doOutput && postData.length > 0) {
+ OutputStream outStream = connection.getOutputStream();
+ outStream.write(postData);
+ outStream.close();
+ }
+
+ // Get response.
+ int responseCode = connection.getResponseCode();
+ if (responseCode != 200) {
+ events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+ + connection.getHeaderField(null));
+ connection.disconnect();
+ return;
+ }
+ InputStream responseStream = connection.getInputStream();
+ String response = drainStream(responseStream);
+ responseStream.close();
+ connection.disconnect();
+ events.onHttpComplete(response);
+ } catch (SocketTimeoutException e) {
+ events.onHttpError("HTTP " + method + " to " + url + " timeout");
+ } catch (IOException e) {
+ events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
+ }
+ }
+
+ // Return the contents of an InputStream as a String.
+ private static String drainStream(InputStream in) {
+ Scanner s = new Scanner(in).useDelimiter("\\A");
+ return s.hasNext() ? s.next() : "";
+ }
+}
diff --git a/app/src/main/res/drawable-hdpi/ic_action_full_screen.png b/app/src/main/res/drawable-hdpi/ic_action_full_screen.png
new file mode 100644
index 0000000..22f30d3
Binary files /dev/null and b/app/src/main/res/drawable-hdpi/ic_action_full_screen.png differ
diff --git a/app/src/main/res/drawable-hdpi/ic_action_return_from_full_screen.png b/app/src/main/res/drawable-hdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000..d9436e5
Binary files /dev/null and b/app/src/main/res/drawable-hdpi/ic_action_return_from_full_screen.png differ
diff --git a/app/src/main/res/drawable-hdpi/ic_loopback_call.png b/app/src/main/res/drawable-hdpi/ic_loopback_call.png
new file mode 100644
index 0000000..3931185
Binary files /dev/null and b/app/src/main/res/drawable-hdpi/ic_loopback_call.png differ
diff --git a/app/src/main/res/drawable-ldpi/ic_action_full_screen.png b/app/src/main/res/drawable-ldpi/ic_action_full_screen.png
new file mode 100644
index 0000000..e4a9ff0
Binary files /dev/null and b/app/src/main/res/drawable-ldpi/ic_action_full_screen.png differ
diff --git a/app/src/main/res/drawable-ldpi/ic_action_return_from_full_screen.png b/app/src/main/res/drawable-ldpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000..f5c80f0
Binary files /dev/null and b/app/src/main/res/drawable-ldpi/ic_action_return_from_full_screen.png differ
diff --git a/app/src/main/res/drawable-ldpi/ic_loopback_call.png b/app/src/main/res/drawable-ldpi/ic_loopback_call.png
new file mode 100644
index 0000000..3931185
Binary files /dev/null and b/app/src/main/res/drawable-ldpi/ic_loopback_call.png differ
diff --git a/app/src/main/res/drawable-mdpi/ic_action_full_screen.png b/app/src/main/res/drawable-mdpi/ic_action_full_screen.png
new file mode 100644
index 0000000..e4a9ff0
Binary files /dev/null and b/app/src/main/res/drawable-mdpi/ic_action_full_screen.png differ
diff --git a/app/src/main/res/drawable-mdpi/ic_action_return_from_full_screen.png b/app/src/main/res/drawable-mdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000..f5c80f0
Binary files /dev/null and b/app/src/main/res/drawable-mdpi/ic_action_return_from_full_screen.png differ
diff --git a/app/src/main/res/drawable-mdpi/ic_loopback_call.png b/app/src/main/res/drawable-mdpi/ic_loopback_call.png
new file mode 100644
index 0000000..3931185
Binary files /dev/null and b/app/src/main/res/drawable-mdpi/ic_loopback_call.png differ
diff --git a/app/src/main/res/drawable-xhdpi/ic_action_full_screen.png b/app/src/main/res/drawable-xhdpi/ic_action_full_screen.png
new file mode 100644
index 0000000..6d90c07
Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/ic_action_full_screen.png differ
diff --git a/app/src/main/res/drawable-xhdpi/ic_action_return_from_full_screen.png b/app/src/main/res/drawable-xhdpi/ic_action_return_from_full_screen.png
new file mode 100644
index 0000000..a773b34
Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/ic_action_return_from_full_screen.png differ
diff --git a/app/src/main/res/drawable-xhdpi/ic_loopback_call.png b/app/src/main/res/drawable-xhdpi/ic_loopback_call.png
new file mode 100644
index 0000000..3931185
Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/ic_loopback_call.png differ
diff --git a/app/src/main/res/drawable/add.xml b/app/src/main/res/drawable/add.xml
new file mode 100644
index 0000000..28fac7d
--- /dev/null
+++ b/app/src/main/res/drawable/add.xml
@@ -0,0 +1,9 @@
+
+
+
diff --git a/app/src/main/res/drawable/call.xml b/app/src/main/res/drawable/call.xml
new file mode 100644
index 0000000..6eb46f0
--- /dev/null
+++ b/app/src/main/res/drawable/call.xml
@@ -0,0 +1,9 @@
+
+
+
diff --git a/app/src/main/res/drawable/disconnect.xml b/app/src/main/res/drawable/disconnect.xml
new file mode 100644
index 0000000..7831130
--- /dev/null
+++ b/app/src/main/res/drawable/disconnect.xml
@@ -0,0 +1,9 @@
+
+
+
diff --git a/app/src/main/res/layout/activity_call.xml b/app/src/main/res/layout/activity_call.xml
new file mode 100644
index 0000000..07a3459
--- /dev/null
+++ b/app/src/main/res/layout/activity_call.xml
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/layout/activity_connect.xml b/app/src/main/res/layout/activity_connect.xml
new file mode 100644
index 0000000..486118e
--- /dev/null
+++ b/app/src/main/res/layout/activity_connect.xml
@@ -0,0 +1,77 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/res/layout/fragment_call.xml b/app/src/main/res/layout/fragment_call.xml
new file mode 100644
index 0000000..90b1e9c
--- /dev/null
+++ b/app/src/main/res/layout/fragment_call.xml
@@ -0,0 +1,77 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/layout/fragment_hud.xml b/app/src/main/res/layout/fragment_hud.xml
new file mode 100644
index 0000000..20d2654
--- /dev/null
+++ b/app/src/main/res/layout/fragment_hud.xml
@@ -0,0 +1,74 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/menu/connect_menu.xml b/app/src/main/res/menu/connect_menu.xml
new file mode 100644
index 0000000..a723f54
--- /dev/null
+++ b/app/src/main/res/menu/connect_menu.xml
@@ -0,0 +1,13 @@
+
diff --git a/app/src/main/res/mipmap-hdpi/ic_launcher.png b/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..cde69bc
Binary files /dev/null and b/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/app/src/main/res/mipmap-mdpi/ic_launcher.png b/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..c133a0c
Binary files /dev/null and b/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..bfa42f0
Binary files /dev/null and b/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..324e72c
Binary files /dev/null and b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..aee44e1
Binary files /dev/null and b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/app/src/main/res/values/arrays.xml b/app/src/main/res/values/arrays.xml
new file mode 100644
index 0000000..c663400
--- /dev/null
+++ b/app/src/main/res/values/arrays.xml
@@ -0,0 +1,59 @@
+
+
+
+ - Default
+ - 4K (3840 x 2160)
+ - Full HD (1920 x 1080)
+ - HD (1280 x 720)
+ - VGA (640 x 480)
+ - QVGA (320 x 240)
+
+
+
+ - Default
+ - 3840 x 2160
+ - 1920 x 1080
+ - 1280 x 720
+ - 640 x 480
+ - 320 x 240
+
+
+
+ - Default
+ - 30 fps
+ - 15 fps
+
+
+
+ - Default
+ - Manual
+
+
+
+ - VP8
+ - VP9
+ - H264
+
+
+
+ - OPUS
+ - ISAC
+
+
+
+ - Auto (proximity sensor)
+ - Enabled
+ - Disabled
+
+
+
+ - auto
+ - true
+ - false
+
+
+
+ - Remove favorite
+
+
+
diff --git a/app/src/main/res/values/colors.xml b/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..3ab3e9c
--- /dev/null
+++ b/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+
+
+ #3F51B5
+ #303F9F
+ #FF4081
+
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..e8bbbd5
--- /dev/null
+++ b/app/src/main/res/values/strings.xml
@@ -0,0 +1,215 @@
+
+
+ AppRTC
+ AppRTC Settings
+ Disconnect Call
+
+ Please enter a room name. Room names are shared with everyone, so think
+ of something unique and send it to a friend.
+
+ Favorites
+ No favorites
+ Invalid URL
+ The URL or room name you entered resulted in an invalid URL: %1$s
+
+ Connection error
+ Connecting to: %1$s
+ FATAL ERROR: Missing URL to connect to.
+ Camera2 only supports capturing to texture. Either disable Camera2 or enable capturing to texture in the options.
+ OK
+ Switch front/back camera
+ Slide to change capture format
+ Muted
+ Toggle debug view
+ Toggle microphone on/off
+ Settings
+ Loopback connection
+ Connect to the room
+ Add favorite
+ %1$dx%2$d @ %3$d fps
+
+
+ room_preference
+ room_list_preference
+
+ video_settings_key
+ WebRTC video settings.
+
+ videocall_preference
+ Video call.
+ Enable video in a call.
+ true
+
+ screencapture_preference
+ Use screencapture.
+ false
+
+ camera2_preference
+ Use Camera2.
+ true
+ Not supported on this device.
+
+ resolution_preference
+ Video resolution.
+ Enter AppRTC local video resolution.
+ Default
+
+ fps_preference
+ Camera fps.
+ Enter local camera fps.
+ Default
+
+ capturequalityslider_preference
+ Capture quality slider.
+ Enable slider for changing capture quality.
+ false
+
+ maxvideobitrate_preference
+ Maximum video bitrate setting.
+ Maximum video bitrate setting.
+ Default
+
+ maxvideobitratevalue_preference
+ Video encoder maximum bitrate.
+ Enter video encoder maximum bitrate in kbps.
+ 1700
+
+ videocodec_preference
+ Default video codec.
+ Select default video codec.
+ VP8
+
+ hwcodec_preference
+ Video codec hardware acceleration.
+ Use hardware accelerated video codec (if available).
+ true
+
+ capturetotexture_preference
+ Video capture to surface texture.
+ Capture video to textures (if available).
+ true
+
+ flexfec_preference
+ Codec-agnostic Flexible FEC.
+ Enable FlexFEC.
+ false
+
+ Enabled
+ Disabled
+
+ audio_settings_key
+ WebRTC audio settings.
+
+ startaudiobitrate_preference
+ Audio bitrate setting.
+ Audio bitrate setting.
+ Default
+
+ startaudiobitratevalue_preference
+ Audio codec bitrate.
+ Enter audio codec bitrate in kbps.
+ 32
+
+ audiocodec_preference
+ Default audio codec.
+ Select default audio codec.
+ OPUS
+
+ audioprocessing_preference
+ Disable audio processing.
+ Disable audio processing pipeline.
+ false
+
+ aecdump_preference
+ Create aecdump.
+ Enable diagnostic audio recordings.
+ false
+
+ opensles_preference
+ Use OpenSL ES for audio playback.
+ Use OpenSL ES for audio playback.
+ false
+
+ disable_built_in_aec_preference
+ Disable hardware AEC.
+ Disable hardware AEC.
+ false
+ Hardware AEC is not available
+
+ disable_built_in_agc_preference
+ Disable hardware AGC.
+ Disable hardware AGC.
+ false
+ Hardware AGC is not available
+
+ disable_built_in_ns_preference
+ Disable hardware NS.
+ Disable hardware NS.
+ false
+ Hardware NS is not available
+
+ enable_level_control_preference
+ Enable level control.
+ false
+
+ speakerphone_preference
+ Speakerphone.
+ Speakerphone.
+ auto
+
+ data_settings_key
+ WebRTC data channel settings.
+
+ enable_datachannel_preference
+ Enable datachannel.
+ Enable datachannel.
+ true
+
+ ordered_preference
+ Order messages.
+ Order messages.
+ true
+
+ Subprotocol
+ Subprotocol.
+ Enter subprotocol.
+
+
+ negotiated_preference
+ Negotiated.
+ Negotiated.
+ false
+
+ max_retransmit_time_ms_preference
+ Max delay to retransmit.
+ Enter max delay to retransmit (in ms).
+ -1
+
+ max_retransmits_preference
+ Max attempts to retransmit.
+ Enter max attempts to retransmit.
+ -1
+
+ data_id_preference
+ Data id.
+ Enter data channel id.
+ -1
+
+ misc_settings_key
+ Miscellaneous settings.
+
+ room_server_url_preference
+ Room server URL.
+ Enter a room server URL.
+ https://appr.tc
+
+ displayhud_preference
+ Display call statistics.
+ Display call statistics.
+ false
+
+ tracing_preference
+ Debug performance tracing.
+ Debug performance tracing.
+ false
+
diff --git a/app/src/main/res/values/styles.xml b/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..5885930
--- /dev/null
+++ b/app/src/main/res/values/styles.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
diff --git a/app/src/main/res/xml/preferences.xml b/app/src/main/res/xml/preferences.xml
new file mode 100644
index 0000000..6753546
--- /dev/null
+++ b/app/src/main/res/xml/preferences.xml
@@ -0,0 +1,236 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/test/java/com/myhexaville/androidwebrtc/ExampleUnitTest.java b/app/src/test/java/com/myhexaville/androidwebrtc/ExampleUnitTest.java
new file mode 100644
index 0000000..c86618a
--- /dev/null
+++ b/app/src/test/java/com/myhexaville/androidwebrtc/ExampleUnitTest.java
@@ -0,0 +1,17 @@
+package com.myhexaville.androidwebrtc;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+/**
+ * Example local unit test, which will execute on the development machine (host).
+ *
+ * @see Testing documentation
+ */
+public class ExampleUnitTest {
+ @Test
+ public void addition_isCorrect() throws Exception {
+ assertEquals(4, 2 + 2);
+ }
+}
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 0000000..0ad8f32
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,26 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+ ext.kotlin_version = '1.0.6'
+ ext.support_version = '25.1.0'
+ repositories {
+ jcenter()
+ }
+ dependencies {
+ classpath 'com.android.tools.build:gradle:2.3.0'
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+
+ // NOTE: Do not place your application dependencies here; they belong
+ // in the individual module build.gradle files
+ }
+}
+
+allprojects {
+ repositories {
+ jcenter()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/gradle.properties b/gradle.properties
new file mode 100644
index 0000000..aac7c9b
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1,17 @@
+# Project-wide Gradle settings.
+
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..53ca505
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Sun Mar 26 16:21:17 CEST 2017
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 0000000..e7b4def
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1 @@
+include ':app'