diff --git a/.idea/misc.xml b/.idea/misc.xml index e0d5b93..b0c7b20 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -29,7 +29,7 @@ - + diff --git a/app/build.gradle b/app/build.gradle index 94c8552..6d3ec8a 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -3,6 +3,11 @@ apply plugin: 'com.android.application' android { compileSdkVersion 28 + compileOptions { + sourceCompatibility 1.8 + targetCompatibility 1.8 + } + defaultConfig { applicationId "org.communiquons.android.comunic.client" minSdkVersion 21 @@ -59,6 +64,8 @@ dependencies { implementation 'com.android.support:design:28.0.0-rc02' implementation 'com.android.support:preference-v7:28.0.0-rc02' implementation 'com.android.support:support-v4:28.0.0-rc02' + implementation 'com.squareup.okhttp3:okhttp:3.12.1' + implementation 'org.whispersystems:webrtc-android:M71' testImplementation 'junit:junit:4.12' androidTestImplementation 'com.android.support.test:runner:1.0.2' androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index f35621a..ea1b827 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -7,6 +7,12 @@ + + + + + + iceServers; + public final boolean initiator; + public final String clientId; + public final String wssUrl; + public final String wssPostUrl; + public final SessionDescription offerSdp; + public final List iceCandidates; + + public SignalingParameters(List iceServers, boolean initiator, + String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp, + List iceCandidates) { + this.iceServers = iceServers; + this.initiator = initiator; + this.clientId = clientId; + this.wssUrl = wssUrl; + this.wssPostUrl = wssPostUrl; + this.offerSdp = offerSdp; + this.iceCandidates = iceCandidates; + } + } + + /** + * Callback interface for messages delivered on signaling channel. + * + *
Methods are guaranteed to be invoked on the UI thread of |activity|. + */ + interface SignalingEvents { + /** + * Callback fired once the room's signaling parameters + * SignalingParameters are extracted. + */ + void onConnectedToRoom(final SignalingParameters params); + + /** + * Callback fired once remote SDP is received. + */ + void onRemoteDescription(final SessionDescription sdp); + + /** + * Callback fired once remote Ice candidate is received. + */ + void onRemoteIceCandidate(final IceCandidate candidate); + + /** + * Callback fired once remote Ice candidate removals are received. + */ + void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates); + + /** + * Callback fired once channel is closed. + */ + void onChannelClose(); + + /** + * Callback fired once channel error happened. + */ + void onChannelError(final String description); + } +} \ No newline at end of file diff --git a/app/src/main/java/org/appspot/apprtc/PeerConnectionClient.java b/app/src/main/java/org/appspot/apprtc/PeerConnectionClient.java new file mode 100644 index 0000000..b17fc94 --- /dev/null +++ b/app/src/main/java/org/appspot/apprtc/PeerConnectionClient.java @@ -0,0 +1,1428 @@ +/* + * Copyright 2014 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.appspot.apprtc; + +import android.content.Context; +import android.os.Environment; +import android.os.ParcelFileDescriptor; +import android.support.annotation.Nullable; +import android.util.Log; + +import org.appspot.apprtc.AppRTCClient.SignalingParameters; +import org.webrtc.AudioSource; +import org.webrtc.AudioTrack; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.DataChannel; +import org.webrtc.DefaultVideoDecoderFactory; +import org.webrtc.DefaultVideoEncoderFactory; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import org.webrtc.Logging; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnection; +import org.webrtc.PeerConnection.IceConnectionState; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.RtpParameters; +import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; +import org.webrtc.SdpObserver; +import org.webrtc.SessionDescription; +import org.webrtc.SoftwareVideoDecoderFactory; +import org.webrtc.SoftwareVideoEncoderFactory; +import org.webrtc.StatsObserver; +import org.webrtc.StatsReport; +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoDecoderFactory; +import org.webrtc.VideoEncoderFactory; +import org.webrtc.VideoSink; +import org.webrtc.VideoSource; +import org.webrtc.VideoTrack; +import org.webrtc.audio.AudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; +import org.webrtc.audio.LegacyAudioDeviceModule; +import org.webrtc.voiceengine.WebRtcAudioManager; +import org.webrtc.voiceengine.WebRtcAudioRecord; +import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode; +import 
org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback; +import org.webrtc.voiceengine.WebRtcAudioTrack; +import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode; +import org.webrtc.voiceengine.WebRtcAudioUtils; + +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Peer connection client implementation. + * + *
All public methods are routed to local looper thread. + * All PeerConnectionEvents callbacks are invoked from the same looper thread. + * This class is a singleton. + */ +public class PeerConnectionClient { + public static final String VIDEO_TRACK_ID = "ARDAMSv0"; + public static final String AUDIO_TRACK_ID = "ARDAMSa0"; + public static final String VIDEO_TRACK_TYPE = "video"; + private static final String TAG = "PCRTCClient"; + private static final String VIDEO_CODEC_VP8 = "VP8"; + private static final String VIDEO_CODEC_VP9 = "VP9"; + private static final String VIDEO_CODEC_H264 = "H264"; + private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline"; + private static final String VIDEO_CODEC_H264_HIGH = "H264 High"; + private static final String AUDIO_CODEC_OPUS = "opus"; + private static final String AUDIO_CODEC_ISAC = "ISAC"; + private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate"; + private static final String VIDEO_FLEXFEC_FIELDTRIAL = + "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; + private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/"; + private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL = + "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"; + private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate"; + private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"; + private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"; + private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter"; + private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression"; + private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement"; + private static final int HD_VIDEO_WIDTH = 1280; + private static final int HD_VIDEO_HEIGHT = 720; + private static final int BPS_IN_KBPS = 1000; + private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log"; + + // Executor thread is started once in private ctor and is used for all + // peer connection API calls to ensure new peer connection factory is + // created on the same thread as previously destroyed factory. + private static final ExecutorService executor = Executors.newSingleThreadExecutor(); + + private final PCObserver pcObserver = new PCObserver(); + private final SDPObserver sdpObserver = new SDPObserver(); + private final Timer statsTimer = new Timer(); + private final EglBase rootEglBase; + private final Context appContext; + private final PeerConnectionParameters peerConnectionParameters; + private final PeerConnectionEvents events; + + @Nullable + private PeerConnectionFactory factory; + @Nullable + private PeerConnection peerConnection; + @Nullable + private AudioSource audioSource; + @Nullable private SurfaceTextureHelper surfaceTextureHelper; + @Nullable private VideoSource videoSource; + private boolean preferIsac; + private boolean videoCapturerStopped; + private boolean isError; + @Nullable + private VideoSink localRender; + @Nullable private List remoteSinks; + private SignalingParameters signalingParameters; + private int videoWidth; + private int videoHeight; + private int videoFps; + private MediaConstraints audioConstraints; + private MediaConstraints sdpMediaConstraints; + // Queued remote ICE candidates are consumed only after both local and + // remote descriptions are set. 
Similarly local ICE candidates are sent to + // remote peer after both local and remote description are set. + @Nullable + private List queuedRemoteCandidates; + private boolean isInitiator; + @Nullable + private SessionDescription localSdp; // either offer or answer SDP + @Nullable + private VideoCapturer videoCapturer; + // enableVideo is set to true if video should be rendered and sent. + private boolean renderVideo = true; + @Nullable + private VideoTrack localVideoTrack; + @Nullable + private VideoTrack remoteVideoTrack; + @Nullable + private RtpSender localVideoSender; + // enableAudio is set to true if audio should be sent. + private boolean enableAudio = true; + @Nullable + private AudioTrack localAudioTrack; + @Nullable + private DataChannel dataChannel; + private final boolean dataChannelEnabled; + // Enable RtcEventLog. + @Nullable + private RtcEventLog rtcEventLog; + // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes + // recorded audio samples to an output file. + @Nullable private RecordedAudioToFileController saveRecordedAudioToFile; + + /** + * Peer connection parameters. + */ + public static class DataChannelParameters { + public final boolean ordered; + public final int maxRetransmitTimeMs; + public final int maxRetransmits; + public final String protocol; + public final boolean negotiated; + public final int id; + + public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, + String protocol, boolean negotiated, int id) { + this.ordered = ordered; + this.maxRetransmitTimeMs = maxRetransmitTimeMs; + this.maxRetransmits = maxRetransmits; + this.protocol = protocol; + this.negotiated = negotiated; + this.id = id; + } + } + + /** + * Peer connection parameters. + */ + public static class PeerConnectionParameters { + public final boolean videoCallEnabled; + public final boolean loopback; + public final boolean tracing; + public final int videoWidth; + public final int videoHeight; + public final int videoFps; + public final int videoMaxBitrate; + public final String videoCodec; + public final boolean videoCodecHwAcceleration; + public final boolean videoFlexfecEnabled; + public final int audioStartBitrate; + public final String audioCodec; + public final boolean noAudioProcessing; + public final boolean aecDump; + public final boolean saveInputAudioToFile; + public final boolean useOpenSLES; + public final boolean disableBuiltInAEC; + public final boolean disableBuiltInAGC; + public final boolean disableBuiltInNS; + public final boolean disableWebRtcAGCAndHPF; + public final boolean enableRtcEventLog; + public final boolean useLegacyAudioDevice; + private final DataChannelParameters dataChannelParameters; + + public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing, + int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec, + boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate, + String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile, + boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC, + boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog, + boolean useLegacyAudioDevice, DataChannelParameters dataChannelParameters) { + this.videoCallEnabled = videoCallEnabled; + this.loopback = loopback; + this.tracing = tracing; + this.videoWidth = videoWidth; + this.videoHeight = videoHeight; + this.videoFps = videoFps; + this.videoMaxBitrate = 
videoMaxBitrate; + this.videoCodec = videoCodec; + this.videoFlexfecEnabled = videoFlexfecEnabled; + this.videoCodecHwAcceleration = videoCodecHwAcceleration; + this.audioStartBitrate = audioStartBitrate; + this.audioCodec = audioCodec; + this.noAudioProcessing = noAudioProcessing; + this.aecDump = aecDump; + this.saveInputAudioToFile = saveInputAudioToFile; + this.useOpenSLES = useOpenSLES; + this.disableBuiltInAEC = disableBuiltInAEC; + this.disableBuiltInAGC = disableBuiltInAGC; + this.disableBuiltInNS = disableBuiltInNS; + this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF; + this.enableRtcEventLog = enableRtcEventLog; + this.useLegacyAudioDevice = useLegacyAudioDevice; + this.dataChannelParameters = dataChannelParameters; + } + } + + /** + * Peer connection events. + */ + public interface PeerConnectionEvents { + /** + * Callback fired once local SDP is created and set. + */ + void onLocalDescription(final SessionDescription sdp); + + /** + * Callback fired once local Ice candidate is generated. + */ + void onIceCandidate(final IceCandidate candidate); + + /** + * Callback fired once local ICE candidates are removed. + */ + void onIceCandidatesRemoved(final IceCandidate[] candidates); + + /** + * Callback fired once connection is established (IceConnectionState is + * CONNECTED). + */ + void onIceConnected(); + + /** + * Callback fired once connection is disconnected (IceConnectionState is + * DISCONNECTED). + */ + void onIceDisconnected(); + + /** + * Callback fired once DTLS connection is established (PeerConnectionState + * is CONNECTED). + */ + void onConnected(); + + /** + * Callback fired once DTLS connection is disconnected (PeerConnectionState + * is DISCONNECTED). + */ + void onDisconnected(); + + /** + * Callback fired once peer connection is closed. + */ + void onPeerConnectionClosed(); + + /** + * Callback fired once peer connection statistics is ready. + */ + void onPeerConnectionStatsReady(final StatsReport[] reports); + + /** + * Callback fired once peer connection error happened. + */ + void onPeerConnectionError(final String description); + } + + /** + * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes + * ownership of |eglBase|. + */ + public PeerConnectionClient(Context appContext, EglBase eglBase, + PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) { + this.rootEglBase = eglBase; + this.appContext = appContext; + this.events = events; + this.peerConnectionParameters = peerConnectionParameters; + this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null; + + Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters)); + + final String fieldTrials = getFieldTrials(peerConnectionParameters); + executor.execute(() -> { + Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials); + PeerConnectionFactory.initialize( + PeerConnectionFactory.InitializationOptions.builder(appContext) + .setFieldTrials(fieldTrials) + .setEnableInternalTracer(true) + .createInitializationOptions()); + }); + } + + /** + * This function should only be called once. 
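   * <p>Illustrative call order from the application side (a sketch only; the
   * {@code events}, {@code peerConnectionParameters} and {@code signalingParameters}
   * values are assumed to come from the app's own signaling layer, and the
   * variable names are placeholders):
   * <pre>{@code
   * PeerConnectionClient client = new PeerConnectionClient(
   *     getApplicationContext(), EglBase.create(), peerConnectionParameters, events);
   * client.createPeerConnectionFactory(new PeerConnectionFactory.Options());
   * client.createPeerConnection(localSink, remoteSink, videoCapturer, signalingParameters);
   * client.createOffer(); // the callee side would call createAnswer() instead
   * }</pre>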
+ */ + public void createPeerConnectionFactory(PeerConnectionFactory.Options options) { + if (factory != null) { + throw new IllegalStateException("PeerConnectionFactory has already been constructed"); + } + executor.execute(() -> createPeerConnectionFactoryInternal(options)); + } + + public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink, + final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { + if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) { + Log.w(TAG, "Video call enabled but no video capturer provided."); + } + createPeerConnection( + localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters); + } + + public void createPeerConnection(final VideoSink localRender, final List remoteSinks, + final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { + if (peerConnectionParameters == null) { + Log.e(TAG, "Creating peer connection without initializing factory."); + return; + } + this.localRender = localRender; + this.remoteSinks = remoteSinks; + this.videoCapturer = videoCapturer; + this.signalingParameters = signalingParameters; + executor.execute(() -> { + try { + createMediaConstraintsInternal(); + createPeerConnectionInternal(); + maybeCreateAndStartRtcEventLog(); + } catch (Exception e) { + reportError("Failed to create peer connection: " + e.getMessage()); + throw e; + } + }); + } + + public void close() { + executor.execute(this ::closeInternal); + } + + private boolean isVideoCallEnabled() { + return peerConnectionParameters.videoCallEnabled && videoCapturer != null; + } + + private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) { + isError = false; + + if (peerConnectionParameters.tracing) { + PeerConnectionFactory.startInternalTracingCapture( + Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + + "webrtc-trace.txt"); + } + + // Check if ISAC is used by default. + preferIsac = peerConnectionParameters.audioCodec != null + && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC); + + // It is possible to save a copy in raw PCM format on a file by checking + // the "Save input audio to file" checkbox in the Settings UI. A callback + // interface is set when this flag is enabled. As a result, a copy of recorded + // audio samples are provided to this client directly from the native audio + // layer in Java. + if (peerConnectionParameters.saveInputAudioToFile) { + if (!peerConnectionParameters.useOpenSLES) { + Log.d(TAG, "Enable recording of microphone input audio to file"); + saveRecordedAudioToFile = new RecordedAudioToFileController(executor); + } else { + // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected, + // then the "Save inut audio to file" option shall be grayed out. + Log.e(TAG, "Recording of input audio is not supported for OpenSL ES"); + } + } + + final AudioDeviceModule adm = peerConnectionParameters.useLegacyAudioDevice + ? createLegacyAudioDevice() + : createJavaAudioDevice(); + + // Create peer connection factory. 
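  // Note: DefaultVideoEncoderFactory/DefaultVideoDecoderFactory combine the
  // MediaCodec-based hardware codecs with a software fallback, whereas the
  // SoftwareVideo*Factory variants below are software-only. The factory retains
  // the audio device module it is built with, which is why the local |adm|
  // handle is released right after the factory has been created.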
+ if (options != null) { + Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask); + } + final boolean enableH264HighProfile = + VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec); + final VideoEncoderFactory encoderFactory; + final VideoDecoderFactory decoderFactory; + + if (peerConnectionParameters.videoCodecHwAcceleration) { + encoderFactory = new DefaultVideoEncoderFactory( + rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile); + decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext()); + } else { + encoderFactory = new SoftwareVideoEncoderFactory(); + decoderFactory = new SoftwareVideoDecoderFactory(); + } + + factory = PeerConnectionFactory.builder() + .setOptions(options) + .setAudioDeviceModule(adm) + .setVideoEncoderFactory(encoderFactory) + .setVideoDecoderFactory(decoderFactory) + .createPeerConnectionFactory(); + Log.d(TAG, "Peer connection factory created."); + adm.release(); + } + + AudioDeviceModule createLegacyAudioDevice() { + // Enable/disable OpenSL ES playback. + if (!peerConnectionParameters.useOpenSLES) { + Log.d(TAG, "Disable OpenSL ES audio even if device supports it"); + WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */); + } else { + Log.d(TAG, "Allow OpenSL ES audio if device supports it"); + WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false); + } + + if (peerConnectionParameters.disableBuiltInAEC) { + Log.d(TAG, "Disable built-in AEC even if device supports it"); + WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true); + } else { + Log.d(TAG, "Enable built-in AEC if device supports it"); + WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false); + } + + if (peerConnectionParameters.disableBuiltInNS) { + Log.d(TAG, "Disable built-in NS even if device supports it"); + WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true); + } else { + Log.d(TAG, "Enable built-in NS if device supports it"); + WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false); + } + + WebRtcAudioRecord.setOnAudioSamplesReady(saveRecordedAudioToFile); + + // Set audio record error callbacks. + WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() { + @Override + public void onWebRtcAudioRecordInitError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioRecordStartError( + AudioRecordStartErrorCode errorCode, String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioRecordError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage); + reportError(errorMessage); + } + }); + + WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() { + @Override + public void onWebRtcAudioTrackInitError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioTrackStartError( + AudioTrackStartErrorCode errorCode, String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". 
" + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioTrackError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage); + reportError(errorMessage); + } + }); + + return new LegacyAudioDeviceModule(); + } + + AudioDeviceModule createJavaAudioDevice() { + // Enable/disable OpenSL ES playback. + if (!peerConnectionParameters.useOpenSLES) { + Log.w(TAG, "External OpenSLES ADM not implemented yet."); + // TODO(magjed): Add support for external OpenSLES ADM. + } + + // Set audio record error callbacks. + AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() { + @Override + public void onWebRtcAudioRecordInitError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioRecordStartError( + JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioRecordError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage); + reportError(errorMessage); + } + }; + + AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() { + @Override + public void onWebRtcAudioTrackInitError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioTrackStartError( + JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage); + reportError(errorMessage); + } + + @Override + public void onWebRtcAudioTrackError(String errorMessage) { + Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage); + reportError(errorMessage); + } + }; + + return JavaAudioDeviceModule.builder(appContext) + .setSamplesReadyCallback(saveRecordedAudioToFile) + .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC) + .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS) + .setAudioRecordErrorCallback(audioRecordErrorCallback) + .setAudioTrackErrorCallback(audioTrackErrorCallback) + .createAudioDeviceModule(); + } + + private void createMediaConstraintsInternal() { + // Create video constraints if video call is enabled. + if (isVideoCallEnabled()) { + videoWidth = peerConnectionParameters.videoWidth; + videoHeight = peerConnectionParameters.videoHeight; + videoFps = peerConnectionParameters.videoFps; + + // If video resolution is not specified, default to HD. + if (videoWidth == 0 || videoHeight == 0) { + videoWidth = HD_VIDEO_WIDTH; + videoHeight = HD_VIDEO_HEIGHT; + } + + // If fps is not specified, default to 30. + if (videoFps == 0) { + videoFps = 30; + } + Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps); + } + + // Create audio constraints. 
+ audioConstraints = new MediaConstraints(); + // added for audio performance measurements + if (peerConnectionParameters.noAudioProcessing) { + Log.d(TAG, "Disabling audio processing"); + audioConstraints.mandatory.add( + new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false")); + audioConstraints.mandatory.add( + new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false")); + audioConstraints.mandatory.add( + new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false")); + audioConstraints.mandatory.add( + new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false")); + } + // Create SDP constraints. + sdpMediaConstraints = new MediaConstraints(); + sdpMediaConstraints.mandatory.add( + new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); + sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair( + "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled()))); + } + + private void createPeerConnectionInternal() { + if (factory == null || isError) { + Log.e(TAG, "Peerconnection factory is not created"); + return; + } + Log.d(TAG, "Create peer connection."); + + queuedRemoteCandidates = new ArrayList<>(); + + PeerConnection.RTCConfiguration rtcConfig = + new PeerConnection.RTCConfiguration(signalingParameters.iceServers); + // TCP candidates are only useful when connecting to a server that supports + // ICE-TCP. + rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; + rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; + rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; + rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; + // Use ECDSA encryption. + rtcConfig.keyType = PeerConnection.KeyType.ECDSA; + // Enable DTLS for normal calls and disable for loopback calls. + rtcConfig.enableDtlsSrtp = !peerConnectionParameters.loopback; + rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; + + peerConnection = factory.createPeerConnection(rtcConfig, pcObserver); + + if (dataChannelEnabled) { + DataChannel.Init init = new DataChannel.Init(); + init.ordered = peerConnectionParameters.dataChannelParameters.ordered; + init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated; + init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits; + init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs; + init.id = peerConnectionParameters.dataChannelParameters.id; + init.protocol = peerConnectionParameters.dataChannelParameters.protocol; + dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init); + } + isInitiator = false; + + // Set INFO libjingle logging. + // NOTE: this _must_ happen while |factory| is alive! + Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO); + + List mediaStreamLabels = Collections.singletonList("ARDAMS"); + if (isVideoCallEnabled()) { + peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels); + // We can add the renderers right away because we don't need to wait for an + // answer to get the remote track. 
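  // (This works because the connection is created with Unified Plan semantics:
  // the addTrack() call above already created a video transceiver, so
  // getRemoteVideoTrack() can return the remote track object before any answer
  // arrives.)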
+ remoteVideoTrack = getRemoteVideoTrack(); + remoteVideoTrack.setEnabled(renderVideo); + for (VideoSink remoteSink : remoteSinks) { + remoteVideoTrack.addSink(remoteSink); + } + } + peerConnection.addTrack(createAudioTrack(), mediaStreamLabels); + if (isVideoCallEnabled()) { + findVideoSender(); + } + + if (peerConnectionParameters.aecDump) { + try { + ParcelFileDescriptor aecDumpFileDescriptor = + ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath() + + File.separator + "Download/audio.aecdump"), + ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE + | ParcelFileDescriptor.MODE_TRUNCATE); + factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1); + } catch (IOException e) { + Log.e(TAG, "Can not open aecdump file", e); + } + } + + if (saveRecordedAudioToFile != null) { + if (saveRecordedAudioToFile.start()) { + Log.d(TAG, "Recording input audio to file is activated"); + } + } + Log.d(TAG, "Peer connection created."); + } + + private File createRtcEventLogOutputFile() { + DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault()); + Date date = new Date(); + final String outputFileName = "event_log_" + dateFormat.format(date) + ".log"; + return new File( + appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName); + } + + private void maybeCreateAndStartRtcEventLog() { + if (appContext == null || peerConnection == null) { + return; + } + if (!peerConnectionParameters.enableRtcEventLog) { + Log.d(TAG, "RtcEventLog is disabled."); + return; + } + rtcEventLog = new RtcEventLog(peerConnection); + rtcEventLog.start(createRtcEventLogOutputFile()); + } + + private void closeInternal() { + if (factory != null && peerConnectionParameters.aecDump) { + factory.stopAecDump(); + } + Log.d(TAG, "Closing peer connection."); + statsTimer.cancel(); + if (dataChannel != null) { + dataChannel.dispose(); + dataChannel = null; + } + if (rtcEventLog != null) { + // RtcEventLog should stop before the peer connection is disposed. + rtcEventLog.stop(); + rtcEventLog = null; + } + if (peerConnection != null) { + peerConnection.dispose(); + peerConnection = null; + } + Log.d(TAG, "Closing audio source."); + if (audioSource != null) { + audioSource.dispose(); + audioSource = null; + } + Log.d(TAG, "Stopping capture."); + if (videoCapturer != null) { + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + videoCapturerStopped = true; + videoCapturer.dispose(); + videoCapturer = null; + } + Log.d(TAG, "Closing video source."); + if (videoSource != null) { + videoSource.dispose(); + videoSource = null; + } + if (surfaceTextureHelper != null) { + surfaceTextureHelper.dispose(); + surfaceTextureHelper = null; + } + if (saveRecordedAudioToFile != null) { + Log.d(TAG, "Closing audio file for recorded input audio."); + saveRecordedAudioToFile.stop(); + saveRecordedAudioToFile = null; + } + localRender = null; + remoteSinks = null; + Log.d(TAG, "Closing peer connection factory."); + if (factory != null) { + factory.dispose(); + factory = null; + } + rootEglBase.release(); + Log.d(TAG, "Closing peer connection done."); + events.onPeerConnectionClosed(); + PeerConnectionFactory.stopInternalTracingCapture(); + PeerConnectionFactory.shutdownInternalTracer(); + } + + public boolean isHDVideo() { + return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720; + } + + @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated. 
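  // A possible non-deprecated alternative for the TODO above, as an illustrative
  // sketch only: the callback-based getStats(RTCStatsCollectorCallback) API,
  // which the Java 8 lambda support enabled in build.gradle makes compact:
  //
  //   peerConnection.getStats(report -> {
  //     for (org.webrtc.RTCStats stats : report.getStatsMap().values()) {
  //       Log.d(TAG, stats.getType() + ": " + stats);
  //     }
  //   });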
+ private void getStats() { + if (peerConnection == null || isError) { + return; + } + boolean success = peerConnection.getStats(new StatsObserver() { + @Override + public void onComplete(final StatsReport[] reports) { + events.onPeerConnectionStatsReady(reports); + } + }, null); + if (!success) { + Log.e(TAG, "getStats() returns false!"); + } + } + + public void enableStatsEvents(boolean enable, int periodMs) { + if (enable) { + try { + statsTimer.schedule(new TimerTask() { + @Override + public void run() { + executor.execute(() -> getStats()); + } + }, 0, periodMs); + } catch (Exception e) { + Log.e(TAG, "Can not schedule statistics timer", e); + } + } else { + statsTimer.cancel(); + } + } + + public void setAudioEnabled(final boolean enable) { + executor.execute(() -> { + enableAudio = enable; + if (localAudioTrack != null) { + localAudioTrack.setEnabled(enableAudio); + } + }); + } + + public void setVideoEnabled(final boolean enable) { + executor.execute(() -> { + renderVideo = enable; + if (localVideoTrack != null) { + localVideoTrack.setEnabled(renderVideo); + } + if (remoteVideoTrack != null) { + remoteVideoTrack.setEnabled(renderVideo); + } + }); + } + + public void createOffer() { + executor.execute(() -> { + if (peerConnection != null && !isError) { + Log.d(TAG, "PC Create OFFER"); + isInitiator = true; + peerConnection.createOffer(sdpObserver, sdpMediaConstraints); + } + }); + } + + public void createAnswer() { + executor.execute(() -> { + if (peerConnection != null && !isError) { + Log.d(TAG, "PC create ANSWER"); + isInitiator = false; + peerConnection.createAnswer(sdpObserver, sdpMediaConstraints); + } + }); + } + + public void addRemoteIceCandidate(final IceCandidate candidate) { + executor.execute(() -> { + if (peerConnection != null && !isError) { + if (queuedRemoteCandidates != null) { + queuedRemoteCandidates.add(candidate); + } else { + peerConnection.addIceCandidate(candidate); + } + } + }); + } + + public void removeRemoteIceCandidates(final IceCandidate[] candidates) { + executor.execute(() -> { + if (peerConnection == null || isError) { + return; + } + // Drain the queued remote candidates if there is any so that + // they are processed in the proper order. 
+ drainCandidates(); + peerConnection.removeIceCandidates(candidates); + }); + } + + public void setRemoteDescription(final SessionDescription sdp) { + executor.execute(() -> { + if (peerConnection == null || isError) { + return; + } + String sdpDescription = sdp.description; + if (preferIsac) { + sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + } + if (isVideoCallEnabled()) { + sdpDescription = + preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + } + if (peerConnectionParameters.audioStartBitrate > 0) { + sdpDescription = setStartBitrate( + AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate); + } + Log.d(TAG, "Set remote SDP."); + SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription); + peerConnection.setRemoteDescription(sdpObserver, sdpRemote); + }); + } + + public void stopVideoSource() { + executor.execute(() -> { + if (videoCapturer != null && !videoCapturerStopped) { + Log.d(TAG, "Stop video source."); + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + } + videoCapturerStopped = true; + } + }); + } + + public void startVideoSource() { + executor.execute(() -> { + if (videoCapturer != null && videoCapturerStopped) { + Log.d(TAG, "Restart video source."); + videoCapturer.startCapture(videoWidth, videoHeight, videoFps); + videoCapturerStopped = false; + } + }); + } + + public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) { + executor.execute(() -> { + if (peerConnection == null || localVideoSender == null || isError) { + return; + } + Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps); + if (localVideoSender == null) { + Log.w(TAG, "Sender is not ready."); + return; + } + + RtpParameters parameters = localVideoSender.getParameters(); + if (parameters.encodings.size() == 0) { + Log.w(TAG, "RtpParameters are not ready."); + return; + } + + for (RtpParameters.Encoding encoding : parameters.encodings) { + // Null value means no limit. + encoding.maxBitrateBps = maxBitrateKbps == null ? 
null : maxBitrateKbps * BPS_IN_KBPS; + } + if (!localVideoSender.setParameters(parameters)) { + Log.e(TAG, "RtpSender.setParameters failed."); + } + Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps); + }); + } + + private void reportError(final String errorMessage) { + Log.e(TAG, "Peerconnection error: " + errorMessage); + executor.execute(() -> { + if (!isError) { + events.onPeerConnectionError(errorMessage); + isError = true; + } + }); + } + + @Nullable + private AudioTrack createAudioTrack() { + audioSource = factory.createAudioSource(audioConstraints); + localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource); + localAudioTrack.setEnabled(enableAudio); + return localAudioTrack; + } + + @Nullable + private VideoTrack createVideoTrack(VideoCapturer capturer) { + surfaceTextureHelper = + SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext()); + videoSource = factory.createVideoSource(capturer.isScreencast()); + capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver()); + capturer.startCapture(videoWidth, videoHeight, videoFps); + + localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource); + localVideoTrack.setEnabled(renderVideo); + localVideoTrack.addSink(localRender); + return localVideoTrack; + } + + private void findVideoSender() { + for (RtpSender sender : peerConnection.getSenders()) { + if (sender.track() != null) { + String trackType = sender.track().kind(); + if (trackType.equals(VIDEO_TRACK_TYPE)) { + Log.d(TAG, "Found video sender."); + localVideoSender = sender; + } + } + } + } + + // Returns the remote VideoTrack, assuming there is only one. + private @Nullable VideoTrack getRemoteVideoTrack() { + for (RtpTransceiver transceiver : peerConnection.getTransceivers()) { + MediaStreamTrack track = transceiver.getReceiver().track(); + if (track instanceof VideoTrack) { + return (VideoTrack) track; + } + } + return null; + } + + private static String getSdpVideoCodecName(PeerConnectionParameters parameters) { + switch (parameters.videoCodec) { + case VIDEO_CODEC_VP8: + return VIDEO_CODEC_VP8; + case VIDEO_CODEC_VP9: + return VIDEO_CODEC_VP9; + case VIDEO_CODEC_H264_HIGH: + case VIDEO_CODEC_H264_BASELINE: + return VIDEO_CODEC_H264; + default: + return VIDEO_CODEC_VP8; + } + } + + private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) { + String fieldTrials = ""; + if (peerConnectionParameters.videoFlexfecEnabled) { + fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL; + Log.d(TAG, "Enable FlexFEC field trial."); + } + fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL; + if (peerConnectionParameters.disableWebRtcAGCAndHPF) { + fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL; + Log.d(TAG, "Disable WebRTC AGC field trial."); + } + return fieldTrials; + } + + @SuppressWarnings("StringSplitter") + private static String setStartBitrate( + String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) { + String[] lines = sdpDescription.split("\r\n"); + int rtpmapLineIndex = -1; + boolean sdpFormatUpdated = false; + String codecRtpMap = null; + // Search for codec rtpmap in format + // a=rtpmap: / [/] + String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"; + Pattern codecPattern = Pattern.compile(regex); + for (int i = 0; i < lines.length; i++) { + Matcher codecMatcher = codecPattern.matcher(lines[i]); + if (codecMatcher.matches()) { + codecRtpMap = codecMatcher.group(1); + rtpmapLineIndex = i; + break; + } + } + if (codecRtpMap == null) { + 
Log.w(TAG, "No rtpmap for " + codec + " codec"); + return sdpDescription; + } + Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]); + + // Check if a=fmtp string already exist in remote SDP for this codec and + // update it with new bitrate parameter. + regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$"; + codecPattern = Pattern.compile(regex); + for (int i = 0; i < lines.length; i++) { + Matcher codecMatcher = codecPattern.matcher(lines[i]); + if (codecMatcher.matches()) { + Log.d(TAG, "Found " + codec + " " + lines[i]); + if (isVideoCodec) { + lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; + } else { + lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000); + } + Log.d(TAG, "Update remote SDP line: " + lines[i]); + sdpFormatUpdated = true; + break; + } + } + + StringBuilder newSdpDescription = new StringBuilder(); + for (int i = 0; i < lines.length; i++) { + newSdpDescription.append(lines[i]).append("\r\n"); + // Append new a=fmtp line if no such line exist for a codec. + if (!sdpFormatUpdated && i == rtpmapLineIndex) { + String bitrateSet; + if (isVideoCodec) { + bitrateSet = + "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; + } else { + bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "=" + + (bitrateKbps * 1000); + } + Log.d(TAG, "Add remote SDP line: " + bitrateSet); + newSdpDescription.append(bitrateSet).append("\r\n"); + } + } + return newSdpDescription.toString(); + } + + /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */ + private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) { + final String mediaDescription = isAudio ? "m=audio " : "m=video "; + for (int i = 0; i < sdpLines.length; ++i) { + if (sdpLines[i].startsWith(mediaDescription)) { + return i; + } + } + return -1; + } + + private static String joinString( + Iterable s, String delimiter, boolean delimiterAtEnd) { + Iterator iter = s.iterator(); + if (!iter.hasNext()) { + return ""; + } + StringBuilder buffer = new StringBuilder(iter.next()); + while (iter.hasNext()) { + buffer.append(delimiter).append(iter.next()); + } + if (delimiterAtEnd) { + buffer.append(delimiter); + } + return buffer.toString(); + } + + private static @Nullable String movePayloadTypesToFront( + List preferredPayloadTypes, String mLine) { + // The format of the media description line should be: m= ... + final List origLineParts = Arrays.asList(mLine.split(" ")); + if (origLineParts.size() <= 3) { + Log.e(TAG, "Wrong SDP media description format: " + mLine); + return null; + } + final List header = origLineParts.subList(0, 3); + final List unpreferredPayloadTypes = + new ArrayList<>(origLineParts.subList(3, origLineParts.size())); + unpreferredPayloadTypes.removeAll(preferredPayloadTypes); + // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload + // types. 
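    // For example, preferring VP9 with payload type 98 would rewrite
    //   m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99
    // as
    //   m=video 9 UDP/TLS/RTP/SAVPF 98 96 97 99
    // i.e. the three header fields are kept and the preferred type moves to the front.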
+ final List newLineParts = new ArrayList<>(); + newLineParts.addAll(header); + newLineParts.addAll(preferredPayloadTypes); + newLineParts.addAll(unpreferredPayloadTypes); + return joinString(newLineParts, " ", false /* delimiterAtEnd */); + } + + private static String preferCodec(String sdpDescription, String codec, boolean isAudio) { + final String[] lines = sdpDescription.split("\r\n"); + final int mLineIndex = findMediaDescriptionLine(isAudio, lines); + if (mLineIndex == -1) { + Log.w(TAG, "No mediaDescription line, so can't prefer " + codec); + return sdpDescription; + } + // A list with all the payload types with name |codec|. The payload types are integers in the + // range 96-127, but they are stored as strings here. + final List codecPayloadTypes = new ArrayList<>(); + // a=rtpmap: / [/] + final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"); + for (String line : lines) { + Matcher codecMatcher = codecPattern.matcher(line); + if (codecMatcher.matches()) { + codecPayloadTypes.add(codecMatcher.group(1)); + } + } + if (codecPayloadTypes.isEmpty()) { + Log.w(TAG, "No payload types with name " + codec); + return sdpDescription; + } + + final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]); + if (newMLine == null) { + return sdpDescription; + } + Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine); + lines[mLineIndex] = newMLine; + return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */); + } + + private void drainCandidates() { + if (queuedRemoteCandidates != null) { + Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates"); + for (IceCandidate candidate : queuedRemoteCandidates) { + peerConnection.addIceCandidate(candidate); + } + queuedRemoteCandidates = null; + } + } + + private void switchCameraInternal() { + if (videoCapturer instanceof CameraVideoCapturer) { + if (!isVideoCallEnabled() || isError) { + Log.e(TAG, + "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError); + return; // No video is sent or only one camera is available or error happened. + } + Log.d(TAG, "Switch camera"); + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera(null); + } else { + Log.d(TAG, "Will not switch camera, video caputurer is not a camera"); + } + } + + public void switchCamera() { + executor.execute(this ::switchCameraInternal); + } + + public void changeCaptureFormat(final int width, final int height, final int framerate) { + executor.execute(() -> changeCaptureFormatInternal(width, height, framerate)); + } + + private void changeCaptureFormatInternal(int width, int height, int framerate) { + if (!isVideoCallEnabled() || isError || videoCapturer == null) { + Log.e(TAG, + "Failed to change capture format. Video: " + isVideoCallEnabled() + + ". Error : " + isError); + return; + } + Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate); + videoSource.adaptOutputFormat(width, height, framerate); + } + + // Implementation detail: observe ICE & stream changes and react accordingly. 
+ private class PCObserver implements PeerConnection.Observer { + @Override + public void onIceCandidate(final IceCandidate candidate) { + executor.execute(() -> events.onIceCandidate(candidate)); + } + + @Override + public void onIceCandidatesRemoved(final IceCandidate[] candidates) { + executor.execute(() -> events.onIceCandidatesRemoved(candidates)); + } + + @Override + public void onSignalingChange(PeerConnection.SignalingState newState) { + Log.d(TAG, "SignalingState: " + newState); + } + + @Override + public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) { + executor.execute(() -> { + Log.d(TAG, "IceConnectionState: " + newState); + if (newState == IceConnectionState.CONNECTED) { + events.onIceConnected(); + } else if (newState == IceConnectionState.DISCONNECTED) { + events.onIceDisconnected(); + } else if (newState == IceConnectionState.FAILED) { + reportError("ICE connection failed."); + } + }); + } + + /*@Override + public void onConnectionChange(final PeerConnection.PeerConnectionState newState) { + executor.execute(() -> { + Log.d(TAG, "PeerConnectionState: " + newState); + if (newState == PeerConnectionState.CONNECTED) { + events.onConnected(); + } else if (newState == PeerConnectionState.DISCONNECTED) { + events.onDisconnected(); + } else if (newState == PeerConnectionState.FAILED) { + reportError("DTLS connection failed."); + } + }); + }*/ + + @Override + public void onIceGatheringChange(PeerConnection.IceGatheringState newState) { + Log.d(TAG, "IceGatheringState: " + newState); + } + + @Override + public void onIceConnectionReceivingChange(boolean receiving) { + Log.d(TAG, "IceConnectionReceiving changed to " + receiving); + } + + @Override + public void onAddStream(final MediaStream stream) { + Log.e(TAG, "New stream!"); + } + + @Override + public void onRemoveStream(final MediaStream stream) { + Log.e(TAG, "Stream removed..."); + } + + @Override + public void onDataChannel(final DataChannel dc) { + Log.d(TAG, "New Data channel " + dc.label()); + + if (!dataChannelEnabled) + return; + + dc.registerObserver(new DataChannel.Observer() { + @Override + public void onBufferedAmountChange(long previousAmount) { + Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state()); + } + + @Override + public void onStateChange() { + Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state()); + } + + @Override + public void onMessage(final DataChannel.Buffer buffer) { + if (buffer.binary) { + Log.d(TAG, "Received binary msg over " + dc); + return; + } + ByteBuffer data = buffer.data; + final byte[] bytes = new byte[data.capacity()]; + data.get(bytes); + String strData = new String(bytes, Charset.forName("UTF-8")); + Log.d(TAG, "Got msg: " + strData + " over " + dc); + } + }); + } + + @Override + public void onRenegotiationNeeded() { + // No need to do anything; AppRTC follows a pre-agreed-upon + // signaling/negotiation protocol. + } + + @Override + public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {} + } + + // Implementation detail: handle offer creation/signaling and answer setting, + // as well as adding remote ICE candidates once the answer SDP is set. 
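  // The resulting call sequence, as implemented in onSetSuccess() below, is roughly:
  //   caller: createOffer -> setLocalDescription -> send offer to peer ->
  //           receive answer -> setRemoteDescription -> drain queued ICE candidates
  //   callee: receive offer -> setRemoteDescription -> createAnswer ->
  //           setLocalDescription -> send answer to peer -> drain queued ICE candidates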
+ private class SDPObserver implements SdpObserver { + @Override + public void onCreateSuccess(final SessionDescription origSdp) { + if (localSdp != null) { + reportError("Multiple SDP create."); + return; + } + String sdpDescription = origSdp.description; + if (preferIsac) { + sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + } + if (isVideoCallEnabled()) { + sdpDescription = + preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + } + final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription); + localSdp = sdp; + executor.execute(() -> { + if (peerConnection != null && !isError) { + Log.d(TAG, "Set local SDP from " + sdp.type); + peerConnection.setLocalDescription(sdpObserver, sdp); + } + }); + } + + @Override + public void onSetSuccess() { + executor.execute(() -> { + if (peerConnection == null || isError) { + return; + } + if (isInitiator) { + // For offering peer connection we first create offer and set + // local SDP, then after receiving answer set remote SDP. + if (peerConnection.getRemoteDescription() == null) { + // We've just set our local SDP so time to send it. + Log.d(TAG, "Local SDP set succesfully"); + events.onLocalDescription(localSdp); + } else { + // We've just set remote description, so drain remote + // and send local ICE candidates. + Log.d(TAG, "Remote SDP set succesfully"); + drainCandidates(); + } + } else { + // For answering peer connection we set remote SDP and then + // create answer and set local SDP. + if (peerConnection.getLocalDescription() != null) { + // We've just set our local SDP so time to send it, drain + // remote and send local ICE candidates. + Log.d(TAG, "Local SDP set succesfully"); + events.onLocalDescription(localSdp); + drainCandidates(); + } else { + // We've just set remote SDP - do nothing for now - + // answer will be created soon. + Log.d(TAG, "Remote SDP set succesfully"); + } + } + }); + } + + @Override + public void onCreateFailure(final String error) { + reportError("createSDP error: " + error); + } + + @Override + public void onSetFailure(final String error) { + reportError("setSDP error: " + error); + } + } +} \ No newline at end of file diff --git a/app/src/main/java/org/appspot/apprtc/RecordedAudioToFileController.java b/app/src/main/java/org/appspot/apprtc/RecordedAudioToFileController.java new file mode 100644 index 0000000..7dc191e --- /dev/null +++ b/app/src/main/java/org/appspot/apprtc/RecordedAudioToFileController.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.appspot.apprtc; + +import android.media.AudioFormat; +import android.os.Environment; +import android.support.annotation.Nullable; +import android.util.Log; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.concurrent.ExecutorService; +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; +import org.webrtc.voiceengine.WebRtcAudioRecord; +import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback; + +/** + * Implements the AudioRecordSamplesReadyCallback interface and writes + * recorded raw audio samples to an output file. + */ +public class RecordedAudioToFileController + implements SamplesReadyCallback, WebRtcAudioRecordSamplesReadyCallback { + private static final String TAG = "RecordedAudioToFile"; + private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L; + + private final Object lock = new Object(); + private final ExecutorService executor; + @Nullable private OutputStream rawAudioFileOutputStream; + private boolean isRunning; + private long fileSizeInBytes; + + public RecordedAudioToFileController(ExecutorService executor) { + Log.d(TAG, "ctor"); + this.executor = executor; + } + + /** + * Should be called on the same executor thread as the one provided at + * construction. + */ + public boolean start() { + Log.d(TAG, "start"); + if (!isExternalStorageWritable()) { + Log.e(TAG, "Writing to external media is not possible"); + return false; + } + synchronized (lock) { + isRunning = true; + } + return true; + } + + /** + * Should be called on the same executor thread as the one provided at + * construction. + */ + public void stop() { + Log.d(TAG, "stop"); + synchronized (lock) { + isRunning = false; + if (rawAudioFileOutputStream != null) { + try { + rawAudioFileOutputStream.close(); + } catch (IOException e) { + Log.e(TAG, "Failed to close file with saved input audio: " + e); + } + rawAudioFileOutputStream = null; + } + fileSizeInBytes = 0; + } + } + + // Checks if external storage is available for read and write. + private boolean isExternalStorageWritable() { + String state = Environment.getExternalStorageState(); + if (Environment.MEDIA_MOUNTED.equals(state)) { + return true; + } + return false; + } + + // Utilizes audio parameters to create a file name which contains sufficient + // information so that the file can be played using an external file player. + // Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm. + private void openRawAudioOutputFile(int sampleRate, int channelCount) { + final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator + + "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz" + + ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm"; + final File outputFile = new File(fileName); + try { + rawAudioFileOutputStream = new FileOutputStream(outputFile); + } catch (FileNotFoundException e) { + Log.e(TAG, "Failed to open audio output file: " + e.getMessage()); + } + Log.d(TAG, "Opened file for recording: " + fileName); + } + + // Called when new audio samples are ready. 
+ @Override + public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) { + onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(), + samples.getChannelCount(), samples.getSampleRate(), samples.getData())); + } + + // Called when new audio samples are ready. + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) { + // The native audio layer on Android should use 16-bit PCM format. + if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) { + Log.e(TAG, "Invalid audio format"); + return; + } + synchronized (lock) { + // Abort early if stop() has been called. + if (!isRunning) { + return; + } + // Open a new file for the first callback only since it allows us to add audio parameters to + // the file name. + if (rawAudioFileOutputStream == null) { + openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount()); + fileSizeInBytes = 0; + } + } + // Append the recorded 16-bit audio samples to the open output file. + executor.execute(() -> { + if (rawAudioFileOutputStream != null) { + try { + // Set a limit on max file size. 58348800 bytes corresponds to + // approximately 10 minutes of recording in mono at 48kHz. + if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) { + // Writes samples.getData().length bytes to output stream. + rawAudioFileOutputStream.write(samples.getData()); + fileSizeInBytes += samples.getData().length; + } + } catch (IOException e) { + Log.e(TAG, "Failed to write audio to file: " + e.getMessage()); + } + } + }); + } +} \ No newline at end of file diff --git a/app/src/main/java/org/appspot/apprtc/RtcEventLog.java b/app/src/main/java/org/appspot/apprtc/RtcEventLog.java new file mode 100644 index 0000000..83039c5 --- /dev/null +++ b/app/src/main/java/org/appspot/apprtc/RtcEventLog.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.appspot.apprtc; + +import android.content.Context; +import android.os.ParcelFileDescriptor; +import android.util.Log; +import java.io.File; +import java.io.IOException; +import org.webrtc.PeerConnection; + +public class RtcEventLog { + private static final String TAG = "RtcEventLog"; + private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000; + private final PeerConnection peerConnection; + private RtcEventLogState state = RtcEventLogState.INACTIVE; + + enum RtcEventLogState { + INACTIVE, + STARTED, + STOPPED, + } + + public RtcEventLog(PeerConnection peerConnection) { + if (peerConnection == null) { + throw new NullPointerException("The peer connection is null."); + } + this.peerConnection = peerConnection; + } + + public void start(final File outputFile) { + if (state == RtcEventLogState.STARTED) { + Log.e(TAG, "RtcEventLog has already started."); + return; + } + final ParcelFileDescriptor fileDescriptor; + try { + fileDescriptor = ParcelFileDescriptor.open(outputFile, + ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE + | ParcelFileDescriptor.MODE_TRUNCATE); + } catch (IOException e) { + Log.e(TAG, "Failed to create a new file", e); + return; + } + + // Passes ownership of the file to WebRTC. 
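    // detachFd() hands the raw file descriptor over to WebRTC along with the
    // ownership noted above, so the ParcelFileDescriptor wrapper is not closed
    // here; the native side is expected to close the descriptor when logging stops.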
+ boolean success = + peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES); + if (!success) { + Log.e(TAG, "Failed to start RTC event log."); + return; + } + state = RtcEventLogState.STARTED; + Log.d(TAG, "RtcEventLog started."); + } + + public void stop() { + if (state != RtcEventLogState.STARTED) { + Log.e(TAG, "RtcEventLog was not started."); + return; + } + peerConnection.stopRtcEventLog(); + state = RtcEventLogState.STOPPED; + Log.d(TAG, "RtcEventLog stopped."); + } +} \ No newline at end of file diff --git a/app/src/main/java/org/communiquons/android/comunic/client/data/helpers/CallsHelper.java b/app/src/main/java/org/communiquons/android/comunic/client/data/helpers/CallsHelper.java index a2de7da..910f053 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/data/helpers/CallsHelper.java +++ b/app/src/main/java/org/communiquons/android/comunic/client/data/helpers/CallsHelper.java @@ -15,6 +15,9 @@ import org.communiquons.android.comunic.client.data.models.NextPendingCallInform import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; +import org.webrtc.PeerConnection; + +import java.util.ArrayList; /** * Calls helper @@ -90,6 +93,30 @@ public class CallsHelper extends BaseHelper { return mCallsConfiguration != null && mCallsConfiguration.isEnabled(); } + /** + * Get the list of STUN and TURN servers + * + * @return List of STUN and TURN servers available for Comunic + */ + public static ArrayList GetPeerServers(){ + + ArrayList servers = new ArrayList<>(); + + //Stun server + servers.add(PeerConnection.IceServer.builder( + mCallsConfiguration.getStunServer()).createIceServer()); + + //TURN server + servers.add(PeerConnection.IceServer + .builder(mCallsConfiguration.getTurnServer()) + .setUsername(mCallsConfiguration.getTurnUsername()) + .setPassword(mCallsConfiguration.getTurnPassword()) + .createIceServer() + ); + + return servers; + } + /** * Create a call for a conversation, returns information about this call then * @@ -151,6 +178,26 @@ public class CallsHelper extends BaseHelper { } + /** + * Get information about a call + * + * @param callID Target call ID + * @return Information about the call / null in case of failure + */ + public CallInformation getInfo(int callID){ + + APIRequest request = new APIRequest(getContext(), "calls/getInfo"); + request.addInt("call_id", callID); + + try { + return JSONObjectToCallInformation(request.exec().getJSONObject(), null); + } catch (Exception e) { + e.printStackTrace(); + return null; + } + + } + /** * Try to get and return call information diff --git a/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallInformation.java b/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallInformation.java index 81aa245..6ce7bc5 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallInformation.java +++ b/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallInformation.java @@ -1,6 +1,7 @@ package org.communiquons.android.comunic.client.data.models; import android.support.annotation.NonNull; +import android.support.annotation.Nullable; import java.util.ArrayList; @@ -75,6 +76,36 @@ public class CallInformation { this.members = members; } + /** + * Find a member by user ID + * + * @param userID The ID of the user to search + * @return Information about the target user + */ + public CallMember findMember(int userID){ + for(CallMember member : members) + if(member.getUserID() == userID) + 
return member; + + throw new RuntimeException("Specified user was not found in the conversation!"); + } + + /** + * Find a member by call ID + * + * @param userCallID The ID of the target user + * @return Information about the user / null in case of failure + */ + @Nullable + public CallMember findMember(String userCallID){ + for(CallMember member : members) + if(member.getUserCallID().equals(userCallID)) + return member; + + return null; + } + + + public String getCallName() { return callName; } diff --git a/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallsConfiguration.java b/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallsConfiguration.java index a1cb878..f112156 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallsConfiguration.java +++ b/app/src/main/java/org/communiquons/android/comunic/client/data/models/CallsConfiguration.java @@ -50,7 +50,7 @@ public class CallsConfiguration { this.signalServerPort = signalServerPort; } - public boolean isSignalSererSecure() { + public boolean isSignalServerSecure() { return isSignalSererSecure; } diff --git a/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/CallActivity.java b/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/CallActivity.java index df33bdb..7a19ca3 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/CallActivity.java +++ b/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/CallActivity.java @@ -1,10 +1,46 @@ package org.communiquons.android.comunic.client.ui.activities; +import android.Manifest; +import android.content.pm.PackageManager; +import android.os.AsyncTask; import android.os.Bundle; -import android.widget.TextView; +import android.support.annotation.Nullable; +import android.support.v4.app.ActivityCompat; +import android.support.v4.content.ContextCompat; +import android.util.Log; +import android.widget.ProgressBar; +import android.widget.Toast; +import org.appspot.apprtc.AppRTCClient; +import org.appspot.apprtc.PeerConnectionClient; import org.communiquons.android.comunic.client.R; +import org.communiquons.android.comunic.client.data.enums.MemberCallStatus; +import org.communiquons.android.comunic.client.data.helpers.CallsHelper; +import org.communiquons.android.comunic.client.data.models.CallInformation; +import org.communiquons.android.comunic.client.data.models.CallMember; +import org.communiquons.android.comunic.client.data.models.CallResponse; +import org.communiquons.android.comunic.client.data.models.CallsConfiguration; +import org.communiquons.android.comunic.client.data.utils.AccountUtils; +import org.communiquons.android.comunic.client.ui.arrays.CallPeersConnectionsList; +import org.communiquons.android.comunic.client.ui.asynctasks.GetCallInformationTask; +import org.communiquons.android.comunic.client.ui.asynctasks.RespondToCallTask; +import org.communiquons.android.comunic.client.ui.models.CallPeerConnection; import org.communiquons.android.comunic.client.ui.receivers.PendingCallsBroadcastReceiver; +import org.communiquons.signalexchangerclient.SignalExchangerCallback; +import org.communiquons.signalexchangerclient.SignalExchangerClient; +import org.communiquons.signalexchangerclient.SignalExchangerInitConfig; +import org.webrtc.Camera1Enumerator; +import org.webrtc.CameraEnumerator; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import
org.webrtc.Logging; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.SessionDescription; +import org.webrtc.StatsReport; +import org.webrtc.SurfaceViewRenderer; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoFrame; +import org.webrtc.VideoSink; import java.util.Objects; @@ -13,21 +49,89 @@ import java.util.Objects; * * @author Pierre HUBERT */ -public class CallActivity extends BaseActivity { +public class CallActivity extends BaseActivity implements SignalExchangerCallback { + + /** + * Debug tag + */ + private static final String TAG = CallActivity.class.getSimpleName(); /** * Mandatory argument that includes call id */ public static final String ARGUMENT_CALL_ID = "call_id"; + + /** + * Permissions requests codes + */ + private static final int MY_PERMISSIONS_REQUEST_CAMERA = 100; + private static final int MY_PERMISSIONS_REQUEST_RECORD_AUDIO = 101; + private static final int MY_PERMISSIONS_REQUEST = 102; + + /** + * Refresh call information thread + */ + private RefreshCallInformation mRefreshCallInformation = null; + + /** + * Current call ID and information + */ + private int mCallID = -1; + private CallInformation mCallInformation = null; + + /** + * Signal exchanger client + */ + private SignalExchangerClient mSignalExchangerClient = null; + + + /** + * Connections list + */ + private CallPeersConnectionsList mList = new CallPeersConnectionsList(); + + + /** + * WebRTC attributes + */ + private EglBase rootEglBase; + + + /** + * Views + */ + private ProgressBar mProgressBar; + + @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_call); - //Hide call bar - Objects.requireNonNull(getSupportActionBar()).hide(); + mCallID = getIntent().getIntExtra(ARGUMENT_CALL_ID, 0); + + //Get views + initViews(); + initVideos(); + + //Mark the call as accepted + RespondToCallTask respondToCallTask = new RespondToCallTask(this); + respondToCallTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, + new CallResponse(mCallID, true)); + getTasksManager().addTask(respondToCallTask); + } + + @Override + protected void onStart() { + super.onStart(); + + //Refresh at a regular interval information about the call + mRefreshCallInformation = new RefreshCallInformation(); + mRefreshCallInformation.start(); + + } @Override @@ -37,7 +141,507 @@ public class CallActivity extends BaseActivity { //Hide call notifications PendingCallsBroadcastReceiver.RemoveCallNotification(this); - ((TextView)findViewById(R.id.call_id)).setText( - "Call " + getIntent().getExtras().getInt(ARGUMENT_CALL_ID)); + //Make sure we have access to user camera and microphone + askForPermissions(); + } + + @Override + protected void onStop() { + super.onStop(); + mRefreshCallInformation.interrupt(); + } + + /** + * Request access to user camera and microphone devices + * + * Based on https://github.com/sergiopaniego/WebRTCAndroidExample + */ + private void askForPermissions() { + if ((ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) + != PackageManager.PERMISSION_GRANTED) && + (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) + != PackageManager.PERMISSION_GRANTED)) { + ActivityCompat.requestPermissions(this, + new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, + MY_PERMISSIONS_REQUEST); + } else if (ContextCompat.checkSelfPermission(this, + Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { + ActivityCompat.requestPermissions(this, + new 
String[]{Manifest.permission.RECORD_AUDIO}, + MY_PERMISSIONS_REQUEST_RECORD_AUDIO); + + } else if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) + != PackageManager.PERMISSION_GRANTED) { + ActivityCompat.requestPermissions(this, + new String[]{Manifest.permission.CAMERA}, + MY_PERMISSIONS_REQUEST_CAMERA); + } + } + + /** + * Get views + */ + private void initViews(){ + + mProgressBar = findViewById(R.id.progressBar); + + + } + + + private void initVideos(){ + + rootEglBase = EglBase.create(); + + } + + + /** + * Refresh call information + */ + private void getCallInformation(){ + + GetCallInformationTask getCallInformationTask = new GetCallInformationTask(this); + getCallInformationTask.setOnPostExecuteListener(this::onGotCallInformation); + getCallInformationTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, mCallID); + + } + + /** + * Once we have got information about the call + * + * @param info Information about the call + */ + private void onGotCallInformation(@Nullable CallInformation info){ + + if(info == null){ + Toast.makeText(this, R.string.err_get_call_info, Toast.LENGTH_SHORT).show(); + return; + } + + setTitle(info.getCallName()); + mCallInformation = info; + + //Check if everyone left the conversation + if(mCallInformation.hasAllMembersLeftCallExcept(AccountUtils.getID(this))){ + Toast.makeText(this, R.string.notice_call_terminated, Toast.LENGTH_SHORT).show(); + finish(); + return; + } + + //Connect to signaling server + if(mSignalExchangerClient == null){ + initializeSignalClient(); + return; + } + + //Check that the connection is established + if(!mSignalExchangerClient.isConnected()) + return; + + processClientsConnections(); + } + + + private void initializeSignalClient(){ + + CallsConfiguration callsConfiguration = CallsHelper.GetCallsConfiguration(); + + assert callsConfiguration != null; + mSignalExchangerClient = new SignalExchangerClient(new SignalExchangerInitConfig( + callsConfiguration.getSignalServerName(), + callsConfiguration.getSignalServerPort(), + mCallInformation.findMember(AccountUtils.getID(this)).getUserCallID(), + callsConfiguration.isSignalServerSecure() + ), this); + } + + + private void processClientsConnections(){ + //Process each peer connection + for(CallMember member : mCallInformation.getMembers()) + processClientConnection(member); + } + + private void processClientConnection(CallMember member){ + + //Skip current user + if(member.getUserID() == AccountUtils.getID(this)) + return; + + //Check if the member left the conversation + if(member.getStatus() != MemberCallStatus.ACCEPTED){ + disconnectFromPeer(member); + + return; + } + + if(mList.find(member) == null && member.getUserID() > AccountUtils.getID(this)) { + createPeerConnection(member, false); + mSignalExchangerClient.sendReadyMessage(member.getUserCallID()); + } + + + if(mList.find(member) != null) + Objects.requireNonNull(mList.find(member)).setMember(member); + } + + /** + * Create the peer connection for a specific call member + * + * @param member Target member + * @param isInitiator Specify whether we should send the offer to this user or not + */ + private void createPeerConnection(CallMember member, boolean isInitiator){ + + Log.v(TAG, "Create peer connection with user " + member.getUserID()); + + CallPeerConnection callPeer = new CallPeerConnection(member); + mList.add(callPeer); + + //Create peer connection + PeerConnectionClient peerConnectionClient = new PeerConnectionClient( + getApplicationContext(), + rootEglBase, + new
PeerConnectionClient.PeerConnectionParameters( + true, + false, + false, + 0, + 0, + 0, + 0, + "", + true, + false, + 0, + null, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + null + ), + new PeerConnectionEvents(callPeer) + ); + callPeer.setPeerConnectionClient(peerConnectionClient); + + PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); + peerConnectionClient.createPeerConnectionFactory(options); + + + //Signaling parameters + AppRTCClient.SignalingParameters parameters = new AppRTCClient.SignalingParameters( + CallsHelper.GetPeerServers(), isInitiator, null, + null, null, null, null + ); + + + + + //Initialize video view + SurfaceViewRenderer localView = new SurfaceViewRenderer(this); + localView.init(rootEglBase.getEglBaseContext(), null); + localView.setZOrderMediaOverlay(true); + callPeer.setLocalVideoView(localView); + + SurfaceViewRenderer remoteView = new SurfaceViewRenderer(this); + remoteView.init(rootEglBase.getEglBaseContext(), null); + remoteView.setZOrderMediaOverlay(false); + callPeer.setRemoteViewView(remoteView); + + + ProxyVideoSink localProxyVideoSink = new ProxyVideoSink(); + localProxyVideoSink.setTarget(callPeer.getLocalVideoView()); + + ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink(); + remoteProxyRenderer.setTarget(callPeer.getRemoteViewView()); + callPeer.getRemoteSinks().add(remoteProxyRenderer); + + //Start connection + peerConnectionClient.createPeerConnection( + localProxyVideoSink, + callPeer.getRemoteSinks(), + createCameraCapturer(new Camera1Enumerator(false)), + parameters + ); + + if(isInitiator) + peerConnectionClient.createOffer(); + } + + /** + * Disconnect from a specific peer + * + * @param member Information about related call member + */ + private void disconnectFromPeer(CallMember member){ + + CallPeerConnection callPeer = mList.find(member); + if(callPeer == null) + return; + + callPeer.getPeerConnectionClient().close(); + + mList.remove(callPeer); + } + + //Based on https://github.com/vivek1794/webrtc-android-codelab + @Nullable + private VideoCapturer createCameraCapturer(CameraEnumerator enumerator){ + final String[] deviceNames = enumerator.getDeviceNames(); + + // First, try to find front facing camera + Logging.d(TAG, "Looking for front facing cameras."); + for (String deviceName : deviceNames) { + if (enumerator.isFrontFacing(deviceName)) { + Logging.d(TAG, "Creating front facing camera capturer."); + VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null); + + if (videoCapturer != null) { + return videoCapturer; + } + } + } + + // Front facing camera not found, try something else + Logging.d(TAG, "Looking for other cameras."); + for (String deviceName : deviceNames) { + if (!enumerator.isFrontFacing(deviceName)) { + Logging.d(TAG, "Creating other camera capturer."); + VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null); + + if (videoCapturer != null) { + return videoCapturer; + } + } + } + + return null; + } + + + @Override + public void onSignalServerError(String msg, @Nullable Throwable t) { + runOnUiThread(() -> Toast.makeText(this, + R.string.err_connect_signaling_server, Toast.LENGTH_SHORT).show()); + } + + @Override + public void onConnectedToSignalingServer() { + runOnUiThread(this::processClientsConnections); + } + + @Override + public void onReadyMessageCallback(String target_id, int number_targets) { + Log.e(TAG, "Send ready message callback"); + } + + @Override + public void onReadyMessage(String source_id) { 
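+        // A peer with a lower user ID has signalled it is ready: this side acts as the initiator and will create the SDP offer.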
+ + runOnUiThread(() -> { + + //Ignore message if a connection has already been established + if (mList.findByCallID(source_id) != null) { + Log.e(TAG, "Ignored ready message from " + source_id + " because a connection has already been made!"); + return; + } + + + CallMember member = mCallInformation.findMember(source_id); + + if (member == null) { + Log.e(TAG, source_id + " sent a ready message but it does not belong to the conversation!"); + return; + } + + Log.v(TAG, source_id + " informed us it is ready to establish a connection."); + createPeerConnection(member, true); + + }); + } + + @Override + public void onSignal(String source_id, String signal) { + Log.e(TAG, "Received new signal from " + source_id); + } + + @Override + public void onSendSignalCallback(int number_targets) { + Log.e(TAG, "Send signal callback, number of targets: " + number_targets); + } + + @Override + public void gotRemoteIceCandidate(String source_id, IceCandidate iceCandidate) { + + runOnUiThread(() -> { + + CallPeerConnection connection = mList.findByCallID(source_id); + if(connection == null) { + Log.e(TAG, "Dropped ICE candidate from " + source_id + ": no peer connection was ready to receive it!"); + return; + } + + connection.getPeerConnectionClient().addRemoteIceCandidate(iceCandidate); + }); + } + + @Override + public void gotRemoteSessionDescription(String source_id, SessionDescription sessionDescription) { + + runOnUiThread(() -> { + + CallPeerConnection connection = mList.findByCallID(source_id); + if(connection == null) { + Log.e(TAG, "Dropped session description from " + source_id + ": no peer connection was ready to receive it!"); + return; + } + + connection.getPeerConnectionClient().setRemoteDescription(sessionDescription); + + //Only create an answer when the remote description is an offer (we are not the initiator) + if(sessionDescription.type == SessionDescription.Type.OFFER) + connection.getPeerConnectionClient().createAnswer(); + }); + + + } + + + /** + * Class used to receive events that come from a connection + */ + private class PeerConnectionEvents implements PeerConnectionClient.PeerConnectionEvents { + + private CallPeerConnection connection; + + PeerConnectionEvents(CallPeerConnection connection) { + this.connection = connection; + } + + @Override + public void onLocalDescription(SessionDescription sdp) { + Log.v(TAG, "Got a new local description"); + runOnUiThread(() -> + mSignalExchangerClient.sendSessionDescription( + connection.getMember().getUserCallID(), sdp)); + } + + @Override + public void onIceCandidate(IceCandidate candidate) { + Log.v(TAG, "Got a new ICE candidate"); + runOnUiThread(() -> mSignalExchangerClient.sendIceCandidate( + connection.getMember().getUserCallID(), + candidate)); + } + + @Override + public void onIceCandidatesRemoved(IceCandidate[] candidates) { + Log.v(TAG, "Some ice candidates removed with peer " + + connection.getMember().getUserID()); + } + + @Override + public void onIceConnected() { + Log.v(TAG, "Ice connected with peer " + + connection.getMember().getUserID()); + } + + @Override + public void onIceDisconnected() { + Log.v(TAG, "Ice disconnected from peer " + + connection.getMember().getUserID()); + } + + @Override + public void onConnected() { + Log.v(TAG, "Connected to peer " + + connection.getMember().getUserID()); + } + + @Override + public void onDisconnected() { + Log.v(TAG, "Disconnected from peer " + + connection.getMember().getUserID()); + } + + @Override + public void onPeerConnectionClosed() { + Log.v(TAG, "Connection closed with user " + + connection.getMember().getUserID()); + runOnUiThread(() -> disconnectFromPeer(connection.getMember())); + } + + @Override + public void
onPeerConnectionStatsReady(StatsReport[] reports) { + Log.v(TAG, "Stats ready for peer connection with " + + connection.getMember().getUserID()); + } + + @Override + public void onPeerConnectionError(String description) { + Log.e(TAG, "Peer connection error with " + + connection.getMember().getUserID() + " " + description); + } + } + + /** + * Refresh call information thread + */ + private class RefreshCallInformation extends Thread { + + private final Object o = new Object(); + private boolean stop = false; + + @Override + public void run() { + super.run(); + + synchronized (o){ + + while(!stop) { + + runOnUiThread(CallActivity.this::getCallInformation); + + + try { + o.wait((long) (1.5 * 1000)); + } catch (InterruptedException e) { + e.printStackTrace(); + } + + } + + } + } + + public void interrupt(){ + stop = true; + } + } + + /** + * I don't know why, but this is an absolute requirement ! (to show videos) + */ + private static class ProxyVideoSink implements VideoSink { + private VideoSink target; + + @Override + synchronized public void onFrame(VideoFrame frame) { + if (target == null) { + Logging.d(TAG, "Dropping frame in proxy because target is null."); + return; + } + + target.onFrame(frame); + } + + synchronized public void setTarget(VideoSink target) { + this.target = target; + } } } diff --git a/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/MainActivity.java b/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/MainActivity.java index e1b8a7b..cb82916 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/MainActivity.java +++ b/app/src/main/java/org/communiquons/android/comunic/client/ui/activities/MainActivity.java @@ -25,7 +25,7 @@ import android.widget.Toast; import org.communiquons.android.comunic.client.BuildConfig; import org.communiquons.android.comunic.client.R; -import org.communiquons.android.comunic.client.crashreporter.CrashReporter; +import org.communiquons.crashreporter.CrashReporter; import org.communiquons.android.comunic.client.data.enums.VirtualDirectoryType; import org.communiquons.android.comunic.client.data.helpers.APIRequestHelper; import org.communiquons.android.comunic.client.data.helpers.AccountHelper; diff --git a/app/src/main/java/org/communiquons/android/comunic/client/ui/arrays/CallPeersConnectionsList.java b/app/src/main/java/org/communiquons/android/comunic/client/ui/arrays/CallPeersConnectionsList.java new file mode 100644 index 0000000..dc74ddf --- /dev/null +++ b/app/src/main/java/org/communiquons/android/comunic/client/ui/arrays/CallPeersConnectionsList.java @@ -0,0 +1,48 @@ +package org.communiquons.android.comunic.client.ui.arrays; + +import android.support.annotation.Nullable; + +import org.communiquons.android.comunic.client.data.models.CallMember; +import org.communiquons.android.comunic.client.ui.models.CallPeerConnection; + +import java.util.ArrayList; + +/** + * List of clients connections + * + * @author Pierre HUBERT + */ +public class CallPeersConnectionsList extends ArrayList { + + /** + * Find the connection matching a specific call member + * + * + * @param member Information about the target member + * @return Full client connection + */ + @Nullable + public CallPeerConnection find(CallMember member){ + for(CallPeerConnection connection : this) + if(connection.getMember().getUserID() == member.getUserID()) + return connection; + + return null; + } + + + /** + * Try to find a peer connection using call ID + * + * @param id The ID of the user call ID + * 
@return Information about the peer connection / null object in case of failure + */ + @Nullable + public CallPeerConnection findByCallID(String id){ + for(CallPeerConnection connection : this) + if(connection.getMember().getUserCallID().equals(id)) + return connection; + + return null; + } +} diff --git a/app/src/main/java/org/communiquons/android/comunic/client/ui/asynctasks/GetCallInformationTask.java b/app/src/main/java/org/communiquons/android/comunic/client/ui/asynctasks/GetCallInformationTask.java new file mode 100644 index 0000000..afab4ce --- /dev/null +++ b/app/src/main/java/org/communiquons/android/comunic/client/ui/asynctasks/GetCallInformationTask.java @@ -0,0 +1,32 @@ +package org.communiquons.android.comunic.client.ui.asynctasks; + +import android.content.Context; + +import org.communiquons.android.comunic.client.data.helpers.CallsHelper; +import org.communiquons.android.comunic.client.data.models.CallInformation; + +/** + * Task to get information about a call + * + * @author Pierre HUBERT + */ +public class GetCallInformationTask extends SafeAsyncTask { + + public GetCallInformationTask(Context context) { + super(context); + } + + @Override + protected CallInformation doInBackground(Integer... integers) { + + CallsHelper callsHelper = new CallsHelper(getContext()); + + CallInformation callInformation = callsHelper.getInfo(integers[0]); + + //Try to get call name + if(callInformation == null || callsHelper.getCallName(callInformation) == null) + return null; + + return callInformation; + } +} diff --git a/app/src/main/java/org/communiquons/android/comunic/client/ui/models/CallPeerConnection.java b/app/src/main/java/org/communiquons/android/comunic/client/ui/models/CallPeerConnection.java new file mode 100644 index 0000000..41dd794 --- /dev/null +++ b/app/src/main/java/org/communiquons/android/comunic/client/ui/models/CallPeerConnection.java @@ -0,0 +1,70 @@ +package org.communiquons.android.comunic.client.ui.models; + +import org.appspot.apprtc.PeerConnectionClient; +import org.communiquons.android.comunic.client.data.models.CallMember; +import org.webrtc.SurfaceViewRenderer; +import org.webrtc.VideoSink; + +import java.util.ArrayList; + +/** + * Single remote connection information + * + * @author Pierre HUBERT + */ +public class CallPeerConnection { + + //Private fields + private CallMember member; + private PeerConnectionClient peerConnectionClient; + private ArrayList remoteSinks = new ArrayList<>(); + + //Views + private SurfaceViewRenderer mLocalVideoView; + private SurfaceViewRenderer mRemoteViewView; + + public CallPeerConnection(CallMember member) { + this.member = member; + } + + public CallMember getMember() { + return member; + } + + public void setMember(CallMember member) { + this.member = member; + } + + public PeerConnectionClient getPeerConnectionClient() { + return peerConnectionClient; + } + + public void setPeerConnectionClient(PeerConnectionClient peerConnectionClient) { + this.peerConnectionClient = peerConnectionClient; + } + + public ArrayList getRemoteSinks() { + return remoteSinks; + } + + public void setRemoteSinks(ArrayList remoteSinks) { + this.remoteSinks = remoteSinks; + } + + public SurfaceViewRenderer getRemoteViewView() { + return mRemoteViewView; + } + + public void setRemoteViewView(SurfaceViewRenderer mRemoteViewView) { + this.mRemoteViewView = mRemoteViewView; + } + + public SurfaceViewRenderer getLocalVideoView() { + + return mLocalVideoView; + } + + public void setLocalVideoView(SurfaceViewRenderer mLocalVideoView) { + 
this.mLocalVideoView = mLocalVideoView; + } +} diff --git a/app/src/main/java/org/communiquons/android/comunic/client/crashreporter/CrashReporter.java b/app/src/main/java/org/communiquons/crashreporter/CrashReporter.java similarity index 99% rename from app/src/main/java/org/communiquons/android/comunic/client/crashreporter/CrashReporter.java rename to app/src/main/java/org/communiquons/crashreporter/CrashReporter.java index 6e5bc1e..00960f8 100644 --- a/app/src/main/java/org/communiquons/android/comunic/client/crashreporter/CrashReporter.java +++ b/app/src/main/java/org/communiquons/crashreporter/CrashReporter.java @@ -1,4 +1,4 @@ -package org.communiquons.android.comunic.client.crashreporter; +package org.communiquons.crashreporter; import android.content.Context; import android.os.AsyncTask; diff --git a/app/src/main/java/org/communiquons/signalexchangerclient/ClientRequest.java b/app/src/main/java/org/communiquons/signalexchangerclient/ClientRequest.java new file mode 100644 index 0000000..7d74029 --- /dev/null +++ b/app/src/main/java/org/communiquons/signalexchangerclient/ClientRequest.java @@ -0,0 +1,84 @@ +package org.communiquons.signalexchangerclient; + +import org.json.JSONException; +import org.json.JSONObject; + +/** + * Signal exchanger client request + * + * @author Pierre HUBERT + */ +class ClientRequest { + + /** + * Contains request information + */ + private JSONObject mList; + + /** + * Initialize object + */ + ClientRequest(){ + this.mList = new JSONObject(); + } + + /** + * Add a string to the request + * + * @param name The name of the string to add + * @param value The value of the string to add + * @return This object to help to concatenate requests + */ + ClientRequest addString(String name, String value){ + try { + mList.put(name, value); + } catch (JSONException e) { + e.printStackTrace(); + throw new RuntimeException("Could not add a string to a JSON object!"); + } + + return this; + } + + /** + * Add a boolean to the request + * + * @param name The name of the string to add + * @param value Boolean value + * @return This object + */ + ClientRequest addBoolean(String name, boolean value){ + try { + mList.put(name, value); + } catch (JSONException e) { + e.printStackTrace(); + } + + return this; + } + + /** + * Add a JSON object to the request + * + * @param name The name of the field to add + * @param value The object + * @return This object + */ + ClientRequest addJSONObject(String name, JSONObject value){ + try { + mList.put(name, value); + } catch (JSONException e) { + e.printStackTrace(); + } + return this; + } + + /** + * Get resulting JSON object + * + * @return Get the resulting JSON object + */ + JSONObject get(){ + return mList; + } +} diff --git a/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerCallback.java b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerCallback.java new file mode 100644 index 0000000..984b66d --- /dev/null +++ b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerCallback.java @@ -0,0 +1,76 @@ +package org.communiquons.signalexchangerclient; + +import android.support.annotation.Nullable; + +import org.webrtc.IceCandidate; +import org.webrtc.SessionDescription; + +/** + * This interface should be implemented by the classes + * that makes use of the {@link SignalExchangerClient} + * in order to get updated about new information + * availability + * + * @author Pierre HUBERT + */ +public interface SignalExchangerCallback { + + /** + * Method called when an error 
occurs + * + * @param msg Message associated with the error + * @param t Optional associated throwable + */ + void onSignalServerError(String msg, @Nullable Throwable t); + + /** + * Method called once we are connected to the server + */ + void onConnectedToSignalingServer(); + + /** + * Method called on ready message callback + * + * @param target_id The ID of the target + * @param number_targets The number of peers who received the message + */ + void onReadyMessageCallback(String target_id, int number_targets); + + /** + * Method called when this client receives a new ready message signal + * + * @param source_id The source of the message + */ + void onReadyMessage(String source_id); + + /** + * Method called when the client receives a signal + * + * @param source_id The source of the signal + * @param signal The signal + */ + void onSignal(String source_id, String signal); + + /** + * Send signal callback + * + * @param number_targets The number of targets for the signal + */ + void onSendSignalCallback(int number_targets); + + /** + * This method is called once we have received a remote Ice Candidate + * + * @param source_id The source of the signal + * @param iceCandidate The candidate itself + */ + void gotRemoteIceCandidate(String source_id, IceCandidate iceCandidate); + + /** + * This method is called when we have received a new remote session description + * + * @param source_id The source of the signal + * @param sessionDescription The session description + */ + void gotRemoteSessionDescription(String source_id, SessionDescription sessionDescription); +} diff --git a/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerClient.java b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerClient.java new file mode 100644 index 0000000..a435284 --- /dev/null +++ b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerClient.java @@ -0,0 +1,333 @@ +package org.communiquons.signalexchangerclient; + +import android.support.annotation.NonNull; +import android.support.annotation.Nullable; +import android.util.Log; + +import org.json.JSONException; +import org.json.JSONObject; +import org.webrtc.IceCandidate; +import org.webrtc.SessionDescription; + +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.WebSocket; +import okhttp3.WebSocketListener; + +/** + * Signal exchanger client + * + * @author Pierre HUBERT + */ +public class SignalExchangerClient extends WebSocketListener { + + /** + * Debug tag + */ + private static final String TAG = SignalExchangerClient.class.getSimpleName(); + + /** + * Instance configuration + */ + private SignalExchangerInitConfig mConfig; + + /** + * Signal exchanger callback + */ + @Nullable + private SignalExchangerCallback mCallback; + + /** + * Http Client + */ + private OkHttpClient mClient; + + /** + * Current WebSocket connection + */ + private WebSocket mWebSocket; + + /** + * Initialize a SignalExchanger client + * + * @param config Configuration of the client + * @param cb Callback function to call when we get an information update + */ + public SignalExchangerClient(@NonNull SignalExchangerInitConfig config, + @Nullable SignalExchangerCallback cb){ + + //Save configuration + this.mConfig = config; + this.mCallback = cb; + + //Connect to the WebSocket + String url = (config.isSecure() ?
"wss" : "ws") + + "://" + config.getDomain() + ":" + config.getPort() + "/socket"; + + mClient = new OkHttpClient(); + Request request = new Request.Builder().url(url).build(); + + mWebSocket = mClient.newWebSocket(request, this); + } + + /** + * Get current client configuration + * + * @return Configuration of the client + */ + public SignalExchangerInitConfig getConfig() { + return mConfig; + } + + /** + * Set the callback to use on new updates + * + * @param mCallback Callback to use + */ + public void setCallback(@Nullable SignalExchangerCallback mCallback) { + this.mCallback = mCallback; + } + + /** + * Check out whether the current client is connected to a server or not + * + * @return true if the client is connected to a server / false else + */ + public boolean isConnected(){ + return mWebSocket != null; + } + + /** + * Send ready message to a client + * + * @param target_client_id The ID of the target client + */ + public void sendReadyMessage(String target_client_id){ + sendData(new ClientRequest() + .addBoolean("ready_msg", true) + .addString("target_id", target_client_id)); + } + + /** + * Send a signal to a target + * + * @param target_id The ID of the target + * @param signal The signal to send + */ + public void sendSignal(String target_id, String signal){ + sendData(new ClientRequest() + .addString("target_id", target_id) + .addString("signal", signal)); + } + + /** + * Send a session description to a target + * + * @param target_id The ID of the target + * @param description The description + */ + public void sendSessionDescription(String target_id, SessionDescription description){ + try { + JSONObject object = new JSONObject(); + object.put("type", description.type.canonicalForm()); + object.put("sdp", description.description); + sendSignal(target_id, object.toString()); + + } catch (JSONException e) { + e.printStackTrace(); + } + + } + + /** + * Send an Ice Candidate to a remote peer + * + * @param target_id The ID of the target + * @param candidate The candidate to send + */ + public void sendIceCandidate(String target_id, IceCandidate candidate){ + try { + JSONObject candidateObj = new JSONObject(); + candidateObj.put("sdpMid", candidate.sdpMid); + candidateObj.put("sdpMLineIndex", candidate.sdpMLineIndex); + candidateObj.put("candidate", candidate.sdp); + + JSONObject object = new JSONObject(); + object.put("candidate", candidateObj); + sendSignal(target_id, object.toString()); + + + } catch (JSONException e) { + e.printStackTrace(); + } + } + + /** + * Send data to the server + * + * @param request The data to send to the server + */ + private void sendData(@NonNull ClientRequest request){ + + //Continues only in case of active connection + if(!isConnected()) { + return; + } + + //Send data to the server + Log.v(TAG, "Sending " + request.get().toString()); + mWebSocket.send(request.get().toString()); + } + + /** + * Invoked when a web socket has been accepted by the remote peer and may begin transmitting + * messages. + */ + public void onOpen(WebSocket webSocket, Response response) { + + //Save WebSocket object + this.mWebSocket = webSocket; + + //Send the ID of current client to the server + sendData(new ClientRequest() + .addString("client_id", mConfig.getClientID())); + + //Inform we are connected + if(mCallback != null) + mCallback.onConnectedToSignalingServer(); + + } + + /** Invoked when a text (type {@code 0x1}) message has been received. 
*/ + @Override + public void onMessage(WebSocket webSocket, String text) { + Log.v(TAG, "Received new message from server: " + text); + + //Decode message + try { + JSONObject message = new JSONObject(text); + + //Ready message callback + if(message.has("ready_message_sent")){ + + if(mCallback != null) + mCallback.onReadyMessageCallback( + message.getString("target_id"), + message.getInt("number_of_targets") + ); + + } + + //Ready message + else if(message.has("ready_msg")){ + + if(mCallback != null) + mCallback.onReadyMessage( + message.getString("source_id") + ); + + } + + //Signal + else if(message.has("signal")) { + + if(mCallback != null) + mCallback.onSignal( + message.getString("source_id"), + message.getString("signal") + ); + + processReceivedSignal(message.getString("source_id"), + message.getString("signal")); + } + + //Send signal callback + else if(message.has("signal_sent")){ + + if(mCallback != null) + mCallback.onSendSignalCallback( + message.getInt("number_of_targets") + ); + + } + + //Success message + else if(message.has("success")) + Log.v(TAG, "Success: " + message.getString("success")); + + //Unrecognized message + else + Log.e(TAG, "Message from server not understood!"); + + } catch (JSONException e) { + e.printStackTrace(); + + if(mCallback != null) + mCallback.onSignalServerError("Could not parse response from server!", e); + } + } + + /** + * Process a received signal + * + * @param source_id The source of the signal + * @param signal The signal to process + */ + private void processReceivedSignal(String source_id, String signal) throws JSONException { + + JSONObject object = new JSONObject(signal); + + //Ice candidate + if(object.has("candidate")) { + + JSONObject candidate = object.getJSONObject("candidate"); + + if (mCallback != null) + mCallback.gotRemoteIceCandidate( + source_id, new IceCandidate( + candidate.getString("sdpMid"), + candidate.getInt("sdpMLineIndex"), + candidate.getString("candidate") + ) + ); + + } + + //Sdp signal + else if(object.has("sdp") && object.has("type")){ + + SessionDescription.Type type = SessionDescription.Type.fromCanonicalForm( + object.getString("type")); + String sdp = object.getString("sdp"); + + if(mCallback != null) + mCallback.gotRemoteSessionDescription(source_id, + new SessionDescription(type, sdp)); + + } + + else + Log.e(TAG, "Could not understand received signal!"); + + } + + /** + * Invoked when both peers have indicated that no more messages will be transmitted and the + * connection has been successfully released. No further calls to this listener will be made. + */ + public void onClosed(WebSocket webSocket, int code, String reason) { + mWebSocket = null; + } + + /** + * Invoked when a web socket has been closed due to an error reading from or writing to the + * network. Both outgoing and incoming messages may have been lost. No further calls to this + * listener will be made. 
+ */ + public void onFailure(WebSocket webSocket, Throwable t, @Nullable Response response) { + + if(mCallback != null) + mCallback.onSignalServerError(t.getMessage(), t); + } +} diff --git a/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerInitConfig.java b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerInitConfig.java new file mode 100644 index 0000000..328808d --- /dev/null +++ b/app/src/main/java/org/communiquons/signalexchangerclient/SignalExchangerInitConfig.java @@ -0,0 +1,58 @@ +package org.communiquons.signalexchangerclient; + +/** + * Signal exchanger initialization configuration + * + * @author Pierre HUBERT + */ +public class SignalExchangerInitConfig { + + //Private fields + private String domain; + private int port; + private String clientID; + private boolean isSecure; + + public SignalExchangerInitConfig() { + + } + + public SignalExchangerInitConfig(String domain, int port, String clientID, boolean isSecure) { + this.domain = domain; + this.port = port; + this.clientID = clientID; + this.isSecure = isSecure; + } + + public String getDomain() { + return domain; + } + + public void setDomain(String domain) { + this.domain = domain; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + public String getClientID() { + return clientID; + } + + public void setClientID(String clientID) { + this.clientID = clientID; + } + + public boolean isSecure() { + return isSecure; + } + + public void setSecure(boolean secure) { + isSecure = secure; + } +} diff --git a/app/src/main/res/layout/activity_call.xml b/app/src/main/res/layout/activity_call.xml index a409a54..c8271f9 100644 --- a/app/src/main/res/layout/activity_call.xml +++ b/app/src/main/res/layout/activity_call.xml @@ -6,17 +6,18 @@ android:layout_height="match_parent" tools:context=".ui.activities.CallActivity"> - + \ No newline at end of file diff --git a/app/src/main/res/values-fr/strings.xml b/app/src/main/res/values-fr/strings.xml index 6613d36..c90c8b5 100644 --- a/app/src/main/res/values-fr/strings.xml +++ b/app/src/main/res/values-fr/strings.xml @@ -329,4 +329,7 @@ Répondre Appel entrant Impossible de récupérer les informations de l\'appel en cours ! + Impossible de récupérer les informations sur l\'appel ! + Impossible de se connecter au serveur de signalisation ! + Appel terminé. \ No newline at end of file diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml index a0eea37..f62966f 100644 --- a/app/src/main/res/values/strings.xml +++ b/app/src/main/res/values/strings.xml @@ -328,4 +328,7 @@ Accept call Incoming call Could not get pending call information! + Could not get call information! + Could not connect to signaling server! + Call terminated
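Signaling wire format (a sketch inferred from the ClientRequest and SignalExchangerClient classes above; the concrete values are illustrative, not actual protocol samples): after the WebSocket opens, a client identifies itself with {"client_id": "<own call ID>"}. The peer with the lower user ID then announces {"ready_msg": true, "target_id": "<peer call ID>"}, and the higher-ID peer replies with an SDP offer wrapped in a signal string, e.g. {"target_id": "<peer call ID>", "signal": "{\"type\":\"offer\",\"sdp\":\"...\"}"}; ICE candidates travel the same way as {"target_id": "<peer call ID>", "signal": "{\"candidate\":{\"sdpMid\":\"...\",\"sdpMLineIndex\":0,\"candidate\":\"...\"}}"}. Messages relayed back by the server carry a "source_id" field identifying the sender, as parsed in onMessage().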