diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index e70a2d3..d3561c1 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -13,6 +13,7 @@
+
diff --git a/app/src/main/java/com/example/intra/CallScreen.kt b/app/src/main/java/com/example/intra/CallScreen.kt
index b59a688..7891fcf 100644
--- a/app/src/main/java/com/example/intra/CallScreen.kt
+++ b/app/src/main/java/com/example/intra/CallScreen.kt
@@ -33,7 +33,12 @@ import coil.compose.AsyncImage
import coil.request.ImageRequest
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.platform.LocalContext
-
+import androidx.compose.ui.viewinterop.AndroidView
+import androidx.compose.material.icons.filled.Videocam
+import androidx.compose.material.icons.filled.VideocamOff
+import androidx.compose.material.icons.filled.Cameraswitch
+import org.webrtc.SurfaceViewRenderer
+import org.webrtc.RendererCommon
@Composable
fun CallScreen(
@@ -42,11 +47,14 @@ fun CallScreen(
onRejectCall: () -> Unit,
onAcceptCall: () -> Unit,
onToggleMute: () -> Unit,
- onToggleSpeaker: () -> Unit
+ onToggleSpeaker: () -> Unit,
+ onToggleVideo: () -> Unit,
+ onSwitchCamera: () -> Unit,
+ webRTCClient: WebRTCClient? = null
) {
// 🔥 TIMER STATE - Call duration track karne ke liye
var callSeconds by remember(state.status) { mutableStateOf(0) }
-
+ val context = LocalContext.current
// 🔥 TIMER LOGIC - Connected hone pe start, status change pe auto stop
LaunchedEffect(state.status) {
@@ -73,6 +81,26 @@ fun CallScreen(
),
contentAlignment = Alignment.Center
) {
+ // --- VIDEO CALL BACKGROUND ---
+ if (state.isVideoCall && state.status == CallStatus.CONNECTED) {
+ AndroidView(
+ factory = { ctx ->
+ SurfaceViewRenderer(ctx).apply {
+ if (webRTCClient != null) {
+ init(webRTCClient.eglBaseContext, null)
+ setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL)
+ webRTCClient.setupRemoteVideoRenderer(this)
+ }
+ }
+ },
+ modifier = Modifier.fillMaxSize(),
+ onRelease = { renderer ->
+ webRTCClient?.removeRemoteVideoRenderer(renderer)
+ renderer.release()
+ }
+ )
+ }
+
Column(
horizontalAlignment = Alignment.CenterHorizontally,
verticalArrangement = Arrangement.SpaceBetween,
@@ -82,63 +110,119 @@ fun CallScreen(
) {
// --- TOP SECTION: Avatar & Status (Same) ---
Column(horizontalAlignment = Alignment.CenterHorizontally) {
- Box(
- modifier = Modifier.size(170.dp),
- contentAlignment = Alignment.Center
- ) {
- AvatarGlowRing(
- isActive = state.status != CallStatus.CONNECTED
- )
+ if (!state.isVideoCall || state.status != CallStatus.CONNECTED) {
+ AvatarGlowRing(
+ isActive = state.status != CallStatus.CONNECTED
+ )
- Box(
- modifier = Modifier
- .size(120.dp)
- .clip(CircleShape)
- .background(Color.White.copy(alpha = 0.2f)),
- contentAlignment = Alignment.Center
- ) {
- if (state.profilePhotoUrl != null) {
- AsyncImage(
- model = state.profilePhotoUrl,
- contentDescription = null,
- modifier = Modifier.fillMaxSize()
- )
- } else {
- Icon(
- imageVector = Icons.Default.Person,
- contentDescription = null,
- tint = Color.White,
- modifier = Modifier.size(60.dp)
- )
+ Box(
+ modifier = Modifier
+ .size(120.dp)
+ .clip(CircleShape)
+ .background(Color.White.copy(alpha = 0.2f)),
+ contentAlignment = Alignment.Center
+ ) {
+ if (state.profilePhotoUrl != null) {
+ AsyncImage(
+ model = state.profilePhotoUrl,
+ contentDescription = null,
+ modifier = Modifier.fillMaxSize()
+ )
+ } else {
+ Icon(
+ imageVector = Icons.Default.Person,
+ contentDescription = null,
+ tint = Color.White,
+ modifier = Modifier.size(60.dp)
+ )
+ }
}
- }
}
- Spacer(modifier = Modifier.height(24.dp))
- Text(
- text = state.targetUser,
- color = Color.White,
- fontSize = 30.sp,
- fontWeight = FontWeight.Bold
- )
- Spacer(modifier = Modifier.height(8.dp))
-
- AnimatedContent(
- targetState = state.status,
- transitionSpec = { fadeIn() togetherWith fadeOut() },
- label = "callStatus"
- ) { status ->
+ if (!state.isVideoCall || state.status != CallStatus.CONNECTED) {
+ Spacer(modifier = Modifier.height(24.dp))
Text(
- text = when (status) {
- CallStatus.OUTGOING -> "Calling..."
- CallStatus.INCOMING -> "Incoming Call..."
- CallStatus.CONNECTED -> formatTime(callSeconds)
- else -> ""
- },
- color = Color.White.copy(alpha = 0.7f),
- fontSize = 18.sp
+ text = state.targetUser,
+ color = Color.White,
+ fontSize = 30.sp,
+ fontWeight = FontWeight.Bold
)
+ Spacer(modifier = Modifier.height(8.dp))
+
+ AnimatedContent(
+ targetState = state.status,
+ transitionSpec = { fadeIn() togetherWith fadeOut() },
+ label = "callStatus"
+ ) { status ->
+ Text(
+ text = when (status) {
+ CallStatus.OUTGOING -> "Calling..."
+ CallStatus.INCOMING -> "Incoming Call..."
+ CallStatus.CONNECTED -> formatTime(callSeconds)
+ else -> ""
+ },
+ color = Color.White.copy(alpha = 0.7f),
+ fontSize = 18.sp
+ )
+ }
+ } else {
+ // Transparent overlay texts for Video Call
+ Column(
+ modifier = Modifier.fillMaxWidth().padding(16.dp),
+ horizontalAlignment = Alignment.CenterHorizontally
+ ) {
+ Text(
+ text = state.targetUser,
+ color = Color.White,
+ fontSize = 24.sp,
+ fontWeight = FontWeight.Bold
+ )
+ Text(
+ text = formatTime(callSeconds),
+ color = Color.White,
+ fontSize = 16.sp
+ )
+ }
+ }
+ }
+
+ // --- LOCAL VIDEO (PiP) ---
+ if (state.isVideoCall && state.status == CallStatus.CONNECTED && state.isVideoEnabled) {
+ Box(
+ modifier = Modifier
+ .fillMaxWidth()
+ .weight(1f)
+ .padding(bottom = 32.dp, end = 16.dp),
+ contentAlignment = Alignment.BottomEnd
+ ) {
+ Box(
+ modifier = Modifier
+ .width(100.dp)
+ .height(150.dp)
+ .clip(androidx.compose.foundation.shape.RoundedCornerShape(12.dp))
+ .background(Color.Black)
+ ) {
+ AndroidView(
+ factory = { ctx ->
+ SurfaceViewRenderer(ctx).apply {
+ if (webRTCClient != null) {
+ init(webRTCClient.eglBaseContext, null)
+ setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL)
+ setZOrderMediaOverlay(true)
+ webRTCClient.setupLocalVideoRenderer(this)
+ }
+ }
+ },
+ modifier = Modifier.fillMaxSize(),
+ onRelease = { renderer ->
+ webRTCClient?.removeLocalVideoRenderer(renderer)
+ renderer.release()
+ }
+ )
+ }
}
+ } else {
+ Spacer(modifier = Modifier.weight(1f))
}
// --- BOTTOM SECTION: Buttons ---
@@ -176,6 +260,22 @@ fun CallScreen(
onClick = onToggleMute
)
+ if (state.isVideoCall) {
+ CallActionButton(
+ icon = if (state.isVideoEnabled) Icons.Default.Videocam else Icons.Default.VideocamOff,
+ color = if (state.isVideoEnabled) Color.White else Color.White.copy(alpha = 0.2f),
+ iconTint = if (state.isVideoEnabled) Color.Black else Color.White,
+ onClick = onToggleVideo
+ )
+
+ CallActionButton(
+ icon = Icons.Default.Cameraswitch,
+ color = Color.White.copy(alpha = 0.2f),
+ iconTint = Color.White,
+ onClick = onSwitchCamera
+ )
+ }
+
// End Call Button (Same for Connected/Outgoing)
CallActionButton(
icon = Icons.Default.CallEnd,
@@ -184,12 +284,14 @@ fun CallScreen(
onClick = onEndCall
)
- CallActionButton(
- icon = if (state.isSpeakerOn) Icons.Default.VolumeUp else Icons.Default.VolumeOff,
- color = if (state.isSpeakerOn) Color.White else Color.White.copy(alpha = 0.2f),
- iconTint = if (state.isSpeakerOn) Color.Black else Color.White,
- onClick = onToggleSpeaker
- )
+ if (!state.isVideoCall) {
+ CallActionButton(
+ icon = if (state.isSpeakerOn) Icons.Default.VolumeUp else Icons.Default.VolumeOff,
+ color = if (state.isSpeakerOn) Color.White else Color.White.copy(alpha = 0.2f),
+ iconTint = if (state.isSpeakerOn) Color.Black else Color.White,
+ onClick = onToggleSpeaker
+ )
+ }
}
}
}
diff --git a/app/src/main/java/com/example/intra/CallState.kt b/app/src/main/java/com/example/intra/CallState.kt
index 2909abd..603923b 100644
--- a/app/src/main/java/com/example/intra/CallState.kt
+++ b/app/src/main/java/com/example/intra/CallState.kt
@@ -13,5 +13,8 @@ data class CallState(
val targetUser: String = "", // Kisse baat ho rahi hai
val profilePhotoUrl: String? = null, // ✅ ADD THIS
val isMuted: Boolean = false,
- val isSpeakerOn: Boolean = true
+ val isSpeakerOn: Boolean = true,
+ val isVideoCall: Boolean = false, // Video call indicator
+ val isVideoEnabled: Boolean = true, // Track if camera is ON/OFF
+ val isFrontCamera: Boolean = true // Track which camera is active
)
\ No newline at end of file
diff --git a/app/src/main/java/com/example/intra/CallViewModel.kt b/app/src/main/java/com/example/intra/CallViewModel.kt
index 5b9cbc8..ce2b989 100644
--- a/app/src/main/java/com/example/intra/CallViewModel.kt
+++ b/app/src/main/java/com/example/intra/CallViewModel.kt
@@ -20,25 +20,28 @@ class CallViewModel : ViewModel() {
// --- Actions ---
- fun onIncomingCall(sender: String, profilePhotoUrl: String? = null) {
+ fun onIncomingCall(sender: String, profilePhotoUrl: String? = null, isVideoCall: Boolean = false) {
if (callActive) return
callActive = true
isRinging.value = true
callState.value = CallState(
status = CallStatus.INCOMING,
targetUser = sender,
- profilePhotoUrl = profilePhotoUrl
+ profilePhotoUrl = profilePhotoUrl,
+ isVideoCall = isVideoCall,
+            isSpeakerOn = isVideoCall // Speaker defaults to ON for video calls
)
isRinging.value = true // 🔔 Start Ringing
}
- fun onStartOutgoingCall(target: String, profilePhotoUrl: String? = null) {
+ fun onStartOutgoingCall(target: String, profilePhotoUrl: String? = null, isVideoCall: Boolean = false) {
callActive = true
callState.value = CallState(
status = CallStatus.OUTGOING,
targetUser = target,
profilePhotoUrl = profilePhotoUrl,
- isSpeakerOn = true
+ isSpeakerOn = true, // By default keeping it true, WebRTCClient handles it
+ isVideoCall = isVideoCall
)
// Outgoing me ringtone nahi bajti, tone bajti hai (wo baad me dekhenge)
}
@@ -66,4 +69,12 @@ class CallViewModel : ViewModel() {
fun updateSpeakerState(isSpeakerOn: Boolean) {
callState.value = callState.value.copy(isSpeakerOn = isSpeakerOn)
}
+
+ fun updateVideoState(isVideoEnabled: Boolean) {
+ callState.value = callState.value.copy(isVideoEnabled = isVideoEnabled)
+ }
+
+ fun switchCamera(isFrontCamera: Boolean) {
+ callState.value = callState.value.copy(isFrontCamera = isFrontCamera)
+ }
}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/intra/ChatScreen.kt b/app/src/main/java/com/example/intra/ChatScreen.kt
index 994be78..51b2b70 100644
--- a/app/src/main/java/com/example/intra/ChatScreen.kt
+++ b/app/src/main/java/com/example/intra/ChatScreen.kt
@@ -17,6 +17,7 @@ import androidx.compose.material.icons.filled.Call
import androidx.compose.material.icons.filled.Group
import androidx.compose.material.icons.filled.Person
import androidx.compose.material.icons.filled.SmartToy
+import androidx.compose.material.icons.filled.Videocam
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton
@@ -66,7 +67,7 @@ fun ChatScreen(
receiverPhotoUrl: String? = null,
onAttachClick: () -> Unit,
onBackClick: () -> Unit,
- onStartCall: () -> Unit,
+ onStartCall: (isVideo: Boolean) -> Unit,
) {
val listState = rememberLazyListState()
var videoUrlToPlay by remember { mutableStateOf(null) }
@@ -213,10 +214,16 @@ fun ChatScreen(
}
},
actions = {
- IconButton(onClick = onStartCall) {
+ IconButton(onClick = { onStartCall(true) }) {
+ Icon(
+ imageVector = Icons.Default.Videocam,
+ contentDescription = "Video Call"
+ )
+ }
+ IconButton(onClick = { onStartCall(false) }) {
Icon(
imageVector = Icons.Default.Call,
- contentDescription = "Call"
+ contentDescription = "Audio Call"
)
}
}
diff --git a/app/src/main/java/com/example/intra/ChatViewModel.kt b/app/src/main/java/com/example/intra/ChatViewModel.kt
index 2b286b1..5e68359 100644
--- a/app/src/main/java/com/example/intra/ChatViewModel.kt
+++ b/app/src/main/java/com/example/intra/ChatViewModel.kt
@@ -339,13 +339,14 @@ class ChatViewModel(
// 📞 CALL REQUEST (Outgoing)
// ===============================
- fun sendCallRequest(receiver: String) {
+ fun sendCallRequest(receiver: String, isVideoCall: Boolean = false) {
val myPhoto = settingsManager.getMyPhoto()
val json = JSONObject().apply {
put("type", "call_request")
put("sender", currentUsername)
put("receiver", receiver)
put("profile_photo", myPhoto)
+ put("is_video_call", isVideoCall)
}
WsManager.send(json.toString())
}
diff --git a/app/src/main/java/com/example/intra/MainActivity.kt b/app/src/main/java/com/example/intra/MainActivity.kt
index 1f85ec9..c2a5928 100644
--- a/app/src/main/java/com/example/intra/MainActivity.kt
+++ b/app/src/main/java/com/example/intra/MainActivity.kt
@@ -121,6 +121,7 @@ class MainActivity : ComponentActivity() {
} else intentPhoto
Log.d("MAIN", "📞 Opening call screen for: $intentSender with photo: $fullPhotoUrl")
+ // Note: For background intent handling we default to audio right now unless we extract it from intent
callViewModel.onIncomingCall(intentSender, fullPhotoUrl)
if (MyApplication.AppState.pendingCallOffer != null) {
@@ -172,11 +173,12 @@ class MainActivity : ComponentActivity() {
"call_request" -> {
val sender = json.optString("sender")
val rawPhoto = json.optString("profile_photo")
+ val isVideoCall = json.optBoolean("is_video_call", false)
val fullPhotoUrl = if (!rawPhoto.isNullOrEmpty() && rawPhoto != "null") {
settingsManager.getBaseUrl().removeSuffix("/") + rawPhoto
} else null
- callViewModel.onIncomingCall(sender, fullPhotoUrl)
+ callViewModel.onIncomingCall(sender, fullPhotoUrl, isVideoCall)
}
"call_rejected", "call_ended" -> {
@@ -285,7 +287,8 @@ class MainActivity : ComponentActivity() {
webRTCClient.answerCall(
callViewModel.callState.value.targetUser,
- offer
+ offer,
+ callViewModel.callState.value.isVideoCall
)
callViewModel.onCallConnected()
proximitySensor.deactivate()
@@ -304,7 +307,21 @@ class MainActivity : ComponentActivity() {
callViewModel.updateSpeakerState(newState)
if (newState) proximitySensor.deactivate()
else proximitySensor.activate()
- }
+ },
+
+ onToggleVideo = {
+ val newVideo = !callViewModel.callState.value.isVideoEnabled
+ webRTCClient.toggleVideo(newVideo)
+ callViewModel.updateVideoState(newVideo)
+ },
+
+ onSwitchCamera = {
+ val isFront = !callViewModel.callState.value.isFrontCamera
+ webRTCClient.switchCamera()
+ callViewModel.switchCamera(isFront)
+ },
+
+ webRTCClient = webRTCClient
)
}
@@ -330,7 +347,7 @@ class MainActivity : ComponentActivity() {
currentChatReceiver = null
},
- onStartCall = {
+ onStartCall = { isVideoCall ->
val target = currentChatReceiver!!
val contact =
contactViewModel.contacts.find { it.username == target }
@@ -341,9 +358,9 @@ class MainActivity : ComponentActivity() {
.removeSuffix("/") + it
}
- chatViewModel.sendCallRequest(target)
- callViewModel.onStartOutgoingCall(target, photo)
- webRTCClient.startCall(target)
+ chatViewModel.sendCallRequest(target, isVideoCall)
+ callViewModel.onStartOutgoingCall(target, photo, isVideoCall)
+ webRTCClient.startCall(target, isVideoCall)
proximitySensor.deactivate()
}
)
@@ -443,6 +460,7 @@ class MainActivity : ComponentActivity() {
// Audio for Calls
permissions.add(android.Manifest.permission.RECORD_AUDIO)
+ permissions.add(android.Manifest.permission.CAMERA) // Video Calls
// Notifications (Android 13+)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
diff --git a/app/src/main/java/com/example/intra/WebRTCClient.kt b/app/src/main/java/com/example/intra/WebRTCClient.kt
index 6c8c8e1..e0ae48c 100644
--- a/app/src/main/java/com/example/intra/WebRTCClient.kt
+++ b/app/src/main/java/com/example/intra/WebRTCClient.kt
@@ -19,10 +19,21 @@ class WebRTCClient(
private var audioSource: AudioSource? = null
private var localAudioTrack: AudioTrack? = null
+ // Video components
+ val eglBaseContext: EglBase.Context by lazy { EglBase.create().eglBaseContext }
+ private var surfaceTextureHelper: SurfaceTextureHelper? = null
+ private var videoCapturer: VideoCapturer? = null
+ private var localVideoSource: VideoSource? = null
+ private var localVideoTrack: VideoTrack? = null
+ private var remoteVideoTrack: VideoTrack? = null
+ private var remoteRendererSaved: SurfaceViewRenderer? = null
+ private var isVideoEnabled = true
+
// Default Speaker State: Hum shuruat Speaker ON se karenge
private var isSpeakerOn = false
private var currentTarget: String = ""
+ private var isVideoCall = false
init {
initWebRTC()
@@ -32,6 +43,7 @@ class WebRTCClient(
// 1. WebRTC Initialization options
val options = PeerConnectionFactory.InitializationOptions.builder(context)
.setEnableInternalTracer(true)
+ .setFieldTrials("WebRTC-H264HighProfile/Enabled/")
.createInitializationOptions()
PeerConnectionFactory.initialize(options)
@@ -42,8 +54,13 @@ class WebRTCClient(
.createAudioDeviceModule()
// 3. Create Factory
+ val videoEncoderFactory = DefaultVideoEncoderFactory(eglBaseContext, true, true)
+ val videoDecoderFactory = DefaultVideoDecoderFactory(eglBaseContext)
+
peerConnectionFactory = PeerConnectionFactory.builder()
.setAudioDeviceModule(audioDeviceModule)
+ .setVideoEncoderFactory(videoEncoderFactory)
+ .setVideoDecoderFactory(videoDecoderFactory)
.setOptions(PeerConnectionFactory.Options().apply {
disableEncryption = false
disableNetworkMonitor = false
@@ -130,12 +147,92 @@ class WebRTCClient(
Log.d("WebRTC", "🎙️ Mic Muted: $shouldMute")
}
+ // ----------------------------------------------------------------
+ // 📹 VIDEO MANAGEMENT
+ // ----------------------------------------------------------------
+
+ private fun createVideoCapturer(): VideoCapturer? {
+ val enumerator = Camera2Enumerator(context)
+ val deviceNames = enumerator.deviceNames
+
+ // Try to find front camera first
+ for (deviceName in deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ return enumerator.createCapturer(deviceName, null)
+ }
+ }
+ // Fallback to back camera
+ for (deviceName in deviceNames) {
+ if (enumerator.isBackFacing(deviceName)) {
+ return enumerator.createCapturer(deviceName, null)
+ }
+ }
+ return null
+ }
+
+ fun setupLocalVideoRenderer(localRenderer: SurfaceViewRenderer) {
+ localRenderer.setEnableHardwareScaler(true)
+ localRenderer.setMirror(true) // Mirror for front camera
+ localVideoTrack?.addSink(localRenderer)
+ }
+
+ fun removeLocalVideoRenderer(localRenderer: SurfaceViewRenderer) {
+ localVideoTrack?.removeSink(localRenderer)
+ }
+
+ fun setupRemoteVideoRenderer(remoteRenderer: SurfaceViewRenderer) {
+ remoteRenderer.setEnableHardwareScaler(true)
+ remoteRendererSaved = remoteRenderer
+ remoteVideoTrack?.addSink(remoteRenderer)
+ }
+
+ fun removeRemoteVideoRenderer(remoteRenderer: SurfaceViewRenderer) {
+ remoteVideoTrack?.removeSink(remoteRenderer)
+ if (remoteRendererSaved == remoteRenderer) {
+ remoteRendererSaved = null
+ }
+ }
+
+ private fun startLocalVideo() {
+ if (surfaceTextureHelper == null) {
+ surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext)
+ }
+
+ if (videoCapturer == null) {
+ videoCapturer = createVideoCapturer()
+ }
+
+ if (localVideoSource == null) {
+ localVideoSource = peerConnectionFactory?.createVideoSource(videoCapturer!!.isScreencast)
+ videoCapturer?.initialize(surfaceTextureHelper, context, localVideoSource?.capturerObserver)
+ videoCapturer?.startCapture(1024, 720, 30) // Resolution can be adjusted
+ }
+
+ if (localVideoTrack == null) {
+ localVideoTrack = peerConnectionFactory?.createVideoTrack("video_track_101", localVideoSource)
+ localVideoTrack?.setEnabled(true)
+ }
+ }
+
+ fun toggleVideo(shouldBeOn: Boolean) {
+ if (!isVideoCall) return
+ isVideoEnabled = shouldBeOn
+ localVideoTrack?.setEnabled(isVideoEnabled)
+ Log.d("WebRTC", "📹 Video Track Enabled: $isVideoEnabled")
+ }
+
+ fun switchCamera() {
+ if (!isVideoCall) return
+ (videoCapturer as? CameraVideoCapturer)?.switchCamera(null)
+ }
+
// ----------------------------------------------------------------
// 📞 CALL LOGIC
// ----------------------------------------------------------------
- fun startCall(targetUsername: String) {
+ fun startCall(targetUsername: String, isVideoCall: Boolean = false) {
currentTarget = targetUsername
+ this.isVideoCall = isVideoCall
// 🔥 STEP 0: Set Audio Mode for Call (Fix for J2 Camera & Earpiece Issue)
setupAudioForCall()
@@ -147,6 +244,9 @@ class WebRTCClient(
val constraints = MediaConstraints().apply {
mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
+ if (isVideoCall) {
+ mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
+ }
}
peerConnection?.createOffer(object : SdpObserver {
@@ -159,6 +259,7 @@ class WebRTCClient(
put("type", "webrtc_offer")
put("sdp", it.description)
put("receiver", targetUsername)
+ put("is_video_call", isVideoCall) // Also let the remote know
}
sendSignal(json.toString())
}
@@ -174,8 +275,9 @@ class WebRTCClient(
}, constraints)
}
- fun answerCall(targetUsername: String, offerSdp: String) {
+ fun answerCall(targetUsername: String, offerSdp: String, isVideoCall: Boolean = false) {
currentTarget = targetUsername
+ this.isVideoCall = isVideoCall
// 🔥 STEP 0: Set Audio Mode for Call (Fix for J2 Camera & Earpiece Issue)
setupAudioForCall()
@@ -190,6 +292,9 @@ class WebRTCClient(
override fun onSetSuccess() {
val constraints = MediaConstraints().apply {
mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
+ if (isVideoCall) {
+ mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
+ }
}
peerConnection?.createAnswer(object : SdpObserver {
override fun onCreateSuccess(sdp: SessionDescription?) {
@@ -280,6 +385,13 @@ class WebRTCClient(
// ChatGPT sahi tha: Agar hum yahan force karenge, toh
// jab bhi thoda glitch hoga, phone wapas Speaker mode me chala jayega.
// Sirf Track enable karna kaafi hai.
+ } else if (track is VideoTrack) {
+ track.setEnabled(true)
+ remoteVideoTrack = track
+ remoteRendererSaved?.let { renderer ->
+ track.addSink(renderer)
+ }
+ Log.d("WebRTC", "📹 Remote Video Track Added")
}
}
@@ -299,6 +411,11 @@ class WebRTCClient(
localAudioTrack?.setEnabled(true)
val streamId = "local_audio_stream"
peerConnection?.addTrack(localAudioTrack, listOf(streamId))
+
+ if (isVideoCall) {
+ startLocalVideo()
+ peerConnection?.addTrack(localVideoTrack, listOf("local_video_stream"))
+ }
}
// 🔥 FIX 1: Call end karne par signal bhejo
@@ -327,10 +444,28 @@ class WebRTCClient(
audioManager.abandonAudioFocus(null)
localAudioTrack?.setEnabled(false)
+
+ try {
+ videoCapturer?.stopCapture()
+ } catch (e: InterruptedException) {
+ e.printStackTrace()
+ }
+ videoCapturer?.dispose()
+ videoCapturer = null
+
+ localVideoSource?.dispose()
+ localVideoSource = null
+
+ surfaceTextureHelper?.dispose()
+ surfaceTextureHelper = null
+
+ remoteRendererSaved = null
+
peerConnection?.close()
peerConnection = null
currentTarget = ""
+ isVideoCall = false
- Log.d("WebRTC", "❌ Call Ended & Audio Cleaned")
+ Log.d("WebRTC", "❌ Call Ended & Audio/Video Cleaned")
}
}
\ No newline at end of file