
| Module | Description |
| --- | --- |
| CallCoreView | Core call UI component. Automatically observes CallStore data and renders video streams, with support for UI customization such as layout switching, avatar and icon configuration. |
| CallStore | Manages the call lifecycle: make, answer, reject, and hang up calls. Provides real-time access to participant audio/video status, call duration, call history, and more. |
| DeviceStore | Controls audio/video devices: microphone (toggle/on/off, volume), camera (toggle/on/off, switch, quality), screen sharing, and real-time device status monitoring. |
Add pod 'AtomicXCore' to your project's Podfile:

target 'YourProjectTarget' do
  pod 'AtomicXCore'
end
If your project does not yet have a Podfile, open the .xcodeproj directory in the terminal and run pod init to create one. Then run pod install --repo-update.
YourProjectName.xcworkspace file.
import UIKit
import AtomicXCore
import Combine

class ViewController: UIViewController {
    var cancellables = Set<AnyCancellable>()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Initialize CallStore
        let _ = CallStore.shared
        // Set up user information
        let userID = "test_001" // Replace with your UserID
        let sdkAppID: Int = 1400000001 // Replace with your SDKAppID from the console
        let secretKey = "**************" // Replace with your SecretKey from the console
        // Generate UserSig (for local testing only; always generate UserSig on your server in production)
        let userSig = GenerateTestUserSig.genTestUserSig(userID: userID,
                                                         sdkAppID: sdkAppID,
                                                         secretKey: secretKey)
        // Log in
        LoginStore.shared.login(sdkAppID: sdkAppID,
                                userID: userID,
                                userSig: userSig) { result in
            switch result {
            case .success:
                Log.info("login success")
            case .failure(let error):
                // Fixed: "\\(" was an escaped interpolation and would log the
                // literal text instead of the error code/message.
                Log.error("login failed, code: \(error.code), error: \(error.message)")
            }
        }
    }
}
Parameter | Type | Description |
userID | String | Unique identifier for the current user. Only letters, numbers, hyphens, and underscores are allowed. Avoid using simple IDs like 1 or 123 to prevent multi-device login conflicts. |
sdkAppID | int | |
secretKey | String | |
userSig | String | Authentication token for TRTC. Development: Use the local GenerateTestUserSig.genTestUserSig function or the UserSig Tool to generate a temporary UserSig. Production: Always generate UserSig server-side to prevent SecretKey leakage. See Server-side UserSig Generation. For more details, see How to calculate and use UserSig. |
import UIKit
import AtomicXCore

class CallViewController: UIViewController {
    // Fixed: the original snippet assigned to `callCoreView` without ever
    // declaring it, so the example could not compile as shown.
    private var callCoreView: CallCoreView?

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        // Attach CallCoreView to the call screen
        callCoreView = CallCoreView(frame: view.bounds)
        callCoreView?.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        if let callCoreView = callCoreView {
            view.addSubview(callCoreView)
        }
    }
}
Feature | Description | Reference |
Set Layout Mode | Switch between layout modes. If not set, layout adapts automatically based on participant count. | Switch Layout Mode |
Set Avatar | Customize avatars for specific users by providing avatar resource paths. | Customize Default Avatar |
Set Volume Indicator Icon | Set custom volume indicator icons for different volume levels. | Customize Volume Indicator Icon |
Set Network Indicator Icon | Set network status indicator icons based on real-time network quality. | Customize Network Indicator Icon |
Set Waiting Animation for Users | Support GIF animations for users in waiting state during multi-party calls. |
hangup and close the call screen.

import UIKit
import AtomicXCore
import Combine

class CallViewController: UIViewController {
    // Circular red "hangup" button, laid out as the third button of a
    // three-button row anchored above the bottom edge of the screen.
    private lazy var buttonHangup: UIButton = {
        let buttonWidth: CGFloat = 80
        let buttonHeight: CGFloat = 80
        let spacing: CGFloat = 30
        let bottomMargin: CGFloat = 80
        let totalWidth = buttonWidth * 3 + spacing * 2
        let startX = (view.bounds.width - totalWidth) / 2
        let buttonY = view.bounds.height - bottomMargin - buttonHeight
        let button = createButton(
            frame: CGRect(x: startX + (buttonWidth + spacing) * 2, y: buttonY, width: buttonWidth, height: buttonHeight),
            title: "hangup")
        button.backgroundColor = .systemRed
        button.addTarget(self, action: #selector(touchHangupButton), for: .touchUpInside)
        return button
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Other initialization code
        // 1. Add a hang-up button
        view.addSubview(buttonHangup)
    }

    @objc private func touchHangupButton() {
        // 2. Call the hangup API in the click event and destroy the page
        CallStore.shared.hangup(completion: nil)
    }

    // Helper: builds a circular, semi-transparent gray button for the control row.
    private func createButton(frame: CGRect, title: String) -> UIButton {
        let button = UIButton(type: .system)
        button.frame = frame
        button.setTitle(title, for: .normal)
        button.setTitleColor(.white, for: .normal)
        button.backgroundColor = UIColor(white: 0.3, alpha: 0.8)
        button.layer.cornerRadius = frame.width / 2
        button.titleLabel?.font = UIFont.systemFont(ofSize: 14)
        return button
    }
}
import UIKit
import AtomicXCore
import Combine

class CallViewController: UIViewController {
    /// Circular "Microphone" toggle button, centered as the middle button of a
    /// three-button row near the bottom of the screen.
    private lazy var buttonMicrophone: UIButton = {
        let side: CGFloat = 80
        let gap: CGFloat = 30
        let marginBottom: CGFloat = 80
        let rowWidth = side * 3 + gap * 2
        let originX = (view.bounds.width - rowWidth) / 2
        let originY = view.bounds.height - marginBottom - side
        let toggle = createButton(
            frame: CGRect(x: originX + side + gap, y: originY, width: side, height: side),
            title: "Microphone")
        toggle.addTarget(self, action: #selector(touchMicrophoneButton), for: .touchUpInside)
        return toggle
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Other initialization code
        // 1. Add a microphone toggle button
        view.addSubview(buttonMicrophone)
    }

    /// 2. Flip the microphone on tap: close it when currently on, open it otherwise.
    @objc private func touchMicrophoneButton() {
        let store = DeviceStore.shared
        if store.state.value.microphoneStatus == .on {
            store.closeLocalMicrophone()
        } else {
            store.openLocalMicrophone(completion: nil)
        }
    }

    /// Builds one circular, semi-transparent button used by the control row.
    private func createButton(frame: CGRect, title: String) -> UIButton {
        let control = UIButton(type: .system)
        control.frame = frame
        control.setTitle(title, for: .normal)
        control.setTitleColor(.white, for: .normal)
        control.backgroundColor = UIColor(white: 0.3, alpha: 0.8)
        control.layer.cornerRadius = frame.width / 2
        control.titleLabel?.font = UIFont.systemFont(ofSize: 14)
        return control
    }
}
import UIKit
import AtomicXCore
import Combine

class CallViewController: UIViewController {
    // Circular "Camera" toggle button, first button of the bottom control row.
    private lazy var buttonCamera: UIButton = {
        let buttonWidth: CGFloat = 80
        let buttonHeight: CGFloat = 80
        let spacing: CGFloat = 30
        let bottomMargin: CGFloat = 80
        let totalWidth = buttonWidth * 3 + spacing * 2
        let startX = (view.bounds.width - totalWidth) / 2
        let buttonY = view.bounds.height - bottomMargin - buttonHeight
        // Fixed: a trailing "// Camera" comment previously sat between the
        // title argument and the closing parenthesis, commenting out the ")".
        let button = createButton(
            frame: CGRect(x: startX, y: buttonY, width: buttonWidth, height: buttonHeight),
            title: "Camera")
        button.addTarget(self, action: #selector(touchCameraButton), for: .touchUpInside)
        return button
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Other initialization code
        // 1. Add camera toggle button
        view.addSubview(buttonCamera)
    }

    // 2. Camera button click event: close the camera when on; otherwise open it,
    // preserving the current front/back facing selection.
    @objc private func touchCameraButton() {
        let cameraStatus = DeviceStore.shared.state.value.cameraStatus
        if cameraStatus == .on {
            DeviceStore.shared.closeLocalCamera()
        } else {
            let isFront = DeviceStore.shared.state.value.isFrontCamera
            DeviceStore.shared.openLocalCamera(isFront: isFront, completion: nil)
        }
    }

    // Helper method to create circular buttons
    private func createButton(frame: CGRect, title: String) -> UIButton {
        let button = UIButton(type: .system)
        button.frame = frame
        button.setTitle(title, for: .normal)
        button.setTitleColor(.white, for: .normal)
        button.backgroundColor = UIColor(white: 0.3, alpha: 0.8)
        button.layer.cornerRadius = frame.width / 2
        button.titleLabel?.font = UIFont.systemFont(ofSize: 14)
        return button
    }
}
import UIKit
import AtomicXCore
import Combine

class CallViewController: UIViewController {
    private var cancellables = Set<AnyCancellable>()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Other initialization code
        // 1. Observe microphone and camera status
        observeDeviceState()
    }

    // Subscribes to DeviceStore state and keeps the camera/microphone button
    // titles in sync with the actual device status.
    // NOTE(review): `buttonCamera` and `buttonMicrophone` are assumed to be
    // optional UIButton properties declared elsewhere in this class — confirm.
    private func observeDeviceState() {
        DeviceStore.shared.state.subscribe()
            .map { $0.cameraStatus }
            .removeDuplicates()
            .receive(on: DispatchQueue.main)
            .sink { [weak self] cameraStatus in
                // 2. Update camera button text
                let title = cameraStatus == .on ? "Turn Off Camera" : "Turn On Camera"
                self?.buttonCamera?.setTitle(title, for: .normal)
            }
            .store(in: &cancellables)
        DeviceStore.shared.state.subscribe()
            .map { $0.microphoneStatus }
            .removeDuplicates()
            .receive(on: DispatchQueue.main)
            .sink { [weak self] microphoneStatus in
                // 2. Update microphone button text
                let title = microphoneStatus == .on ? "Turn Off Mic" : "Turn On Mic"
                self?.buttonMicrophone?.setTitle(title, for: .normal)
            }
            .store(in: &cancellables)
    }
}
Info.plist with appropriate usage descriptions. These will be shown to users when the system requests permissions:<key>NSCameraUsageDescription</key><string>Camera access is required for video calls and group video calls.</string><key>NSMicrophoneUsageDescription</key><string>Microphone access is required for audio calls, group audio calls, video calls, and group video calls.</string>
import AVFoundation
import UIKit

extension UIViewController {
    // Check microphone permission
    func checkMicrophonePermission(completion: @escaping (Bool) -> Void) {
        requestCaptureAccess(for: .audio, completion: completion)
    }

    // Check camera permission
    func checkCameraPermission(completion: @escaping (Bool) -> Void) {
        requestCaptureAccess(for: .video, completion: completion)
    }

    /// Shared implementation for both checks: resolves the current authorization
    /// status for the given media type, prompting the user once if it is still
    /// undetermined. When a prompt occurs, the completion is delivered on the
    /// main queue.
    private func requestCaptureAccess(for mediaType: AVMediaType, completion: @escaping (Bool) -> Void) {
        switch AVCaptureDevice.authorizationStatus(for: mediaType) {
        case .authorized:
            completion(true)
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: mediaType) { granted in
                DispatchQueue.main.async {
                    completion(granted)
                }
            }
        case .denied, .restricted:
            completion(false)
        @unknown default:
            completion(false)
        }
    }

    // Show permission alert offering a shortcut to the app's Settings page.
    func showPermissionAlert(message: String) {
        let alert = UIAlertController(
            title: "Permission Required",
            message: message,
            preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Settings", style: .default) { _ in
            if let url = URL(string: UIApplication.openSettingsURLString) {
                UIApplication.shared.open(url)
            }
        })
        alert.addAction(UIAlertAction(title: "Cancel", style: .cancel))
        present(alert, animated: true)
    }
}
calls to start a call.

import UIKit
import AtomicXCore
import Combine

class MainViewController: UIViewController {
    // 1. Initiate a call
    private func startCall(userIdList: [String], mediaType: CallMediaType) {
        var params = CallParams()
        params.timeout = 30 // Set call timeout to 30 seconds
        CallStore.shared.calls(
            participantIds: userIdList,
            callMediaType: mediaType, // Call type: .audio or .video
            params: params) { [weak self] result in
            switch result {
            case .success:
                // 2. Enable media devices
                self?.openDevices(for: mediaType)
                // 3. Launch the call interface
                DispatchQueue.main.async {
                    let callVC = CallViewController()
                    callVC.modalPresentationStyle = .fullScreen
                    self?.present(callVC, animated: true)
                }
            case .failure(let error):
                // Fixed: "\\(" was an escaped interpolation and would log the
                // literal text instead of the error.
                Log.error("Failed to initiate call: \(error)")
            }
        }
    }

    // Opens the microphone, plus the camera for video calls (preserving the
    // current front/back facing selection).
    private func openDevices(for mediaType: CallMediaType) {
        DeviceStore.shared.openLocalMicrophone(completion: nil)
        if mediaType == .video {
            let isFront = DeviceStore.shared.state.value.isFrontCamera
            DeviceStore.shared.openLocalCamera(isFront: isFront, completion: nil)
        }
    }
}
Params | Type | Required | Description |
participantIds | List<String> | Yes | A list of target user IDs. |
callMediaType | CallMediaType | Yes | The media type of the call, used to specify whether to initiate an audio or video call. CallMediaType.video : Video call. CallMediaType.audio : Audio call. |
params | CallParams | No | Extended call parameters, such as Room ID, call invitation timeout, etc. roomId (String) : Room ID. An optional parameter; if not specified, it will be automatically assigned by the server. timeout (Int) : Call timeout (in seconds). userData (String) : Custom user data for application-specific information. chatGroupId (String) : Chat group ID, used for group call scenarios. isEphemeralCall (Boolean) : Ephemeral call. If set to true, no call history record will be generated. |
onCallEnded event. When onCallEnded is triggered, dismiss the call screen.

import UIKit
import AtomicXCore
import Combine

class CallViewController: UIViewController {
    // Fixed: the snippet stored subscriptions in `cancellables` without
    // declaring the property (sibling snippets declare it this way).
    private var cancellables = Set<AnyCancellable>()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Other initialization code
        // 1. Add call event listener
        addListener()
    }

    // Subscribes to call events and dismisses this screen when the call ends.
    private func addListener() {
        CallStore.shared.callEventPublisher
            .receive(on: DispatchQueue.main)
            .sink { [weak self] event in
                if case .onCallEnded = event {
                    // 2. Dismiss the call interface
                    self?.dismiss(animated: true)
                }
            }
            .store(in: &cancellables)
    }
}
Params | Type | Description |
callId | String | Unique ID for this call. |
mediaType | CallMediaType | The media type of the call: CallMediaType.video : Video call. CallMediaType.audio : Audio call. |
reason | The reason why the call ended. unknown : Unable to determine the reason for termination.hangup : Normal termination; a user actively ended the call.reject : The callee declined the incoming call.noResponse : The callee did not answer within the timeout period.offline : The callee is currently offline.lineBusy : The callee is already in another call.canceled : The caller canceled the call before it was answered.otherDeviceAccepted : The call was answered on another logged-in device.otherDeviceReject : The call was declined on another logged-in device.endByServer : The call was forced to end by the server. | |
userId | String | The User ID of the person who triggered the call termination. |


// Set volume indicator icons (map each VolumeLevel to an icon resource path)
let volumeLevelIcons: [VolumeLevel: String] = [.mute: "Path to the corresponding icon resource"]
callCoreView.setVolumeLevelIcons(icons: volumeLevelIcons)
Params | Type | Required | Description |
icons | [VolumeLevel: String] | Yes | A mapping table of volume levels to icon resources. The dictionary structure is defined as follows: Key ( VolumeLevel ) Represents the volume intensity level: VolumeLevel.mute :Microphone is off or muted.VolumeLevel.low :Volume range (0, 25].VolumeLevel.medium : Volume range (25, 50].VolumeLevel.high : Volume range (50, 75].VolumeLevel.peak : Volume range (75, 100].Value ( String ) The resource path or name of the icon corresponding to the volume level. |
Icons | Description | Download Links |
![]() | Volume Indicator Icon. You can set this icon for VolumeLevel.low or VolumeLevel.medium. It will be displayed when the user's volume exceeds the specified level. | |
![]() | Volume Indicator Icon. You can set this icon for VolumeLevel.mute. It will be displayed when the user is currently muted. |

// Set network quality icons (map each NetworkQuality level to an icon resource path)
let networkQualityIcons: [NetworkQuality: String] = [.bad: "Path to the corresponding icon"]
callCoreView.setNetworkQualityIcons(icons: networkQualityIcons)
Params | Type | Required | Description |
icons | [NetworkQuality: String] | Yes | Network Quality Icon Mapping Table. The dictionary structure is defined as follows: Key ( NetworkQuality ) : NetworkQuality NetworkQuality.unknown :Network status is undetermined. NetworkQuality.excellent:Outstanding network connection.NetworkQuality.good : Stable and good network connection.NetworkQuality.poor : Weak network signal.NetworkQuality.bad : Very weak or unstable network. NetworkQuality.veryBad :Extremely poor network, near disconnection. NetworkQuality.down :Network is disconnected. Value ( String ) : The absolute path or resource name of the icon corresponding to the network status. |
Icons | Description | Download Links |
![]() | Poor Network Indicator. You can set this icon for NetworkQuality.bad, NetworkQuality.veryBad or NetworkQuality.down .It will be displayed when the network quality is poor. |
// Set user avatars (map each userID to an avatar resource path)
var avatars: [String: String] = [:]
let userId = "" // User ID
let avatarPath = "" // Path to user's default avatar resource
avatars[userId] = avatarPath
callCoreView.setParticipantAvatars(avatars: avatars)
Params | Type | Required | Description |
avatars | [String: String] | Yes | User Avatar Mapping Table. The dictionary structure is described as follows: Key : The userID of the user.Value : The absolute path to the user's avatar resource. |
Icons | Description | Download Links |
![]() | Default Profile Picture. You can set this as the default avatar for a user when their profile image fails to load or if no avatar is provided. |

// Set waiting animation (shown for participants in the waiting state)
let waitingAnimationPath = "" // Path to waiting animation GIF resource
callCoreView.setWaitingAnimation(path: waitingAnimationPath)
Params | Type | Required | Description |
path | String | Yes | Absolute path to a GIF format image resource. |
Icons | Description | Download Links |
![]() | User Waiting Animation Animations for group calls. Once configured, this animation will be displayed when the user's status is "Waiting to Answer" (Pending). |
CallStore.observerState.activeCall for changes.activeCall.duration field is reactive and will automatically update your UI.

import UIKit
import AtomicXCore
import Combine

// Label that renders the elapsed call duration as "MM:SS", driven by the
// reactive activeCall.duration field of CallStore.
class TimerView: UILabel {
    private var cancellables = Set<AnyCancellable>()

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupView()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupView()
    }

    private func setupView() {
        textColor = .white
        textAlignment = .center
        font = .systemFont(ofSize: 16)
    }

    // Start observing when attached to a window; drop all subscriptions when
    // detached so the pipeline is not retained off-screen.
    override func didMoveToWindow() {
        super.didMoveToWindow()
        if window != nil {
            // [Recommended Usage] Subscribe here rather than in init so the
            // observer lives only while the view is actually on screen.
            registerActiveCallObserver()
        } else {
            cancellables.removeAll()
        }
    }

    // Emits only when `duration` changes, then refreshes the label on the main queue.
    private func registerActiveCallObserver() {
        CallStore.shared.state.subscribe()
            .map { $0.activeCall }
            .removeDuplicates { $0.duration == $1.duration }
            .receive(on: DispatchQueue.main)
            .sink { [weak self] activeCall in
                // Update call duration
                self?.updateDurationView(activeCall: activeCall)
            }
            .store(in: &cancellables)
    }

    // Formats the duration (seconds) as zero-padded "MM:SS".
    private func updateDurationView(activeCall: CallInfo) {
        let currentDuration = activeCall.duration
        let minutes = currentDuration / 60
        let seconds = currentDuration % 60
        text = String(format: "%02d:%02d", minutes, seconds)
    }
}
// Update the current user's profile (user ID, avatar, nickname).
var userProfile = UserProfile()
userProfile.userID = "" // Your User ID
userProfile.avatarURL = "" // URL of the avatar image
userProfile.nickname = "" // The nickname to be set
LoginStore.shared.setSelfInfo(userProfile: userProfile) { result in
    switch result {
    case .success:
        // Success callback: profile updated successfully
    case .failure(let error):
        // Failure callback: handle the error
    }
}
Params | Type | Required | Description |
userProfile | UserProfile | Yes | User info struct: userID: User ID. avatarURL: User avatar URL. nickname: User nickname. |
Float mode for 1-on-1 calls and Grid mode for multi-party calls.Float Mode | Grid Mode | PIP Mode |
![]() | ![]() | ![]() |
Layout: While waiting, display your own video full screen. After answering, show the remote video full screen and your own video as a floating window. Interaction: Drag the small window or tap to swap big/small video. | Layout: All participant videos are tiled in a grid. Best for 2+ participants. Tap to enlarge a video. Interaction: Tap a participant to enlarge their video. | Layout: In 1v1, remote video is fixed; in multi-party, the active speaker is shown full screen. Interaction: Shows your own video while waiting, displays call timer after answering. |
func setLayoutTemplate(_ template: CallLayoutTemplate)
Params | Type | Description |
template | CallLayoutTemplate | CallCoreView's layout mode. CallLayoutTemplate.float : Layout: While waiting, display your own video full screen. After answering, show the remote video full screen and your own video as a floating window. Interaction: Drag the small window or tap to swap big/small video. CallLayoutTemplate.grid : Layout: All participant videos are tiled in a grid. Best for 2+ participants. Interaction: Tap a participant to enlarge their video. CallLayoutTemplate.pip : Layout: In 1v1, remote video is fixed; in multi-party, the active speaker is shown full screen. Interaction: Shows your own video while waiting, displays call timer after answering. |
timeout field in CallParams to specify the call invitation timeout.

var callParams = CallParams()
callParams.timeout = 30 // Set call timeout to 30 seconds.
CallStore.shared.calls(
    participantIds: userIdList,
    callMediaType: .video,
    params: callParams,
    completion: nil)
Params | Type | Required | Description |
participantIds | List<String> | Yes | A list of User IDs for the target participants. |
callMediaType | CallMediaType | Yes | The media type of the call, used to specify whether to initiate an audio or video call. CallMediaType.video : Video Call. CallMediaType.audio : Audio Call. |
params | CallParams | No | Extended call parameters, such as Room ID, call invitation timeout, etc. roomId (String) : Room ID. An optional parameter; if not specified, it will be automatically assigned by the server. timeout (Int) : Call timeout (in seconds). userData (String) : User custom data for app-specific logic. chatGroupId (String) : Chat group ID, used specifically for group call scenarios. isEphemeralCall (Boolean) : Ephemeral call. Whether the call is encrypted and transient (will not generate a call history record). |
CallPipView component to enable in-app floating windows. When the call interface is covered by another screen (e.g., the user navigates away but the call is ongoing), a floating window displays call status and lets users quickly return to the call.

import UIKit
import AtomicXCore
import Combine

/**
 * Floating Window Controller
 *
 * Used to display the call in a floating window, containing a CallCoreView internally.
 */
class FloatWindowViewController: UIViewController {
    /// Invoked when the user taps the floating window (host uses it to reopen the call UI).
    var tapGestureAction: (() -> Void)?
    private var cancellables = Set<AnyCancellable>()

    private lazy var callCoreView: CallCoreView = {
        let view = CallCoreView(frame: self.view.bounds)
        view.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        view.setLayoutTemplate(.pip) // Set to Picture-in-Picture (PIP) layout mode
        view.isUserInteractionEnabled = false // Disable interaction to allow taps to pass through to the parent view
        return view
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor(white: 0.1, alpha: 1.0)
        view.layer.cornerRadius = 10
        view.layer.masksToBounds = true
        view.addSubview(callCoreView)
        // Add tap gesture recognizer
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(handleTap))
        view.addGestureRecognizer(tapGesture)
        // Delay status observation to prevent the window from closing immediately upon creation
        DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak self] in
            self?.observeCallStatus()
        }
    }

    @objc private func handleTap() {
        tapGestureAction?()
    }

    /**
     * Observe call status changes.
     * Automatically closes the floating window when the call ends.
     */
    private func observeCallStatus() {
        // Fixed: "\\.selfInfo.status" was an escaped key path and would not
        // compile; the key-path literal is "\.selfInfo.status".
        CallStore.shared.state
            .subscribe(StatePublisherSelector<CallState, CallParticipantStatus>(keyPath: \.selfInfo.status))
            .removeDuplicates()
            .receive(on: DispatchQueue.main)
            .sink { [weak self] status in
                if status == .none {
                    // Call ended, post notification to hide the floating window
                    NotificationCenter.default.post(name: NSNotification.Name("HideFloatingWindow"), object: nil)
                }
            }
            .store(in: &cancellables)
    }

    deinit {
        cancellables.removeAll()
    }
}
import UIKit
import AtomicXCore

// Hosts the in-app floating call window: shows/hides it in response to
// notifications and returns the user to the call screen on tap.
class MainViewController: UIViewController {
    private var floatWindow: UIWindow?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Listen for the notification to show the floating window
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(showFloatingWindow),
            name: NSNotification.Name("ShowFloatingWindow"),
            object: nil)
        // Listen for the notification to hide the floating window
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(hideFloatingWindow),
            name: NSNotification.Name("HideFloatingWindow"),
            object: nil)
    }

    /**
     * Displays the in-app floating window.
     */
    @objc private func showFloatingWindow() {
        // Check if the call is currently active/accepted
        let selfStatus = CallStore.shared.state.value.selfInfo.status
        guard selfStatus == .accept else {
            return
        }
        // Prevent duplicate creation if the floating window already exists
        guard floatWindow == nil else { return }
        // ⚠️ CRITICAL: The current windowScene must be used to create the new window
        guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene else {
            return
        }
        // Define floating window dimensions (9:16 aspect ratio)
        let pipWidth: CGFloat = 100
        let pipHeight: CGFloat = pipWidth * 16 / 9
        let pipX = UIScreen.main.bounds.width - pipWidth - 20
        let pipY: CGFloat = 100
        // Create the floating window (associated with the windowScene)
        let window = UIWindow(windowScene: windowScene)
        window.windowLevel = .alert + 1 // Ensure it stays above standard UI
        window.backgroundColor = .clear
        window.frame = CGRect(x: pipX, y: pipY, width: pipWidth, height: pipHeight)
        // Initialize the floating window controller
        let floatVC = FloatWindowViewController()
        floatVC.tapGestureAction = { [weak self] in
            self?.openCallViewController()
        }
        window.rootViewController = floatVC
        self.floatWindow = window
        // Make the window visible
        window.isHidden = false
        window.makeKeyAndVisible()
        // Immediately restore the main window as the key window to maintain proper app focus
        if let mainWindow = windowScene.windows.first(where: { $0 != window }) {
            mainWindow.makeKey()
        }
    }

    /**
     * Hides the in-app floating window.
     */
    @objc private func hideFloatingWindow() {
        floatWindow?.isHidden = true
        floatWindow = nil
    }

    /**
     * Opens the call interface (triggered upon tapping the floating window).
     */
    private func openCallViewController() {
        // Dismiss the floating window first
        hideFloatingWindow()
        // Retrieve the current top-most ViewController
        guard let topVC = getTopViewController() else {
            return
        }
        let callVC = CallViewController()
        callVC.modalPresentationStyle = .fullScreen
        topVC.present(callVC, animated: true)
    }

    /**
     * Utility to retrieve the current top-most ViewController in the view hierarchy.
     */
    private func getTopViewController() -> UIViewController? {
        guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene,
              let keyWindow = windowScene.windows.first(where: { $0.isKeyWindow }),
              let rootVC = keyWindow.rootViewController else {
            return nil
        }
        // Walk the presentation chain to the front-most controller.
        var topVC = rootVC
        while let presentedVC = topVC.presentedViewController {
            topVC = presentedVC
        }
        return topVC
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
import UIKit
import AtomicXCore

class CallViewController: UIViewController {
    /// Entering the call screen: the floating window is redundant here, so ask
    /// the host to hide it.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        NotificationCenter.default.post(name: NSNotification.Name("HideFloatingWindow"), object: nil)
    }

    /// Leaving the call screen: if the call is still connected, ask the host to
    /// show the floating window so the user can quickly return to the call.
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        guard CallStore.shared.state.value.selfInfo.status == .accept else { return }
        NotificationCenter.default.post(name: NSNotification.Name("ShowFloatingWindow"), object: nil)
    }
}
Background Modes under Signing & Capabilities and enable Audio, AirPlay, and Picture in Picture.

import Foundation
import AtomicXCore

// Fill Mode Enumeration
enum PictureInPictureFillMode: Int, Codable {
    case fill = 0 // Aspect Fill (Scale to fill, may crop)
    case fit = 1 // Aspect Fit (Scale to fit, no cropping)
}

// User Video Region
struct PictureInPictureRegion: Codable {
    let userId: String // Unique User ID
    let width: Double // Width (0.0 - 1.0, relative to canvas)
    let height: Double // Height (0.0 - 1.0, relative to canvas)
    let x: Double // X coordinate (0.0 - 1.0, relative to top-left of canvas)
    let y: Double // Y coordinate (0.0 - 1.0, relative to top-left of canvas)
    let fillMode: PictureInPictureFillMode // Rendering fill mode
    let streamType: String // Stream type ("high" for HD or "low" for SD)
    let backgroundColor: String // Hex background color (e.g., "#000000")
}

// Canvas Configuration
struct PictureInPictureCanvas: Codable {
    let width: Int // Canvas width in pixels
    let height: Int // Canvas height in pixels
    let backgroundColor: String // Hex background color
}

// Picture-in-Picture Parameters
struct PictureInPictureParams: Codable {
    let enable: Bool // Toggle PiP functionality
    let cameraBackgroundCapture: Bool? // Whether to continue camera capture in background
    let canvas: PictureInPictureCanvas? // Canvas settings (Optional)
    let regions: [PictureInPictureRegion]? // List of user video regions (Optional)
}

// PiP API Request Object
struct PictureInPictureRequest: Codable {
    let api: String // API identifier name
    let params: PictureInPictureParams // Parameter payload
}
configPictureInPicture method.

// Build the PiP configuration payload.
let params = PictureInPictureParams(
    enable: true,
    cameraBackgroundCapture: true,
    canvas: nil,
    regions: nil)
let request = PictureInPictureRequest(
    api: "configPictureInPicture",
    params: params)
// Encode to JSON string and call the Experimental API
let encoder = JSONEncoder()
if let data = try? encoder.encode(request),
   let jsonString = String(data: data, encoding: .utf8) {
    TUICallEngine.createInstance().callExperimentalAPI(jsonObject: jsonString)
}
UIApplication.shared.isIdleTimerDisabled = true when the call starts, and restore it when the call ends.

class CallViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Disable automatic screen lock to keep the screen on
        UIApplication.shared.isIdleTimerDisabled = true
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Restore the automatic screen lock behavior
        UIApplication.shared.isIdleTimerDisabled = false
    }
}
import Combine

private var cancellables = Set<AnyCancellable>()

/// Observes the local participant's call status to drive ringtone playback:
/// ring while waiting for an answer, stop once accepted or ended.
private func observeSelfCallStatus() {
    CallStore.shared.state.subscribe()
        .map { $0.selfInfo.status }
        .removeDuplicates()
        .receive(on: DispatchQueue.main)
        .sink { [weak self] status in
            switch status {
            case .accept, .none:
                // Stop playing ringtone
                break
            case .waiting:
                // Start playing ringtone
                break
            default:
                break
            }
        }
        .store(in: &cancellables)
}
Target → Signing & Capabilities.+ Capability.Background Modes.Audio, AirPlay, and Picture in Picture (for audio capture and PiP)Voice over IP (for VoIP calls)Remote notifications (optional, for offline push)Info.plist will then include:<key>UIBackgroundModes</key><array><string>audio</string><string>voip</string><string>remote-notification</string></array>
viewDidLoad or before making/answering a call.

import AVFoundation

/**
 * Configure the Audio Session to support background audio capture.
 *
 * Recommended to call this method in the following scenarios:
 * 1. Inside viewDidLoad of the call interface.
 * 2. Before initiating a call (calls).
 * 3. Before answering a call (accept).
 */
private func setupAudioSession() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Set the audio session category to PlayAndRecord.
        // .allowBluetooth: Support for standard Bluetooth headsets.
        // .allowBluetoothA2DP: Support for high-quality Bluetooth audio (A2DP protocol).
        try audioSession.setCategory(.playAndRecord, options: [.allowBluetooth, .allowBluetoothA2DP])
        // Activate the audio session.
        try audioSession.setActive(true)
    } catch {
        // Audio session configuration failed.
        // Fixed: "\\(" was an escaped interpolation and would print literally.
        print("Failed to configure Audio Session: \(error)")
    }
}
.playback mode.

/**
 * Switch the Audio Session for ringtone playback.
 *
 * Use Case: When using AVAudioPlayer to play the ringtone.
 */
private func setAudioSessionForRingtone() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Switch to playback mode (optimizes for output only)
        // NOTE(review): .allowBluetooth is documented for record-capable
        // categories; verify these options are honored with .playback.
        try audioSession.setCategory(.playback, options: [.allowBluetooth, .allowBluetoothA2DP])
        // Force the ringtone to play through the built-in speaker
        try audioSession.overrideOutputAudioPort(.speaker)
        try audioSession.setActive(true)
    } catch {
        // Failed to configure Audio Session for ringtone
        // Fixed: "\\(" was an escaped interpolation and would print literally.
        print("Ringtone audio session error: \(error)")
    }
}

/**
 * Restore to Call Mode after the ringtone stops playing.
 */
private func restoreAudioSessionForCall() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Restore to PlayAndRecord mode (required for two-way VoIP communication)
        try audioSession.setCategory(.playAndRecord, options: [.allowBluetooth, .allowBluetoothA2DP])
        try audioSession.setActive(true)
    } catch {
        // Failed to restore Audio Session
        print("Failed to restore call audio session: \(error)")
    }
}
Feedback