iOS/macOS Quickstart

Get started with LiveKit and Swift

1. Install LiveKit SDK

let package = Package(
    ...
    dependencies: [
        .package(name: "LiveKit", url: "https://github.com/livekit/client-sdk-swift.git", .upToNextMajor(from: "2.0.2")),
    ],
    targets: [
        .target(
            name: "MyApp",
            dependencies: ["LiveKit"]
        )
    ]
)

2. Declare permissions

Camera and microphone usage need to be declared in your Info.plist file.

<dict>
  ...
  <key>NSCameraUsageDescription</key>
  <string>$(PRODUCT_NAME) uses your camera</string>
  <key>NSMicrophoneUsageDescription</key>
  <string>$(PRODUCT_NAME) uses your microphone</string>
  ...
</dict>
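iOS shows these permission prompts automatically the first time the SDK accesses the camera or microphone. If you prefer to ask before joining a room, you can request access up front with AVFoundation; a minimal sketch (requestMediaPermissions is a hypothetical helper of our own, not part of the LiveKit SDK):

import AVFoundation

// Hypothetical helper: request camera and microphone access before the call
// starts, so the system prompts don't interrupt the call UI. If you skip
// this, iOS shows the same prompts automatically on first capture.
func requestMediaPermissions(completion: @escaping (Bool) -> Void) {
    AVCaptureDevice.requestAccess(for: .video) { cameraGranted in
        AVCaptureDevice.requestAccess(for: .audio) { micGranted in
            DispatchQueue.main.async {
                completion(cameraGranted && micGranted)
            }
        }
    }
}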

Your app can keep a voice call running after it moves to the background if the audio background mode is enabled. Select the app target in Xcode, open the Signing & Capabilities tab, add the Background Modes capability, and check Audio, AirPlay, and Picture in Picture.

Your Info.plist should have the following entries:

<dict>
  ...
  <key>UIBackgroundModes</key>
  <array>
    <string>audio</string>
  </array>
  ...
</dict>

3. Implement your app

This app will connect to a room and render your local camera feed alongside a remote participant's video.

Note that this example hardcodes a token that expires in 2 hours. In a real app, your server will need to generate a token for each client at runtime.

import LiveKit
import UIKit

// !! Note !!
// This sample hardcodes a token which expires in 2 hours. In production you
// should generate tokens on your server, and your client should request a
// token from your server.
private let url: String = "<your LiveKit server URL>"
private let token: String = "<generate a token>"

class QuickStartViewController: UIViewController {
    lazy var room = Room(delegate: self)

    lazy var remoteVideoView = VideoView()
    lazy var localVideoView = VideoView()

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = .white
        view.addSubview(remoteVideoView)
        view.addSubview(localVideoView)
        setupViewConstraints(remoteVideoView, localVideoView)

        Task {
            do {
                try await room.connect(url: url, token: token)
                // Publish camera & mic
                try await room.localParticipant.setCamera(enabled: true)
                try await room.localParticipant.setMicrophone(enabled: true)
            } catch {
                print("Failed to connect: \(error)")
            }
        }
    }

    // Stack the remote view in the top half of the screen and the local view
    // in the bottom half.
    func setupViewConstraints(_ subview1: VideoView, _ subview2: VideoView) {
        subview1.translatesAutoresizingMaskIntoConstraints = false
        subview2.translatesAutoresizingMaskIntoConstraints = false

        // Set up constraints for subview1
        NSLayoutConstraint.activate([
            subview1.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor),
            subview1.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            subview1.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            subview1.heightAnchor.constraint(equalTo: view.safeAreaLayoutGuide.heightAnchor, multiplier: 0.5)
        ])

        // Set up constraints for subview2
        NSLayoutConstraint.activate([
            subview2.topAnchor.constraint(equalTo: subview1.bottomAnchor),
            subview2.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            subview2.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            subview2.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor)
        ])
    }
}

extension QuickStartViewController: RoomDelegate {
    // Called when the local participant publishes a track; attach the local
    // camera feed to the bottom view.
    func room(_ room: Room, participant: LocalParticipant, didPublishTrack publication: LocalTrackPublication) {
        guard let track = publication.track as? VideoTrack else {
            return
        }
        // Delegate callbacks may arrive on a background thread; update UI on main.
        DispatchQueue.main.async {
            self.localVideoView.track = track
        }
    }

    // Called when a remote participant's track is subscribed; attach it to the top view.
    func room(_ room: Room, participant: RemoteParticipant, didSubscribeTrack publication: RemoteTrackPublication) {
        guard let track = publication.track as? VideoTrack else {
            return
        }
        DispatchQueue.main.async {
            self.remoteVideoView.track = track
        }
    }
}
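When the user leaves the call, disconnect from the room so published tracks and the server connection are torn down. A minimal sketch, assuming it lives in the view controller above (check the Room API docs for the exact disconnect() signature in your SDK version):

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    // Leaving the screen ends the call: stop publishing and close the
    // connection to the LiveKit server.
    Task {
        await room.disconnect()
    }
}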

4. Connect to the same Room from web

Open our example app in your browser and join the same room with these credentials:

Server URL: <your LiveKit server URL>
Token: <another token for the room>

5. Create a backend server to generate tokens

Set up a server to generate tokens for your app at runtime by following this guide: Generating Tokens.
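On the client, fetching a token is then a plain HTTP request. A minimal sketch, assuming a hypothetical /getToken endpoint on your backend that returns the JWT as plain text (the host, path, parameters, and response format are placeholders; match them to your server):

import Foundation

// Hypothetical client-side token fetch; adjust the URL and response handling
// to whatever your backend actually exposes.
func fetchToken(room: String, identity: String) async throws -> String {
    var components = URLComponents(string: "https://your-backend.example.com/getToken")!
    components.queryItems = [
        URLQueryItem(name: "room", value: room),
        URLQueryItem(name: "identity", value: identity),
    ]
    let (data, _) = try await URLSession.shared.data(from: components.url!)
    return String(decoding: data, as: UTF8.self)
}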