React Native Quickstart

Get started with LiveKit and React Native

1. Install LiveKit SDK

LiveKit provides a client SDK for React Native. Install the package and its dependency with:

npm install @livekit/react-native @livekit/react-native-webrtc
note:

If you're planning to integrate LiveKit into an Expo app, see our quickstart guide for Expo instead.

This library depends on @livekit/react-native-webrtc, which has additional installation instructions for Android.

Once the @livekit/react-native-webrtc dependency is installed, one last step is required. Add the following to your MainApplication.java file:

import com.livekit.reactnative.LiveKitReactNative;
import com.livekit.reactnative.audio.AudioType;
public class MainApplication extends Application implements ReactApplication {
@Override
public void onCreate() {
// Place this above any other RN related initialization
// When the AudioType is omitted, it'll default to CommunicationAudioType.
// Use AudioType.MediaAudioType if user is only consuming audio, and not publishing
LiveKitReactNative.setup(this, new AudioType.CommunicationAudioType());
//...
}
}

If you are using Expo, LiveKit is available on Expo through development builds. See the instructions found here.

Finally, in your index.js file, set up the LiveKit SDK by calling registerGlobals(). This installs the required WebRTC libraries for use in JavaScript, and is needed for LiveKit to work.

import { registerGlobals } from '@livekit/react-native';
// ...
registerGlobals();

2. Connect to a room, publish video & audio

import * as React from 'react';
import {
StyleSheet,
View,
FlatList,
ListRenderItem,
} from 'react-native';
import { useEffect } from 'react';
import {
AudioSession,
LiveKitRoom,
useTracks,
TrackReferenceOrPlaceholder,
VideoTrack,
isTrackReference,
registerGlobals,
} from '@livekit/react-native';
import { Track } from 'livekit-client';
// !! Note !!
// This sample hardcodes a token which expires in 2 hours.
const wsURL = "<your LiveKit server URL>"
const token = "<generate a token>"
export default function App() {
// Start the audio session first.
useEffect(() => {
let start = async () => {
await AudioSession.startAudioSession();
};
start();
return () => {
AudioSession.stopAudioSession();
};
}, []);
return (
<LiveKitRoom
serverUrl={wsURL}
token={token}
connect={true}
options={{
// Use screen pixel density to handle screens with differing densities.
adaptiveStream: { pixelDensity: 'screen' },
}}
audio={true}
video={true}
>
<RoomView />
</LiveKitRoom>
);
};
const RoomView = () => {
// Get all camera tracks.
const tracks = useTracks([Track.Source.Camera]);
const renderTrack: ListRenderItem<TrackReferenceOrPlaceholder> = ({item}) => {
// Render using the VideoTrack component.
if(isTrackReference(item)) {
return (<VideoTrack trackRef={item} style={styles.participantView} />)
} else {
return (<View style={styles.participantView} />)
}
};
return (
<View style={styles.container}>
<FlatList
data={tracks}
renderItem={renderTrack}
/>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'stretch',
justifyContent: 'center',
},
participantView: {
height: 300,
},
});

3. Create a backend server to generate tokens

Set up a server to generate tokens for your app at runtime by following this guide: Generating Tokens.