@@ -122,44 +122,102 @@ import { registerGlobals } from '@livekit/react-native';
122122registerGlobals();
123123```
124124
125- A Room object can then be created and connected to.
125+ In your app, wrap your component in a `LiveKitRoom` component, which manages a
126+ Room object and allows you to use our hooks to create your own real-time video/audio app.
126127
127128``` js
128- import { Participant , Room , Track } from ' livekit-client' ;
129- import { useRoom , AudioSession , VideoView } from ' @livekit/react-native' ;
130-
131- /* ...*/
132-
133- // Create a room state
134- const [room ] = useState (() => new Room ());
135-
136- // Get the participants from the room
137- const { participants } = useRoom (room);
129+ import * as React from 'react';
130+ import {
131+   StyleSheet,
132+   View,
133+   FlatList,
134+   ListRenderItem,
135+ } from 'react-native';
136+ import { useEffect } from 'react';
137+ import {
138+   AudioSession,
139+   LiveKitRoom,
140+   useTracks,
141+   TrackReferenceOrPlaceholder,
142+   VideoTrack,
143+   isTrackReference,
144+   registerGlobals,
145+ } from '@livekit/react-native';
146+ import { Track } from 'livekit-client';
147+
148+ const wsURL = "wss://example.com"
149+ const token = "your-token-here"
150+
151+ export default function App() {
152+   // Start the audio session first.
153+   useEffect(() => {
154+     let start = async () => {
155+       await AudioSession.startAudioSession();
156+     };
157+
158+     start();
159+     return () => {
160+       AudioSession.stopAudioSession();
161+     };
162+   }, []);
163+
164+   return (
165+     <LiveKitRoom
166+       serverUrl={wsURL}
167+       token={token}
168+       connect={true}
169+       options={{
170+         // Use screen pixel density to handle screens with differing densities.
171+         adaptiveStream: { pixelDensity: 'screen' },
172+       }}
173+       audio={true}
174+       video={true}
175+     >
176+       <RoomView />
177+     </LiveKitRoom>
178+   );
179+ };
138180
139- useEffect (() => {
140- let connect = async () => {
141- await AudioSession .startAudioSession ();
142- await room .connect (url, token, {});
143- console .log (' connected to ' , url, ' ' , token);
144- };
145- connect ();
146- return () => {
147- room .disconnect ();
148- AudioSession .stopAudioSession ();
181+ const RoomView = () => {
182+   // Get all camera tracks.
183+   // The useTracks hook grabs the tracks from LiveKitRoom component
184+   // providing the context for the Room object.
185+   const tracks = useTracks([Track.Source.Camera]);
186+
187+   const renderTrack: ListRenderItem<TrackReferenceOrPlaceholder> = ({item}) => {
188+     // Render using the VideoTrack component.
189+     if (isTrackReference(item)) {
190+       return (<VideoTrack trackRef={item} style={styles.participantView} />)
191+     } else {
192+       return (<View style={styles.participantView} />)
193+     }
149194 };
150- }, [url, token, room]);
151195
152- const videoView = participants .length > 0 && (
153- < VideoView
154- style= {{ flex: 1 , width: ' 100%' }}
155- videoTrack= {participants[0 ].getTrack (Track .Source .Camera )? .videoTrack }
156- / >
157- );
196+   return (
197+     <View style={styles.container}>
198+       <FlatList
199+         data={tracks}
200+         renderItem={renderTrack}
201+       />
202+     </View>
203+   );
204+ };
205+
206+ const styles = StyleSheet.create({
207+   container: {
208+     flex: 1,
209+     alignItems: 'stretch',
210+     justifyContent: 'center',
211+   },
212+   participantView: {
213+     height: 300,
214+   },
215+ });
158216```
159217
160218[ API documentation is located here.] ( https://htmlpreview.github.io/?https://raw.githubusercontent.com/livekit/client-sdk-react-native/main/docs/modules.html )
161219
162- Additional documentation for the LiveKit SDK can be found at https://docs.livekit.io/references/client-sdks/
220+ Additional documentation for the LiveKit SDK can be found at https://docs.livekit.io/
163221
164222## Audio sessions
165223
0 commit comments