BytePlus React Native SDK is the audio and video call SDK provided by BytePlus for React Native.
Requires Node.js >= 18.
| Platform | Minimum version |
| --- | --- |
| Android | 6.0 (API 23) |
| iOS | 13.4 |
Install the SDK with npm or yarn:
npm install @byteplus/react-native-rtc
yarn add @byteplus/react-native-rtc
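The examples below also use a few community packages (react-native-permissions, react-native-image-picker, react-native-fs, and @ant-design/react-native). Install the ones you need in the same way, for example:
npm install react-native-permissions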
@/core/index.ts
import {
RTCManager, IEngine, IRoom, RTCVideoEventHandler, RTCRoomEventHandler, IJoinRoomProps, ICreateRTCEngineOptions,
} from '@byteplus/react-native-rtc';
class RTCClient {
manager?: RTCManager;
engine?: IEngine | null;
room?: IRoom | null;
constructor() {
this.manager = new RTCManager();
}
/** Engine related */
  async createEngine(options: ICreateRTCEngineOptions) {
    this.engine = await this.manager!.createRTCEngine(options);
  }
setRTCVideoEventHandler(handlers: RTCVideoEventHandler) {
this.engine?.setRtcVideoEventHandler(handlers);
}
setRTCRoomEventHandler(handlers: RTCRoomEventHandler) {
this.room?.setRTCRoomEventHandler(handlers);
}
startAudioCapture() {
return this.engine?.startAudioCapture();
}
startVideoCapture() {
return this.engine?.startVideoCapture();
}
destroyEngine() {
this.leaveRoom();
this.room?.destroy();
this.room = null;
this.manager!.destroyRTCEngine();
this.engine = null;
}
/** Room related */
joinRoom(params: IJoinRoomProps) {
return this.room?.joinRoom({
token: 'Your token',
...params,
});
}
  leaveRoom() {
    /** effectPlayerUnloadAll and stopScreenCapture are additional RTCClient wrappers omitted from this snippet */
    this.effectPlayerUnloadAll();
    this.stopScreenCapture();
    this.room?.leaveRoom();
  }
createRoom(roomId: string) {
this.room = this.engine?.createRTCRoom(roomId);
return this.room;
}
}
export default new RTCClient();
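The pages below also call a few RTCClient wrappers that this snippet omits (setLocalVideoCanvas, setLogConfig, publishScreen, stopScreenCapture, effectPlayerUnloadAll). A minimal sketch of two of them is shown here, assuming IEngine and IRoom expose methods of the same names; verify the exact signatures against your installed SDK version.

// Inside class RTCClient (sketch only; the engine/room method names below are assumptions)
// StreamIndex, RenderMode and MediaStreamType come from '@byteplus/react-native-rtc'

  setLocalVideoCanvas(index: StreamIndex, canvas: { viewId: string; renderMode: RenderMode }) {
    /** Bind a local stream to the view registered via NativeViewComponent */
    return this.engine?.setLocalVideoCanvas(index, canvas);
  }

  publishScreen(type: MediaStreamType) {
    /** Publish the captured screen/external stream to the room */
    return this.room?.publishScreen(type);
  }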
@/page/login.tsx
This page mainly demonstrates the usage of NativeViewComponent. Note that after the component is registered, call setLocalVideoCanvas in onLoad to set the local rendering view. The same applies to remote users; see the remote-user sketch after the example below.
import { KeyboardAvoidingView, Platform } from 'react-native';
import { request, PERMISSIONS } from 'react-native-permissions';
import { NativeViewComponent, StreamIndex, RenderMode } from '@byteplus/react-native-rtc';
import RTCClient from '@/core';
const viewId = 'my-view';
const Login = () => {
const requestDevicePermission = async () => {
if (Platform.OS === 'ios') {
await request(PERMISSIONS.IOS.CAMERA);
await request(PERMISSIONS.IOS.MICROPHONE);
} else {
await request(PERMISSIONS.ANDROID.CAMERA);
await request(PERMISSIONS.ANDROID.RECORD_AUDIO);
}
};
const handleViewLoad = async () => {
/** Fetch user's device permission */
await requestDevicePermission();
/** Engine Initialization */
await RTCClient.createEngine({
appID: 'Your appId',
});
/** Set relative callbacks */
    RTCClient.setRTCVideoEventHandler({ /* Your custom event handlers */ });
/** Set local stream renderer */
RTCClient.setLocalVideoCanvas(
StreamIndex.STREAM_INDEX_MAIN,
{
viewId,
renderMode: RenderMode.ByteRTCRenderModeFit,
},
);
    /** Create room instance (roomId comes from your app state) */
    RTCClient.createRoom(roomId!);
    /** Set room event callbacks (roomEventListeners: your custom RTCRoomEventHandler) */
    RTCClient.setRTCRoomEventHandler(roomEventListeners);
    /** Join room (localUser and room also come from your app state) */
    RTCClient.joinRoom({
userId: localUser.userId,
extras: {
source_language: room.language,
},
roomConfigs: {
profile: room.roomMode,
isAutoPublish: room.autoPublish,
isAutoSubscribeAudio: room.autoSubscribeAudio,
isAutoSubscribeVideo: room.autoSubscribeVideo,
},
});
/** Capture local streams */
RTCClient.startVideoCapture();
RTCClient.startAudioCapture();
}
return (
<KeyboardAvoidingView
behavior={Platform.OS === "ios" ? "padding" : "height"}
>
<NativeViewComponent
viewId={viewId}
onLoad={handleViewLoad}
kind={
Platform.select({
android: 'TextureView',
ios: 'UIView',
})!
}
/>
</KeyboardAvoidingView>
);
};
export default Login;
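As noted above, the same flow applies to remote users: register another NativeViewComponent, then bind the remote stream to it from a room event callback. The following is a minimal sketch only; the setRemoteVideoCanvas method, its stream-key shape, and the onUserPublishStream callback signature are assumptions modeled on the BytePlus RTC native SDKs, so verify them against the React Native SDK version you have installed.

import { StreamIndex, RenderMode } from '@byteplus/react-native-rtc';
import RTCClient from '@/core';

/** viewId of a second NativeViewComponent registered for the remote user (hypothetical) */
const remoteViewId = 'remote-view';

/** Call after RTCClient.createRoom(...) so the room instance exists */
RTCClient.setRTCRoomEventHandler({
  /** Callback name and signature assumed; check RTCRoomEventHandler in your SDK version */
  onUserPublishStream(userId: string) {
    /** Assumed engine API, mirroring setLocalVideoCanvas */
    RTCClient.engine?.setRemoteVideoCanvas(
      { roomId: 'Your roomId', userId, streamIndex: StreamIndex.STREAM_INDEX_MAIN },
      { viewId: remoteViewId, renderMode: RenderMode.ByteRTCRenderModeFit },
    );
  },
});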
We recommend using the latest RTC React Native SDK version in combination with react-native-vod-player SDK v1.2.4.
@/core/index.ts
Based on the RTC core definition in the Basic Example above, add the following APIs:
...
import type { TTVideoEngine } from '@byteplus/react-native-vod-player';
class RTCClient {
...
  /**
   * @brief Start observing the vod player: capture its frames and use them as the external screen stream.
   */
startVodPlayerCapture(player: TTVideoEngine) {
return this.engine?.startVodPlayerCapture(player);
}
  /**
   * @brief Stop all vod player observers in RTC.
   * @note After invoking this API, you must invoke `startVodPlayerCapture` again to resume observing the vod player.
   */
stopVodPlayerCapture(player: TTVideoEngine) {
return this.engine?.stopVodPlayerCapture(player);
}
}
@/page/vodRtc.tsx
import React, {useEffect, useRef, useState} from 'react';
import fs from 'react-native-fs';
import {Platform, SafeAreaView, ScrollView} from 'react-native';
import {Button, Input, Text, Toast, View} from '@ant-design/react-native';
import {
NativeViewComponent,
StreamIndex,
RenderMode,
MediaStreamType,
LocalLogLevel,
} from '@byteplus/react-native-rtc';
import {
createDirectUrlSource,
type TTVideoEngine,
} from '@byteplus/react-native-vod-player';
import {
launchImageLibrary,
type ImagePickerResponse,
} from 'react-native-image-picker';
import RTCClient from '@/core';
import { createVeplayer } from '@/core/veplayer';
import RowItem from '@/components/RowItem';
import {GlobalStyles} from '@/style';
const viewId = 'my-view';
const auth = {
appId: 'Your RTC AppID',
roomId: 'Your Room ID',
userId: 'Your User ID',
token: 'Your RTC Token',
};
const Page = () => {
const [isViewLoaded, setViewLoaded] = useState<boolean>(false);
const [filepath, setFilepath] = useState('');
const hasCaptureRef = useRef(false);
const playerRef = useRef<TTVideoEngine>();
const handleViewLoad = () => {
setViewLoaded(true);
};
const handleSelectVideoFile = async () => {
try {
const callback = (response: ImagePickerResponse) => {
if (!response.didCancel && !response.errorCode) {
const filePath = response.assets?.[0]?.uri;
if (filePath) {
setFilepath(filePath);
}
}
};
launchImageLibrary(
{
mediaType: 'video',
},
callback,
);
} catch {
Toast.fail('Select media file failed.');
}
};
const handlePublish = () => {
RTCClient.publishScreen(MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
};
const setVideoSource = () => {
const source = filepath
? createDirectUrlSource({
url: filepath,
cacheKey: filepath,
})
: createDirectUrlSource({
url: 'Your media url, like https://xxxx.mp4',
cacheKey: 'remote',
vid: 'remote',
});
playerRef.current!.setVideoSource(source);
};
const handlePlay = async () => {
if (hasCaptureRef.current) {
playerRef.current!.play();
return;
}
if (!playerRef.current) {
return;
}
setVideoSource();
await RTCClient.startVodPlayerCapture(playerRef.current);
await playerRef.current!.play();
RTCClient.publishScreen(MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
hasCaptureRef.current = true;
};
const handleStop = async () => {
if (hasCaptureRef.current) {
playerRef.current!.pause();
}
}
const handleDestroy = async () => {
if (!playerRef.current) {
return;
}
await RTCClient.stopVodPlayerCapture(playerRef.current);
hasCaptureRef.current = false;
}
const initializePlayer = async () => {
    /**
     * @brief It is not necessary to set a viewId for the vod player here.
     * @note Implement createVeplayer yourself; refer to the @byteplus/react-native-vod-player SDK.
     */
playerRef.current = await createVeplayer({ viewId: '' });
playerRef.current.setListener({
onLoadStateChanged(engine, loadState) {
console.log('onLoadStateChanged: ', loadState);
},
onError(message, code) {
console.error('onError: ', message, code);
},
onPlaybackStateChanged(engine, playbackState) {
console.log('onPlaybackStateChanged: ', playbackState);
},
});
};
const initializeRTC = async () => {
/** Init your engine */
let DefaultPath = fs.ExternalDirectoryPath;
if (Platform.OS === 'ios') {
DefaultPath = fs.DocumentDirectoryPath;
}
/** Set log */
RTCClient.setLogConfig({
logLevel: LocalLogLevel.INFO,
logPath: DefaultPath,
logFileSize: 10,
logFilenamePrefix: '',
});
/** Create RTC Engine */
await RTCClient.createEngine({
appID: auth.appId,
parameters: {},
});
/** Set Local video canvas for player */
RTCClient.setLocalVideoCanvas(StreamIndex.STREAM_INDEX_SCREEN, {
viewId,
renderMode: RenderMode.ByteRTCRenderModeFit,
});
/** Join room */
RTCClient.createRoom(auth.roomId);
RTCClient.setRTCRoomEventHandler({
onUserJoined(userInfo, elapsed) {
console.log('onUserJoined: ', userInfo, elapsed);
},
});
RTCClient.joinRoom({
token: auth.token,
userId: auth.userId,
roomConfigs: {
profile: 0,
isAutoPublish: true,
isAutoSubscribeAudio: false,
isAutoSubscribeVideo: false,
},
});
};
useEffect(() => {
if (isViewLoaded) {
initializeRTC();
initializePlayer();
console.log('init success');
}
}, [isViewLoaded]);
  useEffect(() => {
    return () => {
      /** Stop observing the vod player when the page unmounts */
      if (playerRef.current) {
        RTCClient.stopVodPlayerCapture(playerRef.current);
      }
    };
  }, []);
return (
<SafeAreaView>
<ScrollView
style={{
display: 'flex',
flexDirection: 'column',
width: '100%',
height: '100%',
backgroundColor: 'gray',
}}>
<RowItem
theme="dark"
leftItem="File path"
leftItemStyle={{width: '25%'}}
rightItem={
<Input disabled placeholder="Select media file" value={filepath} />
}
/>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={handleSelectVideoFile}>
<Text style={{color: 'gray'}}>Select media file</Text>
</Button>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={() => setFilepath('')}>
<Text style={{color: 'gray'}}>Clear media file</Text>
</Button>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={handlePublish}>
<Text style={{color: 'gray'}}>Push Stream</Text>
</Button>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={handlePlay}>
<Text style={{color: 'gray'}}>Play</Text>
</Button>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={handleStop}>
<Text style={{color: 'gray'}}>Pause</Text>
</Button>
<Button
style={{...GlobalStyles.rowBtn, marginBottom: 6}}
onPress={handleDestroy}>
<Text style={{color: 'gray'}}>Destroy</Text>
</Button>
<View
style={{
flex: 1,
width: '100%',
minHeight: 300,
backgroundColor: '#000',
}}>
<Text>{`${viewId}`}</Text>
<NativeViewComponent
viewId={viewId}
style={{
width: '100%',
height: '100%',
}}
onLoad={handleViewLoad}
kind={
Platform.select({
android: 'SurfaceView',
ios: 'UIView',
})!
}
/>
</View>
</ScrollView>
</SafeAreaView>
);
};
export default Page;
- The screen sharing method differs slightly between Android and iOS. For details, refer to Android screen sharing and iOS screen sharing.
- Debugging on the iOS simulator is not supported; use a real device instead.