diff --git a/app.json b/app.json
index bdd6172..dc31e13 100644
--- a/app.json
+++ b/app.json
@@ -6,7 +6,7 @@
"orientation": "portrait",
"icon": "./assets/images/icon.png",
"scheme": "myapp",
- "userInterfaceStyle": "automatic",
+ "userInterfaceStyle": "dark",
"newArchEnabled": true,
"ios": {
"supportsTablet": true,
@@ -28,7 +28,8 @@
"android.permission.SYSTEM_ALERT_WINDOW",
"android.permission.WAKE_LOCK",
"android.permission.BLUETOOTH"
- ]
+ ],
+ "softwareKeyboardLayoutMode": "pan"
},
"web": {
"bundler": "metro",
diff --git a/app/(start)/index.tsx b/app/(start)/index.tsx
index e3ad665..d6efae9 100644
--- a/app/(start)/index.tsx
+++ b/app/(start)/index.tsx
@@ -1,15 +1,79 @@
+import { ConnectionDetails, fetchToken } from '@/hooks/useConnectionDetails';
import { useRouter } from 'expo-router';
-import { StyleSheet, Button, View } from 'react-native';
+import { useEffect, useState } from 'react';
+import {
+ StyleSheet,
+ View,
+ Image,
+ Text,
+ TouchableOpacity,
+ ActivityIndicator,
+} from 'react-native';
export default function StartScreen() {
const router = useRouter();
+ let [isConnecting, setConnecting] = useState(false);
+ let [connectionDetails, setConnectionDetails] = useState<
+ ConnectionDetails | undefined
+ >(undefined);
+
+ // Fetch token when we're connecting.
+ useEffect(() => {
+ if (isConnecting) {
+ fetchToken().then((details) => {
+ console.log(details);
+ setConnectionDetails(details);
+ if (!details) {
+ setConnecting(false);
+ }
+ });
+ }
+ }, [isConnecting]);
+
+ // Navigate to Assistant screen when we have the connection details.
+ useEffect(() => {
+ if (isConnecting && connectionDetails) {
+ setConnecting(false);
+ setConnectionDetails(undefined);
+ router.navigate('../assistant');
+ }
+ }, [isConnecting, router, connectionDetails]);
+
+ let connectText: string;
+
+ if (isConnecting) {
+ connectText = 'Connecting';
+ } else {
+ connectText = 'Start Voice Assistant';
+ }
+
   return (
-    <View style={styles.container}>
-      <Button title="Connect" onPress={() => router.navigate('../assistant')} />
-    </View>
+    <View style={styles.container}>
+      <Image
+        style={styles.logo}
+        source={require('../../assets/images/start-logo.png')}
+      />
+      <Text style={styles.text}>Chat live with a voice assistant</Text>
+      <TouchableOpacity
+        style={styles.button}
+        disabled={isConnecting}
+        onPress={() => setConnecting(true)}
+      >
+        {isConnecting && (
+          <ActivityIndicator style={styles.activityIndicator} color="#ffffff" />
+        )}
+        <Text style={styles.buttonText}>{connectText}</Text>
+      </TouchableOpacity>
+    </View>
   );
}
@@ -20,4 +84,29 @@ const styles = StyleSheet.create({
alignItems: 'center',
justifyContent: 'center',
},
+ logo: {
+ width: 59,
+ height: 56,
+ marginBottom: 16,
+ },
+ text: {
+ color: '#ffffff',
+ marginBottom: 24,
+ },
+ activityIndicator: {
+ marginEnd: 8,
+ },
+ button: {
+ flexDirection: 'row',
+ backgroundColor: '#002CF2',
+ paddingVertical: 12,
+ paddingHorizontal: 12,
+ borderRadius: 24,
+ alignItems: 'center',
+ justifyContent: 'center',
+ minWidth: 200, // Ensure button has a minimum width when loading
+ },
+ buttonText: {
+ color: '#ffffff',
+ },
});
diff --git a/app/assistant/index.tsx b/app/assistant/index.tsx
index a2b0f3b..54caa77 100644
--- a/app/assistant/index.tsx
+++ b/app/assistant/index.tsx
@@ -1,29 +1,31 @@
import {
+ Animated,
+ Dimensions,
StyleSheet,
+ useAnimatedValue,
View,
- Text,
- useColorScheme,
- Image,
- Pressable,
- ScrollView,
+ ViewStyle,
} from 'react-native';
-import React, { useEffect } from 'react';
+import React, { useCallback, useEffect, useState } from 'react';
import {
AudioSession,
- BarVisualizer,
LiveKitRoom,
useIOSAudioManagement,
useLocalParticipant,
useParticipantTracks,
useRoomContext,
- useTrackTranscription,
- useVoiceAssistant,
+ VideoTrack,
} from '@livekit/react-native';
import { useConnectionDetails } from '@/hooks/useConnectionDetails';
import { SafeAreaView } from 'react-native-safe-area-context';
-import { Track } from 'livekit-client';
import { useRouter } from 'expo-router';
+import ControlBar from './ui/ControlBar';
+import ChatBar from './ui/ChatBar';
+import ChatLog from './ui/ChatLog';
+import AgentVisualization from './ui/AgentVisualization';
+import useDataStreamTranscriptions from '@/hooks/useDataStreamTranscriptions';
+import { Track } from 'livekit-client';
export default function AssistantScreen() {
// Start the audio session first.
@@ -61,180 +63,356 @@ const RoomView = () => {
const room = useRoomContext();
useIOSAudioManagement(room, true);
- const { isMicrophoneEnabled, localParticipant } = useLocalParticipant();
+ const {
+ isMicrophoneEnabled,
+ isCameraEnabled,
+ isScreenShareEnabled,
+ cameraTrack: localCameraTrack,
+ localParticipant,
+ } = useLocalParticipant();
+ const localParticipantIdentity = localParticipant.identity;
- // Transcriptions
- const localTracks = useParticipantTracks(
- [Track.Source.Microphone],
- localParticipant.identity
+ const localScreenShareTrack = useParticipantTracks(
+ [Track.Source.ScreenShare],
+ localParticipantIdentity
);
- const { segments: userTranscriptions } = useTrackTranscription(
- localTracks[0]
- );
-
- const { agentTranscriptions } = useVoiceAssistant();
- const lastUserTranscription = (
- userTranscriptions.length > 0
- ? userTranscriptions[userTranscriptions.length - 1].text
- : ''
- ) as string;
- const lastAgentTranscription = (
- agentTranscriptions.length > 0
- ? agentTranscriptions[agentTranscriptions.length - 1].text
- : ''
- ) as string;
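+  // Choose the local video feed to display: prefer the camera, falling back
+  // to screen share when only that is enabled.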
+ const localVideoTrack =
+ localCameraTrack && isCameraEnabled
+ ? {
+ participant: localParticipant,
+ publication: localCameraTrack,
+ source: Track.Source.Camera,
+ }
+ : localScreenShareTrack.length > 0 && isScreenShareEnabled
+ ? localScreenShareTrack[0]
+ : null;
- // Controls
- var micImage = isMicrophoneEnabled
- ? require('../../assets/images/baseline_mic_white_24dp.png')
- : require('../../assets/images/baseline_mic_off_white_24dp.png');
-
- var exitImage = require('../../assets/images/close_white_24dp.png');
+ // Transcriptions
+ const transcriptionState = useDataStreamTranscriptions();
+ const addTranscription = transcriptionState.addTranscription;
+ const [isChatEnabled, setChatEnabled] = useState(false);
+ const [chatMessage, setChatMessage] = useState('');
-  return (
-    <SafeAreaView style={styles.container}>
-      <SimpleVoiceAssistant />
-      <ScrollView style={styles.logContainer}>
-        <AgentTranscriptionText text={lastAgentTranscription} />
-        <UserTranscriptionText text={lastUserTranscription} />
-      </ScrollView>
-      <View style={styles.controlsContainer}>
-        <Pressable
-          style={({ pressed }) => [
-            { backgroundColor: pressed ? 'rgb(210, 230, 255)' : '#007DFF' },
-            styles.button,
-          ]}
-          onPress={() => {
-            localParticipant.setMicrophoneEnabled(!isMicrophoneEnabled);
-          }}
-        >
-          <Image style={styles.icon} source={micImage} />
-        </Pressable>
-        <Pressable
-          style={({ pressed }) => [
-            {
-              backgroundColor: pressed ? 'rgb(210, 230, 255)' : '#FF0000',
-            },
-            styles.button,
-          ]}
-          onPress={() => {
-            router.back();
-          }}
-        >
-          <Image style={styles.icon} source={exitImage} />
-        </Pressable>
-      </View>
-    </SafeAreaView>
-  );
+ const onChatSend = useCallback(
+ (message: string) => {
+ addTranscription(localParticipantIdentity, message);
+ setChatMessage('');
+ },
+ [localParticipantIdentity, addTranscription, setChatMessage]
);
-};
-const UserTranscriptionText = (props: { text: string }) => {
- let { text } = props;
- const colorScheme = useColorScheme();
- const themeStyle =
- colorScheme === 'light'
- ? styles.userTranscriptionLight
- : styles.userTranscriptionDark;
- const themeTextStyle =
- colorScheme === 'light' ? styles.lightThemeText : styles.darkThemeText;
+ // Control callbacks
+ const onMicClick = useCallback(() => {
+ localParticipant.setMicrophoneEnabled(!isMicrophoneEnabled);
+ }, [isMicrophoneEnabled, localParticipant]);
+ const onCameraClick = useCallback(() => {
+ localParticipant.setCameraEnabled(!isCameraEnabled);
+ }, [isCameraEnabled, localParticipant]);
+ const onScreenShareClick = useCallback(() => {
+ localParticipant.setScreenShareEnabled(!isScreenShareEnabled);
+ }, [isScreenShareEnabled, localParticipant]);
+ const onChatClick = useCallback(() => {
+ setChatEnabled(!isChatEnabled);
+ }, [isChatEnabled, setChatEnabled]);
+ const onExitClick = useCallback(() => {
+ router.back();
+ }, [router]);
-  return (
-    text && (
-      <View style={styles.userTranscriptionContainer}>
-        <Text style={[styles.userTranscription, themeStyle, themeTextStyle]}>
-          {text}
-        </Text>
-      </View>
-    )
+ // Layout positioning
+ const [containerWidth, setContainerWidth] = useState(
+ Dimensions.get('window').width
);
-};
-
-const AgentTranscriptionText = (props: { text: string }) => {
- let { text } = props;
- const colorScheme = useColorScheme();
- const themeTextStyle =
- colorScheme === 'light' ? styles.lightThemeText : styles.darkThemeText;
-  return (
-    text && (
-      <Text style={[styles.agentTranscription, themeTextStyle]}>{text}</Text>
-    )
+ const [containerHeight, setContainerHeight] = useState(
+ Dimensions.get('window').height
);
-};
+ const agentVisualizationPosition = useAgentVisualizationPosition(
+ isChatEnabled,
+ isCameraEnabled || isScreenShareEnabled
+ );
+ const localVideoPosition = useLocalVideoPosition(isChatEnabled, {
+ width: containerWidth,
+ height: containerHeight,
+ });
+
+  let localVideoView = localVideoTrack ? (
+    <Animated.View style={localVideoPosition}>
+      <VideoTrack trackRef={localVideoTrack} style={styles.video} />
+    </Animated.View>
+  ) : null;
-const SimpleVoiceAssistant = () => {
-  const { state, audioTrack } = useVoiceAssistant();
   return (
-    <BarVisualizer
-      state={state}
-      trackRef={audioTrack}
-      style={styles.voiceAssistant}
-    />
+    <SafeAreaView
+      style={styles.container}
+      onLayout={(event) => {
+        const { width, height } = event.nativeEvent.layout;
+        setContainerWidth(width);
+        setContainerHeight(height);
+      }}
+    >
+      <View style={styles.spacer} />
+      <ChatLog
+        style={styles.logContainer}
+        transcriptions={transcriptionState.transcriptions}
+      />
+      <ChatBar
+        style={styles.chatBar}
+        value={chatMessage}
+        onChangeText={(value) => {
+          setChatMessage(value);
+        }}
+        onChatSend={onChatSend}
+      />
+      <ControlBar
+        style={styles.controlBar}
+        options={{
+          isMicEnabled: isMicrophoneEnabled,
+          onMicClick,
+          isCameraEnabled,
+          onCameraClick,
+          isScreenShareEnabled,
+          onScreenShareClick,
+          isChatEnabled,
+          onChatClick,
+          onExitClick,
+        }}
+      />
+      <Animated.View style={agentVisualizationPosition}>
+        <AgentVisualization style={styles.agentVisualization} />
+      </Animated.View>
+      {localVideoView}
+    </SafeAreaView>
   );
};
+
const styles = StyleSheet.create({
container: {
width: '100%',
height: '100%',
alignItems: 'center',
},
- voiceAssistant: {
- width: '100%',
- height: 100,
+ spacer: {
+ height: '24%',
},
logContainer: {
width: '100%',
- flex: 1,
+ flexGrow: 1,
flexDirection: 'column',
+ marginBottom: 8,
},
- controlsContainer: {
- alignItems: 'center',
- flexDirection: 'row',
- },
- button: {
- width: 60,
- height: 60,
- padding: 10,
- margin: 12,
- borderRadius: 30,
+ chatBar: {
+ left: 0,
+ right: 0,
+ marginHorizontal: 16,
+ marginBottom: 16,
},
- icon: {
- width: 40,
- height: 40,
+ controlBar: {
+ left: 0,
+ right: 0,
+ zIndex: 2,
+ marginHorizontal: 16,
+ marginBottom: 8,
},
- userTranscriptionContainer: {
+ video: {
width: '100%',
- alignContent: 'flex-end',
- },
- userTranscription: {
- width: 'auto',
- fontSize: 18,
- alignSelf: 'flex-end',
- borderRadius: 6,
- padding: 8,
- margin: 16,
- },
- userTranscriptionLight: {
- backgroundColor: '#B0B0B0',
- },
- userTranscriptionDark: {
- backgroundColor: '#404040',
- },
-
- agentTranscription: {
- fontSize: 20,
- textAlign: 'left',
- margin: 16,
- },
- lightThemeText: {
- color: '#000000',
+ height: '100%',
},
- darkThemeText: {
- color: '#FFFFFF',
+ agentVisualization: {
+ width: '100%',
+ height: '100%',
},
});
+
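+// Layout sizes expressed as fractions (0..1) of the container dimensions.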
+const expandedAgentWidth = 1;
+const expandedAgentHeight = 1;
+const expandedLocalWidth = 0.3;
+const expandedLocalHeight = 0.2;
+const collapsedWidth = 0.3;
+const collapsedHeight = 0.2;
+
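+// Shared spring configuration for all layout animations below.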
+const createAnimConfig = (toValue: number) => {
+ return {
+ toValue,
+ stiffness: 200,
+ damping: 30,
+ useNativeDriver: false,
+ isInteraction: false,
+ overshootClamping: true,
+ };
+};
+
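+// Animated position/size for the agent visualization: full screen by default,
+// collapsed into the top section while the chat log is visible.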
+const useAgentVisualizationPosition = (
+ isChatVisible: boolean,
+ hasLocalVideo: boolean
+) => {
+ const width = useAnimatedValue(
+ isChatVisible ? collapsedWidth : expandedAgentWidth
+ );
+ const height = useAnimatedValue(
+ isChatVisible ? collapsedHeight : expandedAgentHeight
+ );
+
+ useEffect(() => {
+ const widthAnim = Animated.spring(
+ width,
+ createAnimConfig(isChatVisible ? collapsedWidth : expandedAgentWidth)
+ );
+ const heightAnim = Animated.spring(
+ height,
+ createAnimConfig(isChatVisible ? collapsedHeight : expandedAgentHeight)
+ );
+
+ widthAnim.start();
+ heightAnim.start();
+
+ return () => {
+ widthAnim.stop();
+ heightAnim.stop();
+ };
+ }, [width, height, isChatVisible]);
+
+ const x = useAnimatedValue(0);
+ const y = useAnimatedValue(0);
+ useEffect(() => {
+ let targetX: number;
+ let targetY: number;
+
+ if (!isChatVisible) {
+ targetX = 0;
+ targetY = 0;
+ } else {
+ if (!hasLocalVideo) {
+ // Just agent visualizer showing in top section.
+ targetX = 0.5 - collapsedWidth / 2;
+ targetY = 16;
+ } else {
+ // Handle agent visualizer showing next to local video.
+ targetX = 0.32 - collapsedWidth / 2;
+ targetY = 16;
+ }
+ }
+
+ const xAnim = Animated.spring(x, createAnimConfig(targetX));
+ const yAnim = Animated.spring(y, createAnimConfig(targetY));
+
+ xAnim.start();
+ yAnim.start();
+
+ return () => {
+ xAnim.stop();
+ yAnim.stop();
+ };
+ }, [x, y, isChatVisible, hasLocalVideo]);
+
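+  // Convert the 0..1 animated fractions into percentage-based style values.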
+ return {
+ left: x.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ top: y, // y is defined in pixels
+ width: width.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ height: height.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ };
+};
+
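+// Animated position/size for the local video tile: docked near the bottom-right
+// corner by default, moved up beside the agent visualization while chat is open.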
+const useLocalVideoPosition = (
+ isChatVisible: boolean,
+ containerDimens: { width: number; height: number }
+): ViewStyle => {
+ const width = useAnimatedValue(
+ isChatVisible ? collapsedWidth : expandedLocalWidth
+ );
+ const height = useAnimatedValue(
+ isChatVisible ? collapsedHeight : expandedLocalHeight
+ );
+
+ useEffect(() => {
+ const widthAnim = Animated.spring(
+ width,
+ createAnimConfig(isChatVisible ? collapsedWidth : expandedLocalWidth)
+ );
+ const heightAnim = Animated.spring(
+ height,
+ createAnimConfig(isChatVisible ? collapsedHeight : expandedLocalHeight)
+ );
+
+ widthAnim.start();
+ heightAnim.start();
+
+ return () => {
+ widthAnim.stop();
+ heightAnim.stop();
+ };
+ }, [width, height, isChatVisible]);
+
+ const x = useAnimatedValue(0);
+ const y = useAnimatedValue(0);
+ useEffect(() => {
+ let targetX: number;
+ let targetY: number;
+
+ if (!isChatVisible) {
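+      // Dock to the bottom-right, converting the 16 px and 106 px pixel
+      // margins into fractions of the container size.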
+ targetX = 1 - expandedLocalWidth - 16 / containerDimens.width;
+ targetY = 1 - expandedLocalHeight - 106 / containerDimens.height;
+ } else {
+ // Handle agent visualizer showing next to local video.
+ targetX = 0.66 - collapsedWidth / 2;
+ targetY = 0; // marginTop handles this.
+ }
+
+ const xAnim = Animated.spring(x, createAnimConfig(targetX));
+ const yAnim = Animated.spring(y, createAnimConfig(targetY));
+ xAnim.start();
+ yAnim.start();
+ return () => {
+ xAnim.stop();
+ yAnim.stop();
+ };
+ }, [containerDimens.width, containerDimens.height, x, y, isChatVisible]);
+
+ return {
+ left: x.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ top: y.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ width: width.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ height: height.interpolate({
+ inputRange: [0, 1],
+ outputRange: ['0%', '100%'],
+ }),
+ marginTop: 16,
+ };
+};
diff --git a/app/assistant/ui/AgentVisualization.tsx b/app/assistant/ui/AgentVisualization.tsx
new file mode 100644
index 0000000..bfb61c3
--- /dev/null
+++ b/app/assistant/ui/AgentVisualization.tsx
@@ -0,0 +1,72 @@
+import { useVoiceAssistant } from '@livekit/components-react';
+import { BarVisualizer, VideoTrack } from '@livekit/react-native';
+import React, { useCallback, useState } from 'react';
+import {
+ LayoutChangeEvent,
+ StyleProp,
+ StyleSheet,
+ View,
+ ViewStyle,
+} from 'react-native';
+
+type AgentVisualizationProps = {
+  style: StyleProp<ViewStyle>;
+};
+
+const barSize = 0.2;
+
+export default function AgentVisualization({ style }: AgentVisualizationProps) {
+ const { state, audioTrack, videoTrack } = useVoiceAssistant();
+ const [barWidth, setBarWidth] = useState(0);
+ const [barBorderRadius, setBarBorderRadius] = useState(0);
+ const layoutCallback = useCallback((event: LayoutChangeEvent) => {
+ const { x, y, width, height } = event.nativeEvent.layout;
+ console.log(x, y, width, height);
+ setBarWidth(barSize * height);
+ setBarBorderRadius(barSize * height);
+ }, []);
+  let videoView = videoTrack ? (
+    <VideoTrack trackRef={videoTrack} style={styles.videoTrack} />
+  ) : null;
+  return (
+    <View style={[styles.container, style]} onLayout={layoutCallback}>
+      <View style={styles.barVisualizerContainer}>
+        <BarVisualizer
+          state={state}
+          trackRef={audioTrack}
+          options={{ barWidth, barBorderRadius }}
+          style={styles.barVisualizer}
+        />
+      </View>
+      {videoView}
+    </View>
+  );
+}
+
+const styles = StyleSheet.create({
+ container: {
+ alignItems: 'center',
+ justifyContent: 'center',
+ },
+ videoTrack: {
+ position: 'absolute',
+ width: '100%',
+ height: '100%',
+ zIndex: 1,
+ },
+ barVisualizerContainer: {
+ width: '100%',
+ height: '30%',
+ zIndex: 0,
+ },
+ barVisualizer: {
+ width: '100%',
+ height: '100%',
+ },
+});
diff --git a/app/assistant/ui/ChatBar.tsx b/app/assistant/ui/ChatBar.tsx
new file mode 100644
index 0000000..cb47c55
--- /dev/null
+++ b/app/assistant/ui/ChatBar.tsx
@@ -0,0 +1,78 @@
+import {
+ Image,
+ KeyboardAvoidingView,
+ Platform,
+ StyleProp,
+ StyleSheet,
+ TextInput,
+ TouchableOpacity,
+ View,
+ ViewStyle,
+} from 'react-native';
+
+type ChatBarProps = {
+  style: StyleProp<ViewStyle>;
+ value: string;
+ onChangeText: (text: string) => void;
+ onChatSend: (text: string) => void;
+};
+
+export default function ChatBar({
+ style,
+ value,
+ onChangeText,
+ onChatSend,
+}: ChatBarProps) {
+  return (
+    <KeyboardAvoidingView
+      style={style}
+      behavior={Platform.OS === 'ios' ? 'padding' : undefined}
+    >
+      <View style={styles.container}>
+        <TextInput
+          style={styles.input}
+          value={value}
+          onChangeText={onChangeText}
+          placeholderTextColor="#666666"
+        />
+        <TouchableOpacity
+          style={styles.button}
+          onPress={() => onChatSend(value)}
+        >
+          <Image source={require('@/assets/images/arrow_upward_24dp.png')} />
+        </TouchableOpacity>
+      </View>
+    </KeyboardAvoidingView>
+  );
+}
+
+const styles = StyleSheet.create({
+ container: {
+ width: '100%',
+ flexDirection: 'row',
+ backgroundColor: '#131313',
+ borderRadius: 24,
+ padding: 8,
+ },
+ input: {
+ outlineStyle: undefined,
+ flexGrow: 1,
+ marginStart: 8,
+ marginEnd: 16,
+ color: '#FFFFFF',
+ },
+ button: {
+ width: 32,
+ height: 32,
+ alignItems: 'center',
+ justifyContent: 'center',
+ borderRadius: '50%',
+ backgroundColor: '#666666',
+ },
+});
diff --git a/app/assistant/ui/ChatLog.tsx b/app/assistant/ui/ChatLog.tsx
new file mode 100644
index 0000000..5869abc
--- /dev/null
+++ b/app/assistant/ui/ChatLog.tsx
@@ -0,0 +1,111 @@
+import { Transcription } from '@/hooks/useDataStreamTranscriptions';
+import { useLocalParticipant } from '@livekit/components-react';
+import { useCallback } from 'react';
+import {
+ ListRenderItemInfo,
+ StyleProp,
+ StyleSheet,
+ Text,
+ useColorScheme,
+ View,
+ ViewStyle,
+} from 'react-native';
+import Animated, { LinearTransition } from 'react-native-reanimated';
+
+export type ChatLogProps = {
+  style: StyleProp<ViewStyle>;
+ transcriptions: Transcription[];
+};
+export default function ChatLog({ style, transcriptions }: ChatLogProps) {
+ const { localParticipant } = useLocalParticipant();
+ const localParticipantIdentity = localParticipant.identity;
+
+ const renderItem = useCallback(
+    ({ item }: ListRenderItemInfo<Transcription>) => {
+ const isLocalUser = item.identity === localParticipantIdentity;
+      if (isLocalUser) {
+        return <UserTranscriptionText text={item.segment.text} />;
+      } else {
+        return <AgentTranscriptionText text={item.segment.text} />;
+      }
+ },
+ [localParticipantIdentity]
+ );
+
+  return (
+    <Animated.FlatList
+      style={style}
+      data={transcriptions}
+      renderItem={renderItem}
+      keyExtractor={(item) => item.segment.id}
+      itemLayoutAnimation={LinearTransition}
+    />
+  );
+}
+
+const UserTranscriptionText = (props: { text: string }) => {
+ let { text } = props;
+ const colorScheme = useColorScheme();
+ const themeStyle =
+ colorScheme === 'light'
+ ? styles.userTranscriptionLight
+ : styles.userTranscriptionDark;
+ const themeTextStyle =
+ colorScheme === 'light' ? styles.lightThemeText : styles.darkThemeText;
+
+  return (
+    text && (
+      <View style={styles.userTranscriptionContainer}>
+        <Text style={[styles.userTranscription, themeStyle, themeTextStyle]}>
+          {text}
+        </Text>
+      </View>
+    )
+  );
+};
+
+const AgentTranscriptionText = (props: { text: string }) => {
+ let { text } = props;
+ const colorScheme = useColorScheme();
+ const themeTextStyle =
+ colorScheme === 'light' ? styles.lightThemeText : styles.darkThemeText;
+  return (
+    text && (
+      <Text style={[styles.agentTranscription, themeTextStyle]}>{text}</Text>
+    )
+  );
+};
+
+const styles = StyleSheet.create({
+ userTranscriptionContainer: {
+ width: '100%',
+ alignContent: 'flex-end',
+ },
+ userTranscription: {
+ width: 'auto',
+ fontSize: 17,
+ alignSelf: 'flex-end',
+ borderRadius: 6,
+ paddingHorizontal: 12,
+ paddingVertical: 6,
+ margin: 16,
+ },
+ userTranscriptionLight: {
+ backgroundColor: '#B0B0B0',
+ },
+ userTranscriptionDark: {
+ backgroundColor: '#131313',
+ },
+
+ agentTranscription: {
+ fontSize: 17,
+ textAlign: 'left',
+ margin: 16,
+ },
+ lightThemeText: {
+ color: '#000000',
+ },
+ darkThemeText: {
+ color: '#FFFFFF',
+ },
+});
diff --git a/app/assistant/ui/ControlBar.tsx b/app/assistant/ui/ControlBar.tsx
new file mode 100644
index 0000000..34fa37f
--- /dev/null
+++ b/app/assistant/ui/ControlBar.tsx
@@ -0,0 +1,158 @@
+import { TrackReference, useLocalParticipant } from '@livekit/components-react';
+import { BarVisualizer } from '@livekit/react-native';
+import { useEffect, useState } from 'react';
+import {
+ ViewStyle,
+ StyleSheet,
+ View,
+ Image,
+ TouchableOpacity,
+ StyleProp,
+} from 'react-native';
+
+type ControlBarProps = {
+  style?: StyleProp<ViewStyle>;
+ options: ControlBarOptions;
+};
+
+type ControlBarOptions = {
+ isMicEnabled: boolean;
+ onMicClick: () => void;
+ isCameraEnabled: boolean;
+ onCameraClick: () => void;
+ isScreenShareEnabled: boolean;
+ onScreenShareClick: () => void;
+ isChatEnabled: boolean;
+ onChatClick: () => void;
+ onExitClick: () => void;
+};
+
+export default function ControlBar({ style = {}, options }: ControlBarProps) {
+ const { microphoneTrack, localParticipant } = useLocalParticipant();
+  const [trackRef, setTrackRef] = useState<TrackReference | undefined>(
+    undefined
+  );
+
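+  // Keep the mic visualizer's track reference in sync with the local
+  // microphone track.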
+ useEffect(() => {
+ if (microphoneTrack) {
+ setTrackRef({
+ participant: localParticipant,
+ publication: microphoneTrack,
+ source: microphoneTrack.source,
+ });
+ } else {
+ setTrackRef(undefined);
+ }
+ }, [microphoneTrack, localParticipant]);
+
+ // Images
+ let micImage = options.isMicEnabled
+ ? require('@/assets/images/mic_24dp.png')
+ : require('@/assets/images/mic_off_24dp.png');
+ let cameraImage = options.isCameraEnabled
+ ? require('@/assets/images/videocam_24dp.png')
+ : require('@/assets/images/videocam_off_24dp.png');
+ let screenShareImage = options.isScreenShareEnabled
+ ? require('@/assets/images/present_to_all_24dp.png')
+ : require('@/assets/images/present_to_all_off_24dp.png');
+ let chatImage = options.isChatEnabled
+ ? require('@/assets/images/chat_24dp.png')
+ : require('@/assets/images/chat_off_24dp.png');
+ let exitImage = require('@/assets/images/call_end_24dp.png');
+
+  return (
+    <View style={[styles.container, style]}>
+      <TouchableOpacity
+        style={[styles.button, options.isMicEnabled && styles.enabledButton]}
+        onPress={() => options.onMicClick()}
+      >
+        <Image style={styles.icon} source={micImage} resizeMode="contain" />
+        {trackRef && (
+          <BarVisualizer style={styles.micVisualizer} trackRef={trackRef} />
+        )}
+      </TouchableOpacity>
+      <TouchableOpacity
+        style={[styles.button, options.isCameraEnabled && styles.enabledButton]}
+        onPress={() => options.onCameraClick()}
+      >
+        <Image style={styles.icon} source={cameraImage} resizeMode="contain" />
+      </TouchableOpacity>
+      <TouchableOpacity
+        style={[
+          styles.button,
+          options.isScreenShareEnabled && styles.enabledButton,
+        ]}
+        onPress={() => options.onScreenShareClick()}
+      >
+        <Image
+          style={styles.icon}
+          source={screenShareImage}
+          resizeMode="contain"
+        />
+      </TouchableOpacity>
+      <TouchableOpacity
+        style={[styles.button, options.isChatEnabled && styles.enabledButton]}
+        onPress={() => options.onChatClick()}
+      >
+        <Image style={styles.icon} source={chatImage} resizeMode="contain" />
+      </TouchableOpacity>
+      <TouchableOpacity style={styles.button} onPress={() => options.onExitClick()}>
+        <Image style={styles.icon} source={exitImage} resizeMode="contain" />
+      </TouchableOpacity>
+    </View>
+  );
+}
+
+const styles = StyleSheet.create({
+ container: {
+ flexDirection: 'row',
+ alignItems: 'stretch',
+ paddingHorizontal: 8,
+ backgroundColor: '#070707',
+ borderColor: '#202020',
+ borderRadius: 53,
+ borderWidth: 1,
+ },
+ button: {
+ flex: 1,
+ flexDirection: 'row',
+ height: 44,
+ padding: 10,
+ marginHorizontal: 4,
+ marginVertical: 8,
+ borderRadius: 8,
+ alignItems: 'center',
+ justifyContent: 'center',
+ },
+ enabledButton: {
+ backgroundColor: '#131313',
+ },
+ icon: {
+ width: 20,
+ },
+ micVisualizer: {
+ width: 20,
+ height: 20,
+ },
+});
diff --git a/assets/images/arrow_upward_24dp.png b/assets/images/arrow_upward_24dp.png
new file mode 100644
index 0000000..2f23cef
Binary files /dev/null and b/assets/images/arrow_upward_24dp.png differ
diff --git a/assets/images/call_end_24dp.png b/assets/images/call_end_24dp.png
new file mode 100644
index 0000000..a0e153d
Binary files /dev/null and b/assets/images/call_end_24dp.png differ
diff --git a/assets/images/chat_24dp.png b/assets/images/chat_24dp.png
new file mode 100644
index 0000000..b8636ef
Binary files /dev/null and b/assets/images/chat_24dp.png differ
diff --git a/assets/images/chat_off_24dp.png b/assets/images/chat_off_24dp.png
new file mode 100644
index 0000000..e919378
Binary files /dev/null and b/assets/images/chat_off_24dp.png differ
diff --git a/assets/images/mic_24dp.png b/assets/images/mic_24dp.png
new file mode 100644
index 0000000..95497d7
Binary files /dev/null and b/assets/images/mic_24dp.png differ
diff --git a/assets/images/mic_off_24dp.png b/assets/images/mic_off_24dp.png
new file mode 100644
index 0000000..1eb8c89
Binary files /dev/null and b/assets/images/mic_off_24dp.png differ
diff --git a/assets/images/present_to_all_24dp.png b/assets/images/present_to_all_24dp.png
new file mode 100644
index 0000000..01bf740
Binary files /dev/null and b/assets/images/present_to_all_24dp.png differ
diff --git a/assets/images/present_to_all_off_24dp.png b/assets/images/present_to_all_off_24dp.png
new file mode 100644
index 0000000..ffb0a1e
Binary files /dev/null and b/assets/images/present_to_all_off_24dp.png differ
diff --git a/assets/images/start-logo.png b/assets/images/start-logo.png
new file mode 100644
index 0000000..116a789
Binary files /dev/null and b/assets/images/start-logo.png differ
diff --git a/assets/images/videocam_24dp.png b/assets/images/videocam_24dp.png
new file mode 100644
index 0000000..117d03e
Binary files /dev/null and b/assets/images/videocam_24dp.png differ
diff --git a/assets/images/videocam_off_24dp.png b/assets/images/videocam_off_24dp.png
new file mode 100644
index 0000000..ac6ae98
Binary files /dev/null and b/assets/images/videocam_off_24dp.png differ
diff --git a/hooks/useConnectionDetails.ts b/hooks/useConnectionDetails.ts
index bf9e1fd..60597f8 100644
--- a/hooks/useConnectionDetails.ts
+++ b/hooks/useConnectionDetails.ts
@@ -17,19 +17,29 @@ const hardcodedToken = '';
*/
export function useConnectionDetails(): ConnectionDetails | undefined {
   const [details, setDetails] = useState<ConnectionDetails | undefined>(() => {
+ return undefined;
+ });
+
+ useEffect(() => {
+ fetchToken().then(details => {
+ setDetails(details);
+ });
+ }, []);
+
+ return details;
+}
+
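+/**
+ * Fetches connection details for the room. Uses the hardcoded URL and token
+ * when no sandbox ID is configured; otherwise requests a fresh token from the
+ * sandbox token endpoint.
+ */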
+export async function fetchToken(): Promise<ConnectionDetails | undefined> {
+
if (!sandboxID) {
return {
url: hardcodedUrl,
token: hardcodedToken,
};
}
- return undefined;
- });
-
- useEffect(() => {
const fetchToken = async () => {
if (!sandboxID) {
- return;
+ return undefined;
}
const response = await fetch(tokenEndpoint, {
headers: { 'X-Sandbox-ID': sandboxID },
@@ -37,20 +47,18 @@ export function useConnectionDetails(): ConnectionDetails | undefined {
const json = await response.json();
if (json.serverUrl && json.participantToken) {
- setDetails({
+ return {
url: json.serverUrl,
token: json.participantToken,
- });
+ };
+ } else {
+ return undefined;
}
};
-
- fetchToken();
- }, []);
-
- return details;
+ return fetchToken();
}
-type ConnectionDetails = {
+export type ConnectionDetails = {
url: string;
token: string;
};
diff --git a/hooks/useDataStreamTranscriptions.ts b/hooks/useDataStreamTranscriptions.ts
new file mode 100644
index 0000000..e77d37f
--- /dev/null
+++ b/hooks/useDataStreamTranscriptions.ts
@@ -0,0 +1,145 @@
+import { useRoomContext, useVoiceAssistant } from '@livekit/components-react';
+import { TextStreamReader, TranscriptionSegment } from 'livekit-client';
+import { useCallback, useEffect, useState } from 'react';
+
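+// A single transcription entry: the speaking participant's identity plus the
+// transcription segment itself.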
+export type Transcription = {
+ identity: string,
+ segment: TranscriptionSegment,
+}
+
+export type TranscriptionsState = {
+ transcriptions: Transcription[],
+ addTranscription: (identity: string, message: string) => void,
+}
+
+export default function useDataStreamTranscriptions(): TranscriptionsState {
+ const room = useRoomContext();
+ const { agent } = useVoiceAssistant();
+ const agentIdentity = agent?.identity;
+
+ const [transcriptionMap] = useState