I am using the Ricoh Theta SDK and trying to get a smooth live-preview render in an experimental React Native app.
I followed the guideline for creating a React Native project with the SDK. I am able to initialize the camera, read the live preview and take a picture.
The live preview is not smooth: whenever new byte data arrives, my image is refreshed correctly, but the transition between two consecutive frames is clearly visible in the app.
So I tried the react-native demo, and there the preview is very smooth. I have not managed to get the same result in my own project by following the documentation.
Here is the basic code I use. There are two buttons:
- the first initializes the Ricoh connection and starts the live preview,
- the second takes a picture.
/**
 * Sample React Native App
 * https://github.com/facebook/react-native
 *
 * @format
 */
import React, {useEffect} from 'react';
import type {PropsWithChildren} from 'react';
import {
  Alert,
  Image,
  Platform,
  Pressable,
  SafeAreaView,
  StyleSheet,
  Text,
  useColorScheme,
  View,
} from 'react-native';
import {Colors} from 'react-native/Libraries/NewAppScreen';
import {NativeModules, NativeEventEmitter} from 'react-native';
import {
  initialize,
  getPhotoCaptureBuilder,
  BitrateEnum,
  getLivePreview,
  stopLivePreview,
  setOptions,
  THETA_EVENT_NAME,
  Options,
} from 'theta-client-react-native';

function App(): React.JSX.Element {
  const isDarkMode = useColorScheme() === 'dark';
  const [dataUrl, setDataUrl] = React.useState<string | undefined>();
  const [takenPhoto, setTakenPhoto] = React.useState<string | undefined>();

  const backgroundStyle = {
    backgroundColor: isDarkMode ? Colors.darker : Colors.lighter,
    flex: 1,
  };

  // Starts the preview stream; frames arrive through the THETA_EVENT_NAME emitter.
  const startLivePreview = async () => {
    try {
      await getLivePreview();
    } catch (e) {
      Alert.alert('getLivePreview', 'error: \n' + JSON.stringify(e), [
        {text: 'OK'},
      ]);
    }
  };

  const onConnectPress = async () => {
    // http://192.168.1.1
    // https://fake-theta.vercel.app
    try {
      await initialize('http://192.168.1.1');
      const emitter = new NativeEventEmitter(
        NativeModules.ThetaClientReactNative,
      );
      // Each event carries one preview frame as a data URL; putting it in
      // state re-renders the <Image> below.
      const eventListener = emitter.addListener(THETA_EVENT_NAME, event => {
        setDataUrl(event.data);
      });
      await startLivePreview();
    } catch (e) {
      console.log(e);
    }
  };

  const onTakePress = async () => {
    const photoCapture = await getPhotoCaptureBuilder().build();
    await setOptions({bitrate: 1048576});
    const url = await photoCapture.takePicture();
    console.log(url);
    setTakenPhoto(url);
  };

  return (
    <SafeAreaView style={backgroundStyle}>
      <Pressable onPress={onConnectPress} style={[styles.button]}>
        <Text style={[styles.buttonText]}>Connect</Text>
      </Pressable>
      <Pressable onPress={onTakePress} style={[styles.button]}>
        <Text style={[styles.buttonText]}>Take picture</Text>
      </Pressable>
      {takenPhoto !== undefined ? (
        <Image source={{uri: takenPhoto}} style={[styles.flex]} />
      ) : null}
      {dataUrl !== undefined ? (
        <Image source={{uri: dataUrl}} style={styles.flex} />
      ) : null}
    </SafeAreaView>
  );
}

const styles = StyleSheet.create({
  flex: {
    width: 300,
    height: 150,
  },
  button: {
    backgroundColor: '#e0e0e0',
    width: 200,
    height: 100,
    marginTop: 50,
    marginLeft: 50,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  buttonText: {
    color: 'black',
    fontSize: 20,
  },
});

export default App;
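For reference, here is a minimal sketch of how the preview wiring could be isolated into its own component, with the frame listener registered once in a useEffect and the subscription removed on unmount. The fadeDuration={0} prop is only a guess on my side that Android's default cross-fade on Image source changes might be what makes the frame transitions visible; everything else follows the same event payload shape ({data: string}) used above.

import React, {useEffect, useState} from 'react';
import {Image, NativeEventEmitter, NativeModules} from 'react-native';
import {
  getLivePreview,
  stopLivePreview,
  THETA_EVENT_NAME,
} from 'theta-client-react-native';

// Renders the latest preview frame; assumes initialize() was already called.
export function LivePreview(): React.JSX.Element | null {
  const [frameUri, setFrameUri] = useState<string | undefined>();

  useEffect(() => {
    const emitter = new NativeEventEmitter(
      NativeModules.ThetaClientReactNative,
    );
    // One event per frame, delivered as a data URL.
    const subscription = emitter.addListener(THETA_EVENT_NAME, event => {
      setFrameUri(event.data);
    });
    getLivePreview().catch(console.warn);

    return () => {
      // Stop the stream and drop the listener when the component unmounts.
      stopLivePreview();
      subscription.remove();
    };
  }, []);

  if (frameUri === undefined) {
    return null;
  }

  return (
    <Image
      source={{uri: frameUri}}
      // Guess: disable the Android cross-fade applied when the source changes.
      fadeDuration={0}
      style={{width: 300, height: 150}}
    />
  );
}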