integrate working toy
parent e397f8819f
commit 163b28383c
@@ -1,9 +1,7 @@
 import React, { useState, useEffect } from "react";
 import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
 import * as FileSystem from 'expo-file-system';
-import { AVPlaybackStatus, Audio } from "expo-av";
-import { Buffer } from "buffer";
-import base64 from 'react-native-base64';
+import { Audio } from "expo-av";
 
 interface MainProps {
   route: {
@@ -15,58 +13,58 @@ interface MainProps {
 
 const Main: React.FC<MainProps> = ({ route }) => {
   const { scannedData } = route.params;
-  const [connectionStatus, setConnectionStatus] =
-    useState<string>("Connecting...");
+  const [connectionStatus, setConnectionStatus] = useState<string>("Connecting...");
   const [ws, setWs] = useState<WebSocket | null>(null);
   const [recording, setRecording] = useState<Audio.Recording | null>(null);
   const [audioQueue, setAudioQueue] = useState<string[]>([]);
-  const [isPlaying, setIsPlaying] = useState<boolean>(false);
-  const Buffer = require('buffer/').Buffer;
+  const [sound, setSound] = useState<Audio.Sound | null>();
+  const audioDir = FileSystem.documentDirectory + '01/audio/';
 
-  const constructTempFilePath = async (buffer: Buffer) => {
-    const tempFilePath = `${FileSystem.cacheDirectory}${Date.now()}` + "speech.mp3";
-    await FileSystem.writeAsStringAsync(
-      tempFilePath,
-      buffer.toString("base64"),
-      {
-        encoding: FileSystem.EncodingType.Base64,
-      }
-    );
 
-    return tempFilePath;
-  };
+  async function dirExists() {
+    /**
+     * Checks if audio directory exists in device storage, if not creates it.
+     */
+    const dirInfo = await FileSystem.getInfoAsync(audioDir);
+    if (!dirInfo.exists) {
+      console.log("audio directory doesn't exist, creating...");
+      await FileSystem.makeDirectoryAsync(audioDir, { intermediates: true });
+    }
+  }
 
   const playNextAudio = async () => {
-    console.log("in playNextAudio audioQueue is", audioQueue);
-    console.log("isPlaying is", isPlaying);
+    await dirExists();
+    console.log("in playNextAudio audioQueue is", audioQueue.length);
 
     if (audioQueue.length > 0) {
       const uri = audioQueue.shift() as string;
       console.log("load audio from", uri);
-      setIsPlaying(true);
 
       try {
         const { sound } = await Audio.Sound.createAsync({ uri });
-        await sound.playAsync();
-        console.log("playing audio from", uri);
+        setSound(sound);
 
-        sound.setOnPlaybackStatusUpdate(_onPlaybackStatusUpdate);
+        console.log("playing audio from", uri);
+        await sound?.playAsync();
       } catch (error){
         console.log("Error playing audio", error);
-        setIsPlaying(false);
         playNextAudio();
       }
 
     }
   };
 
-  const _onPlaybackStatusUpdate = (status: AVPlaybackStatus) => {
-    if (status.isLoaded && status.didJustFinish) {
-      setIsPlaying(false);
-      playNextAudio();
-    }
-  };
+  useEffect(() => {
+    return sound
+      ? () => {
+          console.log('Unloading Sound');
+          sound.unloadAsync();
+          setSound(null);
+          playNextAudio();
+        }
+      : undefined;
+  }, [sound]);
 
 
   useEffect(() => {
     let websocket: WebSocket;
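Note on the playback flow this hunk sets up: playNextAudio loads the next queued file into an expo-av Sound and stores it in the `sound` state; when that state next changes, the useEffect cleanup unloads the previous instance and re-enters playNextAudio for the following item. A minimal standalone sketch of the same load/play cycle, with a plain array standing in for the component state (names here are illustrative, not from the commit):

    import { Audio } from "expo-av";

    const queue: string[] = []; // hypothetical stand-in for the audioQueue state

    async function playNext(): Promise<void> {
      const uri = queue.shift();
      if (!uri) return; // queue drained
      // createAsync loads the file; playAsync resolves once playback has
      // started, not when it finishes.
      const { sound } = await Audio.Sound.createAsync({ uri });
      await sound.playAsync();
      // In the commit, the Sound is kept in state; the useEffect cleanup later
      // calls sound.unloadAsync(), clears the state, and re-enters playNextAudio.
    }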
@@ -84,13 +82,21 @@ const Main: React.FC<MainProps> = ({ route }) => {
       const message = JSON.parse(e.data);
 
       if (message.content) {
 
         const parsedMessage = message.content.replace(/^b'|['"]|['"]$/g, "");
-        const buffer = Buffer.from(parsedMessage, 'base64')
-        console.log("parsed message", buffer.toString());
+        console.log("parsedMessage", parsedMessage.slice(0, 30));
 
-        const uri = await constructTempFilePath(buffer);
-        setAudioQueue((prevQueue) => [...prevQueue, uri]);
+        const filePath = `${audioDir}${Date.now()}.mp3`;
+        await FileSystem.writeAsStringAsync(
+          filePath,
+          parsedMessage,
+          {
+            encoding: FileSystem.EncodingType.Base64,
+          }
+        );
+
+        console.log("audio file written to", filePath);
+        setAudioQueue((prevQueue) => [...prevQueue, filePath]);
       }
 
       if (message.format === "bytes.raw" && message.end) {
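Note: the handler no longer decodes the payload through Buffer before writing. expo-file-system accepts the base64 string directly and decodes it as it writes, which is what made the buffer and react-native-base64 imports removable in the first hunk. A minimal sketch of that write path (saveBase64Audio and its content argument are illustrative names, not from the commit):

    import * as FileSystem from "expo-file-system";

    // Write a base64 payload into the app's audio directory; returns the path.
    // Assumes the directory already exists (the commit's dirExists() ensures it).
    async function saveBase64Audio(content: string): Promise<string> {
      const path = `${FileSystem.documentDirectory}01/audio/${Date.now()}.mp3`;
      await FileSystem.writeAsStringAsync(path, content, {
        encoding: FileSystem.EncodingType.Base64, // decoded to raw bytes on disk
      });
      return path;
    }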
@@ -138,7 +144,7 @@ const Main: React.FC<MainProps> = ({ route }) => {
       });
       console.log("Starting recording..");
       const { recording: newRecording } = await Audio.Recording.createAsync(
-        Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
+        Audio.RecordingOptionsPresets.HIGH_QUALITY
      );
       setRecording(newRecording);
       console.log("Recording started");
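Note: Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY is the legacy expo-av constant; newer releases expose the same presets on Audio.RecordingOptionsPresets, which is what this hunk switches to. A minimal sketch of the updated call (startHighQualityRecording is an illustrative name):

    import { Audio } from "expo-av";

    async function startHighQualityRecording(): Promise<Audio.Recording> {
      await Audio.requestPermissionsAsync(); // microphone permission
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: true, // iOS must be put in recording mode first
        playsInSilentModeIOS: true,
      });
      const { recording } = await Audio.Recording.createAsync(
        Audio.RecordingOptionsPresets.HIGH_QUALITY // replaces the removed constant
      );
      return recording;
    }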
@@ -152,8 +158,12 @@ const Main: React.FC<MainProps> = ({ route }) => {
     setRecording(null);
     if (recording) {
       await recording.stopAndUnloadAsync();
+      await Audio.setAudioModeAsync({
+        allowsRecordingIOS: false,
+      });
       const uri = recording.getURI();
       console.log("Recording stopped and stored at", uri);
+
       if (ws && uri) {
         const response = await fetch(uri);
         const blob = await response.blob();
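Note: flipping allowsRecordingIOS back to false after stopping matters on iOS, where a session left in recording mode typically routes playback through the quiet earpiece rather than the main speaker. A sketch of the stop-and-ship sequence, assuming an open WebSocket as in the surrounding code (stopAndUpload is an illustrative name):

    import { Audio } from "expo-av";

    async function stopAndUpload(recording: Audio.Recording, ws: WebSocket): Promise<void> {
      await recording.stopAndUnloadAsync();
      // Return the audio session to playback mode (see note above).
      await Audio.setAudioModeAsync({ allowsRecordingIOS: false });
      const uri = recording.getURI();
      if (!uri) return; // nothing was recorded
      const blob = await (await fetch(uri)).blob(); // read the local file
      ws.send(blob); // one way to ship the audio to the server
    }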
@@ -191,14 +201,15 @@ const Main: React.FC<MainProps> = ({ route }) => {
       </TouchableOpacity>
     </View>
   );
-};
+}
 
 const styles = StyleSheet.create({
   container: {
     flex: 1,
-    justifyContent: "center",
+    justifyContent: 'center',
     alignItems: "center",
-    backgroundColor: "#fff",
+    backgroundColor: '#ecf0f1',
+    padding: 10,
   },
   circle: {
     width: 100,