Create audio system based on Web Audio API
parent 7b2b204389
commit 7b17c46987
@@ -38,6 +38,7 @@
     "qs": "6.11.0",
     "sass": "1.54.9",
     "showdown": "2.1.0",
+    "standardized-audio-context": "^25.3.32",
     "text-clipper": "2.2.0",
     "transliteration": "2.3.5",
     "universal-cookie": "4.0.4",
@@ -72,8 +72,10 @@
 
   // NOTE: Add a transcoded MP3 src at the end for browsers
   // that do not support other codecs to be able to play it :)
-  if (sources.length > 0 && !sources.some(({ type }) => type === 'audio/mpeg')) {
-    sources.push({ mimetype: 'audio/mpeg', listen_url: `${sources[0].listen_url}?to=mp3` })
+  if (sources.length > 0 && !sources.some(({ mimetype }) => mimetype === 'audio/mpeg')) {
+    const url = new URL(sources[0].listen_url)
+    url.searchParams.set('to', 'mp3')
+    sources.push({ mimetype: 'audio/mpeg', listen_url: url.toString() })
   }
 
   return sources
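Side note on the hunk above: switching from string concatenation to the URL API matters when listen_url already carries a query string (Funkwhale listen endpoints typically do), since blindly appending "?to=mp3" would produce a second "?". A minimal sketch of the difference, using a made-up listen URL:

// Hypothetical listen URL that already has a query string
const listenUrl = 'https://example.org/api/v1/listen/1234/?upload=abcd'

// Old approach: naive concatenation yields an invalid second '?'
const broken = `${listenUrl}?to=mp3`
// => 'https://example.org/api/v1/listen/1234/?upload=abcd?to=mp3'

// New approach: URL.searchParams merges the parameter correctly
const url = new URL(listenUrl)
url.searchParams.set('to', 'mp3')
console.log(url.toString())
// => 'https://example.org/api/v1/listen/1234/?upload=abcd&to=mp3'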
@@ -1,14 +1,17 @@
 <script setup lang="ts">
+import { initializeFirstTrack, isPlaying, _seekEnd } from '~/composables/audio/player'
+import { useMouse, useWindowSize } from '@vueuse/core'
+import { useGettext } from 'vue3-gettext'
+import { computed, ref } from 'vue'
 import { useStore } from '~/store'
-import VolumeControl from './VolumeControl.vue'
+import onKeyboardShortcut from '~/composables/onKeyboardShortcut'
+import usePlayer from '~/composables/audio/usePlayer'
+import useQueue from '~/composables/audio/useQueue'
 
 import TrackFavoriteIcon from '~/components/favorites/TrackFavoriteIcon.vue'
 import TrackPlaylistIcon from '~/components/playlists/TrackPlaylistIcon.vue'
-import onKeyboardShortcut from '~/composables/onKeyboardShortcut'
-import { computed, ref } from 'vue'
-import { useGettext } from 'vue3-gettext'
-import { useMouse, useWindowSize } from '@vueuse/core'
-import useQueue from '~/composables/audio/useQueue'
-import usePlayer from '~/composables/audio/usePlayer'
+import VolumeControl from './VolumeControl.vue'
 
 const store = useStore()
 const { $pgettext } = useGettext()
@@ -97,6 +100,10 @@ const touchProgress = (event: MouseEvent) => {
 
 const { x } = useMouse()
 const { width: screenWidth } = useWindowSize()
+
+initializeFirstTrack()
+onKeyboardShortcut('w', () => { isPlaying.value = !isPlaying.value })
+onKeyboardShortcut('9', () => { _seekEnd() })
 </script>
 
 <template>
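The hunk above wires the new player module into the component through the existing onKeyboardShortcut composable, whose implementation is not part of this diff. For orientation only, a hypothetical sketch of what such a composable could look like on top of @vueuse/core (the real composable may differ, e.g. in modifier handling or scoping):

import { useEventListener } from '@vueuse/core'

// Hypothetical sketch: run `handler` whenever `key` is pressed,
// ignoring key presses that originate from form fields.
export default (key: string, handler: () => void) => {
  return useEventListener(document, 'keydown', (event: KeyboardEvent) => {
    const target = event.target as HTMLElement
    if (['INPUT', 'TEXTAREA', 'SELECT'].includes(target.tagName)) return
    if (event.key === key) handler()
  })
}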
@@ -0,0 +1,36 @@
+import type { IAudioContext, IMediaElementAudioSourceNode } from 'standardized-audio-context'
+
+import { AudioContext } from 'standardized-audio-context'
+import { useEventListener } from '@vueuse/core'
+
+// Audio nodes
+const AUDIO_CONTEXT = new AudioContext()
+const GAIN_NODE = AUDIO_CONTEXT.createGain()
+
+// Unlock AudioContext automatically
+const UNLOCK_EVENTS = ['touchstart', 'touchend', 'mousedown', 'keydown']
+for (const event of UNLOCK_EVENTS) {
+  const stop = useEventListener(window, event, () => {
+    AUDIO_CONTEXT.resume()
+    stop()
+  }, { passive: true })
+}
+
+// Connect Gain Node
+GAIN_NODE.connect(AUDIO_CONTEXT.destination)
+GAIN_NODE.gain.value = 1
+
+// TODO (wvffle): Create equalizer filters
+const equalizerFilters = [
+  GAIN_NODE
+]
+
+export const connectAudioSource = (sourceNode: IMediaElementAudioSourceNode<IAudioContext>) => {
+  for (const filter of equalizerFilters) {
+    sourceNode.connect(filter)
+  }
+}
+
+export const createAudioSource = (sourceElement: HTMLAudioElement) => {
+  return AUDIO_CONTEXT.createMediaElementSource(sourceElement)
+}
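A minimal usage sketch of the new module above (assuming it is the ~/composables/audio/audio-api file referenced by the imports elsewhere in this commit): each HTMLAudioElement is wrapped in a MediaElementSourceNode once and routed through the shared gain node.

import { createAudioSource, connectAudioSource } from '~/composables/audio/audio-api'

// Hypothetical consumer: route a plain <audio> element through the Web Audio graph
const audio = new Audio('https://example.org/track.mp3')

// NOTE: an element can only be wrapped in a MediaElementSourceNode once,
// which is one reason the tracks module below caches its Sound objects
const sourceNode = createAudioSource(audio)
connectAudioSource(sourceNode)

audio.play()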
@@ -0,0 +1,33 @@
+import { getCurrentSound, createTrack } from '~/composables/audio/tracks'
+import { tryOnMounted } from '@vueuse/core'
+import { ref, watch } from 'vue'
+
+import useQueue from '~/composables/audio/useQueue'
+
+const { currentIndex } = useQueue()
+
+export const isPlaying = ref(false)
+
+watch(isPlaying, (playing) => {
+  const sound = getCurrentSound()
+  if (!sound) return
+
+  if (playing) {
+    sound.audio.play()
+    return
+  }
+
+  sound.audio.pause()
+})
+
+// Create first track when we initialize the page
+export const initializeFirstTrack = () => tryOnMounted(() => {
+  createTrack(currentIndex.value)
+})
+
+export const _seekEnd = () => {
+  const sound = getCurrentSound()
+  if (!sound) return
+
+  sound.audio.currentTime = sound.audio.duration - 3
+}
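Because isPlaying is a module-level ref rather than per-component state, any component that imports it drives the same underlying audio element through the watch above. A short illustrative sketch (the toggle handler is a hypothetical consumer, not part of this commit):

import { isPlaying } from '~/composables/audio/player'

// Hypothetical toggle handler in any component: flipping the ref
// triggers the watch() in the player module, which calls play()/pause()
// on the current sound's HTMLAudioElement.
const togglePlayback = () => {
  isPlaying.value = !isPlaying.value
}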
@@ -0,0 +1,133 @@
+import type { IAudioContext, IMediaElementAudioSourceNode } from 'standardized-audio-context'
+import type { Track, Upload } from '~/types'
+
+import { connectAudioSource, createAudioSource } from '~/composables/audio/audio-api'
+import { isPlaying } from '~/composables/audio/player'
+
+import useQueue from '~/composables/audio/useQueue'
+
+import { useEventListener } from '@vueuse/core'
+
+import store from '~/store'
+import axios from 'axios'
+
+export interface SoundSource {
+  uuid: string
+  mimetype: string
+  url: string
+}
+
+export interface Sound {
+  audio: HTMLAudioElement
+  sources: SoundSource[]
+  sourceNode: IMediaElementAudioSourceNode<IAudioContext>
+}
+
+const ALLOWED_PLAY_TYPES: (CanPlayTypeResult | undefined)[] = ['maybe', 'probably']
+const AUDIO_ELEMENT = document.createElement('audio')
+
+const { tracks, currentIndex } = useQueue()
+
+const soundPromises = new Map<number, Promise<Sound>>()
+const soundCache = new Map<number, Sound>()
+
+const getUploadSources = (uploads: Upload[]): SoundSource[] => {
+  const sources = uploads
+    // NOTE: Filter out repeating and unplayable media types
+    .filter(({ mimetype }, index, array) => array.findIndex((upload) => upload.mimetype === mimetype) === index)
+    .filter(({ mimetype }) => ALLOWED_PLAY_TYPES.includes(AUDIO_ELEMENT.canPlayType(`${mimetype}`)))
+    .map((upload): SoundSource => ({
+      ...upload,
+      url: store.getters['instance/absoluteUrl'](upload.listen_url) as string
+    }))
+
+  // NOTE: Add a transcoded MP3 src at the end for browsers
+  // that do not support other codecs to be able to play it :)
+  if (sources.length > 0 && !sources.some(({ mimetype }) => mimetype === 'audio/mpeg')) {
+    const url = new URL(sources[0].url)
+    url.searchParams.set('to', 'mp3')
+    sources.push({ uuid: 'transcoded', mimetype: 'audio/mpeg', url: url.toString() })
+  }
+
+  return sources
+}
+
+const getTrackSources = async (track: Track): Promise<SoundSource[]> => {
+  if (track === undefined) return []
+
+  if (track.uploads.length === 0) {
+    // we don't have any information for this track, we need to fetch it
+    const { uploads } = await axios.get(`tracks/${track.id}/`)
+      .then(response => response.data as Track, () => ({ uploads: [] as Upload[] } as Track))
+
+    track.uploads = uploads
+  }
+
+  return getUploadSources(track.uploads)
+}
+
+export const createSound = async (track: Track): Promise<Sound> => {
+  if (soundCache.has(track.id)) {
+    return soundCache.get(track.id) as Sound
+  }
+
+  if (soundPromises.has(track.id)) {
+    return soundPromises.get(track.id) as Promise<Sound>
+  }
+
+  const createSoundPromise = async () => {
+    const sources = await getTrackSources(track)
+
+    const audio = new Audio()
+    audio.src = sources[0].url
+
+    const sourceNode = createAudioSource(audio)
+
+    const sound = { audio, sources, sourceNode }
+    soundCache.set(track.id, sound)
+    soundPromises.delete(track.id)
+    return sound
+  }
+
+  const soundPromise = createSoundPromise()
+  soundPromises.set(track.id, soundPromise)
+  return soundPromise
+}
+
+// Create track from queue
+export const createTrack = async (index: number) => {
+  if (tracks.value.length <= index || index === -1) return
+  console.log('LOADING TRACK')
+
+  const track = tracks.value[index]
+  if (!soundPromises.has(track.id) && !soundCache.has(track.id)) {
+    // TODO (wvffle): Resolve race condition
+    console.log('NO TRACK IN CACHE, CREATING')
+  }
+
+  const sound = await createSound(track)
+  console.log('CONNECTING NODE')
+
+  const stop = useEventListener(sound.audio, 'ended', () => {
+    createTrack(currentIndex.value + 1)
+    store.dispatch('queue/next')
+    stop()
+  })
+
+  sound.sourceNode.disconnect()
+  connectAudioSource(sound.sourceNode)
+
+  if (isPlaying.value) {
+    sound.audio.play()
+  }
+
+  // NOTE: Preload next track
+  if (index + 1 < tracks.value.length) {
+    createSound(tracks.value[index + 1])
+      .then(sound => sound.audio.load())
+  }
+}
+
+export const getCurrentSound = () => {
+  return soundCache.get(tracks.value[currentIndex.value]?.id ?? -1)
+}
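createSound above combines two maps to avoid duplicate work: soundCache for resolved sounds and soundPromises for in-flight loads, so concurrent calls for the same track share a single promise instead of creating two audio elements. The same memoization pattern in isolation (a generic sketch; names and the stand-in loader are illustrative only):

// Generic promise memoization: concurrent callers share the in-flight promise,
// later callers get the cached result.
const cache = new Map<number, string>()
const pending = new Map<number, Promise<string>>()

const load = async (id: number): Promise<string> => {
  if (cache.has(id)) return cache.get(id) as string
  if (pending.has(id)) return pending.get(id) as Promise<string>

  const promise = (async () => {
    const result = `resource-${id}` // stand-in for an actual fetch
    cache.set(id, result)
    pending.delete(id)
    return result
  })()

  pending.set(id, promise)
  return promise
}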
@@ -231,7 +231,6 @@ const togglePlayback = () => {
   if (playing.value) return pause()
   return resume()
 }
-
 export default () => {
   return {
     looping,
front/yarn.lock: 1126 lines changed (file diff suppressed because it is too large)