Skip to content

Commit

Permalink
Decoupled visualizer from MinimlAudioPlayer a little (wrote a composa…
Browse files Browse the repository at this point in the history
…ble to pass the audio element). The rest of the decoupling was just removing bad logic — there is no need to call methods on the visualizer from MinimlAudioPlayer. Also fixed the logic around the previous and next buttons: if you hit previous or next while paused, you move to the adjacent track without playing it; if you are in play mode, the new track plays. All playback is controlled by the play/pause buttons.
  • Loading branch information
thorstensson committed Jan 6, 2025
1 parent 3ad3c13 commit 7e7828f
Show file tree
Hide file tree
Showing 8 changed files with 93 additions and 72 deletions.
Binary file removed src/assets/audio/AlbumArtSmall.jpg
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file removed src/assets/audio/Folder.jpg
Binary file not shown.
Binary file not shown.
86 changes: 35 additions & 51 deletions src/components/MinimlAudioPlayer.vue
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
<script setup lang="ts">
import { ref, useTemplateRef, reactive, watch, onMounted, computed } from "vue";
import { useEventListener } from "@vueuse/core";
import type { MaybeRef } from "@vueuse/core";
import { useEventListener, type MaybeRef } from "@vueuse/core";
import { gsap } from "gsap";
import { PlayIcon, PauseIcon, ChevronRightIcon, ChevronLeftIcon } from "@heroicons/vue/24/solid";
import MinimlSpectrumVisualizer from "./MinimlSpectrumVisualizer.vue";
let audioCtx: AudioContext;
let analyser: AnalyserNode;
//let gainNode: GainNode;
const firstRun = ref<boolean>(true);
import MinimlSpectrumVisualizer from "./MinimlSpectrumVisualizer.vue";
import { useStoreRef } from "@/composable/useStoreRef";
const spectrum = useTemplateRef("spectrum");
const audioEl = useTemplateRef("audio-element");
Expand All @@ -24,14 +19,13 @@ const trackIndex = ref<number>(0);
const currentTrack = ref<string>("");
const isPlaying = ref<boolean>(false);
// Add mp3 path here, local or online, but use your own S3 bucket or local path
// Add mp3 path here; note that when testing on Vite with localhost, metadata events won't fire (next, previous)
const PATH = "https://audio-tt.s3.amazonaws.com";
// The panel width; if the track text is wider than this, GSAP yoyo-scrolls it
const TRACK_WIDTH = 160;
//Add tracks here; no plans to make a DOM playlist at the moment (see Roadmap)
//Add tracks here; no plans to make a DOM playlist
const playlist = reactive([
{ artist: "Ernes Guevara", track: "Ernes Guevara - Lost (Original Mix).mp3" },
{ artist: "Stockholm Syndrome, Young Squage", track: "EMPHI - Stockholm Syndrome (Original Mix).mp3" },
Expand All @@ -47,73 +41,58 @@ const ifTrackPrev = computed(() => {
return trackIndex.value > 0;
})
// Check for current track
const currTrack = computed(() => {
return playlist[trackIndex.value].track;
})
/**
* We create an AudioContext after user interaction.
* TODO: Make this a composable, also when that is done, it should be in MinimlSpectrumVisualizer
*/
const createAudioContext = () => {
audioCtx = new AudioContext();
const audioSrc = audioCtx.createMediaElementSource(audioEl.value!)
analyser = audioCtx.createAnalyser();
audioSrc.connect(analyser);
analyser.connect(audioCtx.destination);
if (audioCtx.state === "suspended") {
audioCtx.resume();
}
firstRun.value = false;
}
const doScrub = (e: MouseEvent) => {
if (progressBar.value && audioEl.value) {
const scrubTime = (e.offsetX / progressBar.value!.offsetWidth) * audioEl.value.duration;
audioEl.value.currentTime = scrubTime;
}
}
// Turns out that AudioContext too needs user interaction before instantiation
const togglePlay = () => {
if (firstRun.value) createAudioContext()
isPlaying.value = !isPlaying.value;
if (isPlaying.value && audioEl.value) {
audioEl.value.play()
spectrum.value?.startAnimRequest();
playTrack()
} else if (audioEl.value) {
audioEl.value.pause();
spectrum.value?.cancelAnimRequest()
}
}
// Check if the audio context exists, as next can also be the first user interaction
// For previous and next we need to know if track is playing when we press them
const nextTrack = () => {
if (firstRun.value) createAudioContext();
if (!isPlaying.value) isPlaying.value = true;
if (ifTrackNext.value && spectrum.value) {
if (ifTrackNext.value && !isPlaying.value) {
trackIndex.value++;
currentTrack.value = currTrack.value;
} else if (ifTrackNext.value) {
isPlaying.value = true;
trackIndex.value++;
playTrack();
}
}
const prevTrack = () => {
if (ifTrackPrev.value && spectrum.value) {
console.log("NOOO")
if (ifTrackPrev.value && !isPlaying.value) {
trackIndex.value--;
currentTrack.value = currTrack.value;
} else if (ifTrackPrev.value) {
isPlaying.value = true;
trackIndex.value--;
playTrack();
}
}
const playTrack = () => {
// vueuse, easy cancel. oncanplaythrough does not work on mobile, loadedmetadata does???
// Vueuse, easy cancel. oncanplaythrough does not work on mobile, loadedmetadata does???
const cancelcan = useEventListener(audioEl.value as unknown as MaybeRef, 'loadedmetadata', () => {
console.log("YEP")
audioEl.value?.play();
spectrum.value?.startAnimRequest();
cancelcan();
})
// synchronous, so we do this after adding event
// Synchronous, so we do this after adding event
isPlaying.value = true;
audioEl.value!.currentTime = 0;
currentTrack.value = currTrack.value;
Expand Down Expand Up @@ -147,11 +126,9 @@ const durationUpdate = () => {
const onTrackEnded = () => {
progressBarFilled.value!.style.flexBasis = "0%";
if (ifTrackNext.value && spectrum.value) {
spectrum.value.cancelAnimRequest()
trackIndex.value++;
playTrack();
} else if (audioEl.value && spectrum.value) {
spectrum.value.cancelAnimRequest()
isPlaying.value = false;
trackIndex.value = 0;
audioEl.value.pause();
Expand All @@ -167,7 +144,7 @@ watch(
const { width } = panelTrack.value?.getBoundingClientRect() || {};
const trackAnim = gsap.timeline();
if (isPlaying.value && (width && width > TRACK_WIDTH)) {
const remWidth = width - TRACK_WIDTH +10;
const remWidth = width - TRACK_WIDTH + 10;
trackAnim.fromTo(".panel__box__track", { x: 0 }, {
duration: width / 100, x: -remWidth, repeat: -1, yoyo: true, ease: "sine.inOut"
});
Expand All @@ -178,6 +155,9 @@ watch(
)
onMounted(() => {
const { addElem } = useStoreRef();
addElem('audioEl', audioEl);
currentTrack.value = currTrack.value;
let mousedown = false;
Expand Down Expand Up @@ -218,10 +198,14 @@ onMounted(() => {
<div class="controls__pause-txt" :class="{ 'controls__pause-txt--show': !isPlaying }">PAUSE</div>
<PlayIcon @click="togglePlay" class="controls__play" :class="{ 'controls__play--show': !isPlaying }" />
<PauseIcon @click="togglePlay" class="controls__pause" :class="{ 'controls__pause--show': isPlaying }" />
<ChevronLeftIcon @click="prevTrack" class="controls__prev" :class="{ 'controls__prev--end': !ifTrackPrev }"></ChevronLeftIcon>
<ChevronRightIcon @click="nextTrack" class="controls__next" :class="{ 'controls__next--end': !ifTrackNext }" ></ChevronRightIcon>
<ChevronLeftIcon @click="prevTrack" class="controls__prev" :class="{ 'controls__prev--end': !ifTrackPrev }">
</ChevronLeftIcon>
<ChevronRightIcon @click="nextTrack" class="controls__next"
:class="{ 'controls__next--end': !ifTrackNext }"></ChevronRightIcon>
</div>
<div v-if="audioEl">
<MinimlSpectrumVisualizer ref="spectrum" />
</div>
<MinimlSpectrumVisualizer :analyser="analyser" ref="spectrum" />
</div>
</template>

Expand Down Expand Up @@ -389,15 +373,15 @@ body {
transform: translateY(-50%);
color: $clr-quinary;
cursor: pointer;
opacity:1;
opacity: 1;
&--end{
&--end {
opacity: .5;
}
}
&__next {
right:5px;
right: 5px;
}
&__pause-txt {
Expand Down
58 changes: 37 additions & 21 deletions src/components/MinimlSpectrumVisualizer.vue
Original file line number Diff line number Diff line change
@@ -1,26 +1,42 @@
<script setup lang="ts">
import { watch, useTemplateRef, onMounted } from 'vue';
import { ref, useTemplateRef, onMounted } from 'vue';
import { useEventListener } from "@vueuse/core";
import { useStoreRef } from '@/composable/useStoreRef';
const canvas = useTemplateRef<HTMLCanvasElement>("canvas")
let ctx: CanvasRenderingContext2D;
const analyser = ref<AnalyserNode>();
let ctx: CanvasRenderingContext2D;
let myReq: number;
let dataArray: Uint8Array;
let barWidth: number;
let audioCtx: AudioContext;
let dataArray: Uint8Array;
let bufferLength: number;
const { analyser } = defineProps<{ analyser: AnalyserNode }>()
// Use our Composable to get the audio element from the Record
const { getElem } = useStoreRef();
const { sRef: audioEl } = getElem('audioEl')
/**
* MDN: FFT represents the window size in samples that is used when performing
* a Fast Fourier Transform (FFT) to get frequency domain data.
* Use web audio API to get the total number of data points
* available to the AudioContext sampleRate.
* I could not make this method a composable; I can't see how, since a mousedown interaction is needed first (async/await?)
*/
const createAnalyserData = () => {
/* eslint-disable */
analyser.fftSize = 128;
/* eslint-enable */
bufferLength = analyser.frequencyBinCount;
audioCtx = new AudioContext();
const audioSrc = audioCtx.createMediaElementSource(audioEl.value as HTMLMediaElement);
analyser.value = audioCtx.createAnalyser();
audioSrc.connect(analyser.value);
analyser.value.connect(audioCtx.destination);
analyser.value.fftSize = 128;
bufferLength = analyser.value.frequencyBinCount;
dataArray = new Uint8Array(bufferLength);
if (audioCtx.state === "suspended") {
audioCtx.resume();
}
barWidth = (canvas.value!.width / bufferLength);
}
Expand All @@ -32,7 +48,7 @@ const startAnimRequest = () => {
let x = 0;
if (canvas.value && ctx) {
ctx.clearRect(0, 0, canvas.value.width, canvas.value.height);
analyser.getByteFrequencyData(dataArray);
analyser.value?.getByteFrequencyData(dataArray);
for (let i = 0; i < bufferLength; i++) {
const barHeight = dataArray[i];
ctx.fillStyle = "#000102";
Expand All @@ -51,19 +67,19 @@ const cancelAnimRequest = () => {
}, 800);
}
//Watch for analyser to be defined (we have to wait for user to interact with page)
watch(() => analyser, () => {
if (analyser.fftSize) createAnalyserData();
})
onMounted(() => {
if (canvas.value) ctx = canvas.value.getContext("2d")!;
})
if (canvas.value) ctx = canvas.value.getContext("2d")!
audioEl.value?.addEventListener('playing', startAnimRequest);
audioEl.value?.addEventListener('paused', cancelAnimRequest);
defineExpose({
startAnimRequest,
cancelAnimRequest
// One time event to create onetime AudioContext
const cleanup = useEventListener(document, 'mousedown', () => {
cleanup();
createAnalyserData();
})
})
</script>

<template>
Expand Down
21 changes: 21 additions & 0 deletions src/composable/useStoreRef.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { shallowRef, toRef, type ShallowRef } from "vue";
import type { Ref } from "vue";

// Module-level registry shared by every caller of useStoreRef().
// NOTE(review): created with shallowRef, so mutations of store.value
// (e.g. `store.value[key] = el`) are NOT reactive — the previous
// `Ref<Record<...>>` annotation overstated this; callers only hand off
// and read stored refs, so shallow tracking is sufficient here.
const store = shallowRef<Record<string, ShallowRef<HTMLElement | null>>>({});

/**
 * Tiny keyed store for sharing template refs (e.g. the <audio> element)
 * across components without prop drilling.
 *
 * @returns addElem - register a template ref under a string key
 * @returns getElem - retrieve a previously registered ref by key
 */
export const useStoreRef = () => {

  /** Register `el` under `key`, overwriting any previous entry. */
  const addElem = (key: string, el: ShallowRef<HTMLElement | null>) => {
    store.value[key] = el;
  };

  /**
   * Look up the ref stored under `key`.
   * toRef() returns the stored ref as-is when the key exists; for an
   * unknown key it yields a ref wrapping `undefined`, so callers should
   * guard access (e.g. with optional chaining).
   */
  const getElem = (key: string) => {
    const sRef = toRef(store.value[key]);
    return { sRef };
  };

  return { addElem, getElem };
};



0 comments on commit 7e7828f

Please sign in to comment.