Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor w/ Centralized State #35

Merged
merged 16 commits into from
Oct 19, 2024
Merged
2 changes: 2 additions & 0 deletions app/.eslintrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ const config = {
"plugin:jsx-a11y/recommended",
],
rules: {
"@typescript-eslint/consistent-type-definitions": "off",
"@typescript-eslint/no-unused-vars": [
"error",
{ argsIgnorePattern: "^_", varsIgnorePattern: "^_" },
Expand All @@ -43,6 +44,7 @@ const config = {
"jsx-a11y/no-static-element-interactions": "off",
"jsx-a11y/heading-has-content": "off",
"react/no-unknown-property": "off",
"react/display-name": "off",
},
globals: {
React: "writable",
Expand Down
2 changes: 2 additions & 0 deletions app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
"private": true,
"scripts": {
"dev": "vite",
"mobile": "vite --host",
"build": "tsc && vite build",
"preview": "vite preview",
"typecheck": "tsc --noEmit",
Expand All @@ -31,6 +32,7 @@
"@tanstack/react-query": "5.56.2",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"framer-motion": "^11.11.7",
"lucide-react": "^0.445.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
Expand Down
24 changes: 24 additions & 0 deletions app/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 9 additions & 7 deletions app/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@ import AudioAnalyzer from "@/components/analyzers/audioAnalyzer";
import AudioScopeCanvas from "@/components/canvas/AudioScope";
import Visual3DCanvas from "@/components/canvas/Visual3D";
import { ControlsPanel } from "@/components/controls/main";
import { useModeContext } from "@/context/mode";
import { APPLICATION_MODE, type ApplicationMode } from "@/lib/applicationModes";
import {
APPLICATION_MODE,
type TApplicationMode,
} from "@/lib/applicationModes";

import { useAppStateActions } from "./lib/appState";
import { useAppStateActions, useMode } from "./lib/appState";

const getAnalyzerComponent = (mode: ApplicationMode) => {
const getAnalyzerComponent = (mode: TApplicationMode) => {
switch (mode) {
case APPLICATION_MODE.AUDIO:
case APPLICATION_MODE.AUDIO_SCOPE:
Expand All @@ -22,22 +24,22 @@ const getAnalyzerComponent = (mode: ApplicationMode) => {
}
};

const getCanvasComponent = (mode: ApplicationMode) => {
const getCanvasComponent = (mode: TApplicationMode) => {
switch (mode) {
case APPLICATION_MODE.AUDIO_SCOPE:
return <AudioScopeCanvas />;
case APPLICATION_MODE.WAVE_FORM:
case APPLICATION_MODE.NOISE:
case APPLICATION_MODE.AUDIO:
case APPLICATION_MODE.PARTICLE_NOISE:
return <Visual3DCanvas mode={mode} />;
return <Visual3DCanvas />;
default:
return mode satisfies never;
}
};

const App = () => {
const { mode } = useModeContext();
const mode = useMode();
const { noteCanvasInteraction } = useAppStateActions();

return (
Expand Down
149 changes: 83 additions & 66 deletions app/src/components/analyzers/audioAnalyzer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,89 +5,113 @@ import {
AUDIO_SOURCE,
buildAudio,
buildAudioContext,
type TAudioSource,
} from "@/components/audio/sourceControls/common";
import MicrophoneAudioControls from "@/components/audio/sourceControls/mic";
import ScreenShareControls from "@/components/audio/sourceControls/screenshare";
import { useAudioSourceContext } from "@/context/audioSource";
import { useMediaStreamLink } from "@/lib/analyzers/common";
import {
useMediaStreamLink,
type TAnalyzerInputControl,
} from "@/lib/analyzers/common";
import FFTAnalyzer from "@/lib/analyzers/fft";
import ScopeAnalyzer from "@/lib/analyzers/scope";
import { APPLICATION_MODE } from "@/lib/applicationModes";
import { useAudio } from "@/lib/appState";

import { AudioScopeAnalyzerControls } from "./scopeAnalyzerControls";

const InternalAudioAnalyzer = ({
mode,
audioSource,
/**
 * Create a fresh HTMLAudioElement together with a ScopeAnalyzer attached to
 * a newly built AudioContext. Returned as `{ audio, analyzer }` so callers
 * can wire the element to a source and drive the analyzer independently.
 */
const buildScopeAnalyzer = () => {
  const context = buildAudioContext();
  const element = buildAudio();
  const analyzer = new ScopeAnalyzer(element, context);
  return { audio: element, analyzer };
};

const buildFFTAnalyzer = (volume: number) => {
const audioCtx = buildAudioContext();
const audio = buildAudio();
return {
audio,
analyzer: new FFTAnalyzer(audio, audioCtx, volume),
};
};

const ControlledMicAnalyzer = ({
audio,
analyzer,
}: {
mode: "AUDIO" | "AUDIO_SCOPE";
audioSource: "SOUNDCLOUD" | "FILE_UPLOAD";
audio: HTMLAudioElement;
analyzer: TAnalyzerInputControl;
}) => {
const audioCtx = useMemo(() => buildAudioContext(), []);
const audio = useMemo(() => buildAudio(), []);
const analyzer = useMemo(() => {
console.log("Creating analyzer...");
switch (mode) {
case APPLICATION_MODE.AUDIO:
return new FFTAnalyzer(audio, audioCtx, 1.0);
case APPLICATION_MODE.AUDIO_SCOPE:
return new ScopeAnalyzer(audio, audioCtx);
default:
return mode satisfies never;
}
}, [mode, audio, audioCtx]);
const { onDisabled, onStreamCreated } = useMediaStreamLink(audio, analyzer);
return (
<MicrophoneAudioControls
audio={audio}
onDisabled={onDisabled}
onStreamCreated={onStreamCreated}
/>
);
};

const ControlledScreenShareAnalyzer = ({
audio,
analyzer,
}: {
audio: HTMLAudioElement;
analyzer: TAnalyzerInputControl;
}) => {
const { onDisabled, onStreamCreated } = useMediaStreamLink(audio, analyzer);
return (
<>
<ControlledAudioSource audio={audio} audioSource={audioSource} />
{analyzer instanceof FFTAnalyzer ? (
<FFTAnalyzerControls analyzer={analyzer} />
) : analyzer instanceof ScopeAnalyzer ? (
<AudioScopeAnalyzerControls analyzer={analyzer} />
) : (
(analyzer satisfies never)
)}
</>
<ScreenShareControls
audio={audio}
onDisabled={onDisabled}
onStreamCreated={onStreamCreated}
/>
);
};

const InternalMediaStreamAnalyzer = ({
/**
 * Whether the given audio source is a live capture stream (microphone or
 * screen share) rather than playable content (SoundCloud or an uploaded
 * file). The `satisfies never` default keeps this switch exhaustive: adding
 * a new AUDIO_SOURCE member becomes a compile-time error here.
 */
const isMediaStream = (source: TAudioSource) => {
  switch (source) {
    case AUDIO_SOURCE.SOUNDCLOUD:
    case AUDIO_SOURCE.FILE_UPLOAD:
      return false;
    case AUDIO_SOURCE.MICROPHONE:
    case AUDIO_SOURCE.SCREEN_SHARE:
      return true;
    default:
      return source satisfies never;
  }
};

const ControlledAnalyzer = ({
mode,
audioSource,
}: {
mode: "AUDIO" | "AUDIO_SCOPE";
audioSource: "MICROPHONE" | "SCREEN_SHARE";
mode: typeof APPLICATION_MODE.AUDIO | typeof APPLICATION_MODE.AUDIO_SCOPE;
audioSource: TAudioSource;
}) => {
const audioCtx = useMemo(() => buildAudioContext(), []);
const audio = useMemo(() => buildAudio(), []);
const analyzer = useMemo(() => {
console.log("Creating analyzer...");
const { audio, analyzer } = useMemo(() => {
switch (mode) {
case APPLICATION_MODE.AUDIO:
return new FFTAnalyzer(audio, audioCtx, 0.0);
return buildFFTAnalyzer(isMediaStream(audioSource) ? 0.0 : 1.0);
case APPLICATION_MODE.AUDIO_SCOPE:
return new ScopeAnalyzer(audio, audioCtx);
return buildScopeAnalyzer();
default:
return mode satisfies never;
}
}, [audio, audioCtx, mode]);

const { onDisabled, onStreamCreated } = useMediaStreamLink(audio, analyzer);
}, [mode, audioSource]);

return (
<>
{audioSource === AUDIO_SOURCE.MICROPHONE ? (
<MicrophoneAudioControls
audio={audio}
onDisabled={onDisabled}
onStreamCreated={onStreamCreated}
/>
<ControlledMicAnalyzer audio={audio} analyzer={analyzer} />
) : audioSource === AUDIO_SOURCE.SCREEN_SHARE ? (
<ScreenShareControls
audio={audio}
onDisabled={onDisabled}
onStreamCreated={onStreamCreated}
/>
<ControlledScreenShareAnalyzer audio={audio} analyzer={analyzer} />
) : audioSource === AUDIO_SOURCE.SOUNDCLOUD ||
audioSource === AUDIO_SOURCE.FILE_UPLOAD ? (
<ControlledAudioSource audio={audio} audioSource={audioSource} />
) : (
(audioSource satisfies never)
)}
Expand All @@ -102,21 +126,14 @@ const InternalMediaStreamAnalyzer = ({
);
};

const AudioAnalyzer = ({ mode }: { mode: "AUDIO" | "AUDIO_SCOPE" }) => {
const { audioSource } = useAudioSourceContext();
const AudioAnalyzer = ({
mode,
}: {
mode: typeof APPLICATION_MODE.AUDIO | typeof APPLICATION_MODE.AUDIO_SCOPE;
}) => {
const { source } = useAudio();

switch (audioSource) {
case AUDIO_SOURCE.SOUNDCLOUD:
case AUDIO_SOURCE.FILE_UPLOAD:
return <InternalAudioAnalyzer mode={mode} audioSource={audioSource} />;
case AUDIO_SOURCE.MICROPHONE:
case AUDIO_SOURCE.SCREEN_SHARE:
return (
<InternalMediaStreamAnalyzer mode={mode} audioSource={audioSource} />
);
default:
return audioSource satisfies never;
}
return <ControlledAnalyzer mode={mode} audioSource={source} />;
};

export default AudioAnalyzer;
29 changes: 13 additions & 16 deletions app/src/components/analyzers/fftAnalyzerControls.tsx
Original file line number Diff line number Diff line change
@@ -1,21 +1,18 @@
import { useCallback, useEffect, useRef } from "react";
import { useFFTAnalyzerContext } from "@/context/fftAnalyzer";
import type FFTAnalyzer from "@/lib/analyzers/fft";
import {
useAppStateActions,
useEnergyInfo,
useVisualSourceDataX,
} from "@/lib/appState";
import { useAnalyzerFFT, useMappers } from "@/lib/appState";
import { COORDINATE_MAPPER_REGISTRY } from "@/lib/mappers/coordinateMappers/registry";

export const FFTAnalyzerControls = ({
analyzer,
}: {
analyzer: FFTAnalyzer;
}) => {
const { octaveBandMode, energyMeasure } = useFFTAnalyzerContext();
const freqData = useVisualSourceDataX();
const energyInfo = useEnergyInfo();
const { resizeVisualSourceData } = useAppStateActions();
const { octaveBandMode, energyMeasure } = useAnalyzerFFT();
const { energyTracker } = useMappers();
const coordinateMapperData =
COORDINATE_MAPPER_REGISTRY.data.hooks.useInstance();
const { setParams } = COORDINATE_MAPPER_REGISTRY.data.hooks.useActions();
const animationRequestRef = useRef<number>(null!);

/**
Expand All @@ -24,18 +21,18 @@ export const FFTAnalyzerControls = ({
const mapData = useCallback(() => {
const bars = analyzer.getBars();

if (freqData.length != bars.length) {
if (coordinateMapperData.data.length != bars.length) {
console.log(`Resizing ${bars.length}`);
resizeVisualSourceData(bars.length);
setParams({ size: bars.length });
return;
}

energyInfo.current = analyzer.getEnergy(energyMeasure);
energyTracker?.set(analyzer.getEnergy(energyMeasure));

bars.forEach(({ value }, index) => {
freqData[index] = value;
coordinateMapperData.data[index] = value;
});
}, [freqData, analyzer, resizeVisualSourceData, energyInfo, energyMeasure]);
}, [coordinateMapperData, analyzer, energyTracker, energyMeasure, setParams]);

/**
* Re-Synchronize the animation loop if the target data destination changes.
Expand All @@ -50,7 +47,7 @@ export const FFTAnalyzerControls = ({
};
animationRequestRef.current = requestAnimationFrame(animate);
return () => cancelAnimationFrame(animationRequestRef.current);
}, [freqData, energyMeasure, mapData]);
}, [coordinateMapperData, energyMeasure, mapData]);

/**
* Make sure an analyzer exists with the correct mode
Expand Down
Loading
Loading