This repository has been archived by the owner on Jun 28, 2024. It is now read-only.

Mute track #49

Merged · 25 commits · Jun 10, 2024
4 changes: 1 addition & 3 deletions README.md
@@ -111,9 +111,7 @@ async function startScreenSharing(webrtc: WebRTCEndpoint) {
// Add local MediaStream to webrtc
screenStream
.getTracks()
.forEach((track) =>
webrtc.addTrack(track, screenStream, { type: 'screen' }),
);
.forEach((track) => webrtc.addTrack(track, { type: 'screen' }));
}
```
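
For context, the README change above (and the matching call-site updates in `e2e/app/src/App.tsx` and `MockComponent.tsx` below) reflects an `addTrack` signature that no longer takes the owning `MediaStream`. A minimal sketch of the updated call, assuming the same parameter order as the call sites changed in this PR (track, app-defined metadata, optional simulcast config, optional bandwidth limit):

```typescript
// Sketch only — parameter order is inferred from the call sites changed in this
// PR, and the metadata shape is app-defined, not dictated by the library.
import {
  BandwidthLimit,
  SimulcastConfig,
  WebRTCEndpoint,
} from '@fishjam-dev/ts-client';

async function publishCamera(webrtc: WebRTCEndpoint, track: MediaStreamTrack) {
  const simulcastConfig: SimulcastConfig = {
    enabled: true,
    activeEncodings: ['l', 'm', 'h'],
    disabledEncodings: [],
  };
  const maxBandwidth: BandwidthLimit = 0; // value used by the e2e app

  // The owning MediaStream is no longer passed; only the track plus metadata
  // (and, optionally, simulcast config and a bandwidth limit) are given.
  await webrtc.addTrack(track, { type: 'camera' }, simulcastConfig, maxBandwidth);
}
```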

2 changes: 2 additions & 0 deletions e2e/app/package-lock.json

Some generated files are not rendered by default.

11 changes: 3 additions & 8 deletions e2e/app/src/App.tsx
@@ -9,7 +9,7 @@
BandwidthLimit,
SimulcastConfig,
} from '@fishjam-dev/ts-client';
import { PeerMessage } from './protos/fishjam/peer_notifications';
import { PeerMessage } from '@fishjam-dev/ts-client/protos';
import { useEffect, useState, useSyncExternalStore } from 'react';
import { MockComponent } from './MockComponent';
import { VideoPlayerWithDetector } from './VideoPlayerWithDetector';
@@ -24,7 +24,7 @@
goodTrack: string;
};

function endpointMetadataParser(a: any): EndpointMetadata {
if (
typeof a !== 'object' ||
a === null ||
@@ -35,7 +35,7 @@
return { goodStuff: a.goodStuff };
}

function trackMetadataParser(a: any): TrackMetadata {
if (
typeof a !== 'object' ||
a === null ||
@@ -154,7 +154,7 @@
websocket.send(message);
});

const messageHandler = (event: MessageEvent<any>) => {
const uint8Array = new Uint8Array(event.data);
try {
const data = PeerMessage.decode(uint8Array);
@@ -186,19 +186,19 @@

websocket.addEventListener('message', messageHandler);

const closeHandler = (event: any) => {
console.log({ name: 'Close handler!', event });
};

websocket.addEventListener('close', closeHandler);

const errorHandler = (event: any) => {
console.log({ name: 'Error handler!', event });
};

websocket.addEventListener('error', errorHandler);

const trackReady = (event: any) => {
console.log({ name: 'trackReady', event });
};

@@ -217,13 +217,7 @@
};
const maxBandwidth: BandwidthLimit = 0;

return webrtc.addTrack(
track,
stream,
trackMetadata,
simulcastConfig,
maxBandwidth,
);
return webrtc.addTrack(track, trackMetadata, simulcastConfig, maxBandwidth);
}

export function App() {
@@ -323,6 +317,7 @@
<VideoPlayerWithDetector
id={endpoint.id}
stream={stream ?? undefined}
webrtc={webrtc}
/>
</div>
<div data-name="stream-id">{stream?.id}</div>
18 changes: 8 additions & 10 deletions e2e/app/src/MockComponent.tsx
@@ -7,11 +7,12 @@ import {
SimulcastConfig,
WebRTCEndpoint,
} from '@fishjam-dev/ts-client';
import { MuteTrackTest } from './MuteTrackTest';

const brainMock = createStream('🧠', 'white', 'low', 24);
const brain2Mock = createStream('🤯', '#00ff00', 'low', 24);
const heartMock = createStream('🫀', 'white', 'low', 24);
const heart2Mock = createStream('💝', '#FF0000', 'low', 24);
export const brainMock = createStream('🧠', 'white', 'low', 24);
export const brain2Mock = createStream('🤯', '#00ff00', 'low', 24);
export const heartMock = createStream('🫀', 'white', 'low', 24);
export const heart2Mock = createStream('💝', '#FF0000', 'low', 24);

type Props = {
webrtc: WebRTCEndpoint<EndpointMetadata, TrackMetadata>;
@@ -31,11 +32,7 @@ export const MockComponent = ({ webrtc }: Props) => {
const stream = heartMock.stream;
const track = stream.getVideoTracks()[0];

heartId.current = webrtc.addTrack(
track,
stream,
JSON.parse(trackMetadataInput),
);
heartId.current = webrtc.addTrack(track, JSON.parse(trackMetadataInput));
};

const removeHeart = async () => {
@@ -90,7 +87,6 @@ export const MockComponent = ({ webrtc }: Props) => {

brainId.current = webrtc.addTrack(
track,
stream,
JSON.parse(trackMetadataInput),
simulcastConfig,
maxBandwidth,
@@ -150,6 +146,8 @@ export const MockComponent = ({ webrtc }: Props) => {
<button onClick={addBoth}>Add both</button>
<button onClick={addAndReplaceHeart}>Add and replace a heart</button>
<button onClick={addAndRemoveHeart}>Add and remove a heart</button>

<MuteTrackTest webrtc={webrtc} />
</div>
);
};
121 changes: 121 additions & 0 deletions e2e/app/src/MuteTrackTest.tsx
@@ -0,0 +1,121 @@
import { WebRTCEndpoint } from '@fishjam-dev/ts-client';
import { brain2Mock, heart2Mock } from './MockComponent';
import { useEffect, useState } from 'react';
import { VideoPlayer } from './VideoPlayer';
import { WebRTCEndpointEvents } from '../../../src';
import { EndpointMetadata, TrackMetadata } from './App';

type Props = {
webrtc: WebRTCEndpoint;
};

export const MuteTrackTest = ({ webrtc }: Props) => {
const [currentStream, setCurrentStream] = useState<MediaStream | null>();
const [currentTrack, setCurrentTrack] = useState<MediaStreamTrack | null>();
const [trackId, setTrackId] = useState<string | null>(null);

useEffect(() => {
const localTrackAdded: WebRTCEndpointEvents<
EndpointMetadata,
TrackMetadata
>['localTrackAdded'] = (event) => {
setCurrentStream(event.stream);
setCurrentTrack(event.track);
setTrackId(event.trackId);
};

const localTrackReplaced: WebRTCEndpointEvents<
EndpointMetadata,
TrackMetadata
>['localTrackReplaced'] = (event) => {
setCurrentTrack(event.track);
};

webrtc.on('localTrackAdded', localTrackAdded);
webrtc.on('localTrackReplaced', localTrackReplaced);

return () => {
webrtc.removeListener('localTrackAdded', localTrackAdded);
webrtc.removeListener('localTrackReplaced', localTrackReplaced);
};
}, [webrtc]);

const addTrack = async (stream: MediaStream) => {
const track = stream.getVideoTracks()[0];

if (!track) throw Error("Stream doesn't have any track");

await webrtc.addTrack(
track,
{ goodTrack: 'camera' },
{
enabled: true,
activeEncodings: ['l', 'm', 'h'],
disabledEncodings: [],
},
);
};

const replaceTrack = async (
trackId: string | null,
stream: MediaStream | null,
track: MediaStreamTrack | null,
) => {
if (!trackId) throw Error('Track id is null');

await webrtc.replaceTrack(trackId, track);
};

return (
<div
style={{
display: 'flex',
flexDirection: 'column',
padding: '8px',
borderStyle: 'dotted',
borderWidth: '1px',
borderColor: 'black',
}}>
<div>
<span>track: {currentTrack?.id ?? 'null'}</span>
</div>
<div>
<button
disabled={!!currentStream || !!trackId}
onClick={() => addTrack(heart2Mock.stream)}>
Add heart
</button>
<button
disabled={!!currentStream || !!trackId}
onClick={() => addTrack(brain2Mock.stream)}>
Add brain
</button>
<button
onClick={() =>
replaceTrack(
trackId,
heart2Mock.stream,
heart2Mock.stream.getVideoTracks()[0],
)
}>
Replace with heart
</button>
<button
onClick={() =>
replaceTrack(
trackId,
brain2Mock.stream,
brain2Mock.stream.getVideoTracks()[0],
)
}>
Replace with brain
</button>
<button onClick={() => replaceTrack(trackId, null, null)}>
Mute track
</button>
</div>

<div>{currentStream && <VideoPlayer stream={currentStream} />}</div>
</div>
);
};
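
The new `MuteTrackTest` component above is the clearest illustration of what this PR means by muting: the local track id is kept, and `replaceTrack` is called with `null` to stop sending media, or with a new `MediaStreamTrack` to resume. A minimal sketch of that flow, assuming `trackId` was captured from the `localTrackAdded` event as the component does:

```typescript
// Sketch based on MuteTrackTest.tsx above; `webrtc` and `trackId` are assumed
// to come from the surrounding app (trackId is delivered by 'localTrackAdded').
import { WebRTCEndpoint } from '@fishjam-dev/ts-client';
import { heart2Mock } from './MockComponent';

async function muteThenSwap(webrtc: WebRTCEndpoint, trackId: string) {
  await webrtc.replaceTrack(trackId, null); // "Mute track" button: stop sending media

  const freshTrack = heart2Mock.stream.getVideoTracks()[0];
  await webrtc.replaceTrack(trackId, freshTrack); // un-mute by supplying a new track
}
```
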
44 changes: 43 additions & 1 deletion e2e/app/src/VideoPlayerWithDetector.tsx
@@ -1,9 +1,11 @@
import { useEffect, useRef, useState } from 'react';
import { getPixel, Pixel } from './mocks';
import { WebRTCEndpoint } from '@fishjam-dev/ts-client';

type Props = {
stream?: MediaStream;
id?: string;
webrtc: WebRTCEndpoint;
};

const rgbToText = (pixel: Pixel): string => {
@@ -17,15 +19,51 @@
return 'unknown';
};

export const VideoPlayerWithDetector = ({ stream, id }: Props) => {
const getTrackIdentifierToInboundRtp = (
stats: RTCStatsReport,
): Record<string, any> => {
const result: Record<string, any> = {};

stats.forEach((report) => {
if (report.type === 'inbound-rtp') {
result[report.trackIdentifier] = report;
}
});

return result;
};

export const VideoPlayerWithDetector = ({ stream, id, webrtc }: Props) => {
const videoElementRef = useRef<HTMLVideoElement>(null);
const [color, setColor] = useState<string>('');
const [decodedFrames, setDecodedFrames] = useState<string>('');

useEffect(() => {
if (!videoElementRef.current) return;
videoElementRef.current.srcObject = stream || null;
}, [stream]);

const getDecodedFrames = async () => {
const connection = webrtc['connection'];
if (!connection) return 0;

const inbound = getTrackIdentifierToInboundRtp(await connection.getStats());

const trackId = stream?.getVideoTracks()?.[0]?.id ?? '';

return inbound[trackId]?.framesDecoded ?? 0;
};

useEffect(() => {
const id = setInterval(async () => {
setDecodedFrames(await getDecodedFrames());
}, 50);

return () => {
clearInterval(id);
};
}, [stream]);

useEffect(() => {
const id = setInterval(() => {
const videoElement = videoElementRef.current;
Expand All @@ -51,6 +89,10 @@
return (
<div>
<div data-color-name={color}>{color}</div>
<div>
Decoded frames:
<span data-decoded-frames={decodedFrames}>{decodedFrames}</span>
</div>
<video
id={id}
style={{ maxHeight: '90px' }}
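
The decoded-frames counter added to `VideoPlayerWithDetector` gives the e2e suite an observable signal for muting: the component polls `framesDecoded` from the `inbound-rtp` stats every 50 ms and mirrors it into a `data-decoded-frames` attribute, so a test can assert that the counter stops growing once the sender mutes via `replaceTrack(trackId, null)`. A hypothetical Playwright-style check (not part of this PR; selector and timing are assumptions):

```typescript
import { expect, Page } from '@playwright/test';

// Hypothetical helper, not from this PR: assert no new frames were decoded
// for a remote track while it was muted.
async function expectNoNewDecodedFrames(page: Page) {
  const counter = page.locator('[data-decoded-frames]').first();
  const before = Number(await counter.getAttribute('data-decoded-frames'));
  await page.waitForTimeout(1000); // give the 50 ms poller time to update
  const after = Number(await counter.getAttribute('data-decoded-frames'));
  expect(after).toBe(before);
}
```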