Skip to content

Commit

Permalink
sample changes
Browse files Browse the repository at this point in the history
  • Loading branch information
niyatim23 committed Apr 23, 2024
1 parent 04b15ce commit 1836f5f
Show file tree
Hide file tree
Showing 7 changed files with 165 additions and 30 deletions.
24 changes: 20 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -273,15 +273,18 @@ After executing `make` you will have sample applications in your `build/samples`
#### Sample: kvsWebrtcClientMaster
This application sends sample H264/Opus frames (path: `/samples/h264SampleFrames` and `/samples/opusSampleFrames`) via WebRTC. It also accepts incoming audio, if enabled in the browser. When checked in the browser, it prints the metadata of the received audio packets in your terminal. To run:
```shell
./samples/kvsWebrtcClientMaster <channelName>
./samples/kvsWebrtcClientMaster <channelName> <storage-option> <audio-codec> <video-codec>
```

To use the **Storage for WebRTC** feature, run the same command as above but with an additional command line arg to enable the feature.

```shell
./samples/kvsWebrtcClientMaster <channelName> 1
./samples/kvsWebrtcClientMaster <channelName> 1 opus h265
```

Supported audio codecs: `aac`, `opus`
Supported video codecs: `h264`, `h265`

#### Sample: kvsWebrtcClientMasterGstSample
This application can send media from a GStreamer pipeline using test H264/Opus frames, device `autovideosrc` and `autoaudiosrc` input, or a received RTSP stream. It also will playback incoming audio via an `autoaudiosink`. To run:
```shell
Expand All @@ -292,23 +295,36 @@ Pass the desired media and source type when running the sample. The mediaType ca
./samples/kvsWebrtcClientMasterGstSample <channelName> <mediaType> rtspsrc rtsp://<rtspUri>
```

To use the test source with specific audio and video codecs, run:
```shell
./samples/kvsWebrtcClientMasterGstSample <channelName> audio-video testsrc opus h264
```

Audio codec: aac, opus
Video codec: h264, h265

#### Sample: kvsWebrtcClientViewer
This application accepts sample H264/Opus frames by default. You can use other supported codecs by changing the value for `videoTrack.codec` and `audioTrack.codec` in _Common.c_. By default, this sample only logs the size of the audio and video buffer it receives. To write these frames to a file using GStreamer, use the _kvsWebrtcClientViewerGstSample_ instead.

To run:
```shell
./samples/kvsWebrtcClientViewer <channelName>
./samples/kvsWebrtcClientViewer <channelName> <audio-codec> <video-codec>
```

Audio codec: aac, opus
Video codec: h264, h265

#### Sample: kvsWebrtcClientViewerGstSample
This application is similar to the kvsWebrtcClientViewer. However, instead of just logging the media it receives, it generates a file using filesink. Make sure that your device has enough space to write the media to a file. You can also customize the receiving logic by modifying the functions in _GstAudioVideoReceiver.c_

To run:
```shell
./samples/kvsWebrtcClientViewerGstSample <channelName> <mediaType>
./samples/kvsWebrtcClientViewerGstSample <channelName> <mediaType> <audio-codec> <video-codec>
```

Audio codec: aac, opus
Video codec: h264, h265

#### Sample: Generating sample frames

##### H264
Expand Down
15 changes: 8 additions & 7 deletions samples/Common.c
Original file line number Diff line number Diff line change
Expand Up @@ -569,13 +569,12 @@ STATUS createSampleStreamingSession(PSampleConfiguration pSampleConfiguration, P
}
#endif

// Declare that we support H264,Profile=42E01F,level-asymmetry-allowed=1,packetization-mode=1 and Opus
CHK_STATUS(addSupportedCodec(pSampleStreamingSession->pPeerConnection, RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE));
CHK_STATUS(addSupportedCodec(pSampleStreamingSession->pPeerConnection, RTC_CODEC_OPUS));
CHK_STATUS(addSupportedCodec(pSampleStreamingSession->pPeerConnection, pSampleConfiguration->videoCodec));
CHK_STATUS(addSupportedCodec(pSampleStreamingSession->pPeerConnection, pSampleConfiguration->audioCodec));

// Add a SendRecv Transceiver of type video
videoTrack.kind = MEDIA_STREAM_TRACK_KIND_VIDEO;
videoTrack.codec = RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE;
videoTrack.codec = pSampleConfiguration->videoCodec;
videoRtpTransceiverInit.direction = RTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV;
videoRtpTransceiverInit.rollingBufferDurationSec = 3;
// Considering 4 Mbps for 720p (which is what our samples use). This is for H.264.
Expand All @@ -591,7 +590,7 @@ STATUS createSampleStreamingSession(PSampleConfiguration pSampleConfiguration, P

// Add a SendRecv Transceiver of type audio
audioTrack.kind = MEDIA_STREAM_TRACK_KIND_AUDIO;
audioTrack.codec = RTC_CODEC_OPUS;
audioTrack.codec = pSampleConfiguration->audioCodec;
audioRtpTransceiverInit.direction = RTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV;
audioRtpTransceiverInit.rollingBufferDurationSec = 3;
// For opus, the bitrate could be between 6 Kbps to 510 Kbps
Expand Down Expand Up @@ -801,8 +800,8 @@ STATUS lookForSslCert(PSampleConfiguration* ppSampleConfiguration)
return retStatus;
}

STATUS createSampleConfiguration(PCHAR channelName, SIGNALING_CHANNEL_ROLE_TYPE roleType, BOOL trickleIce, BOOL useTurn, UINT32 logLevel,
PSampleConfiguration* ppSampleConfiguration)
STATUS createSampleConfiguration(PCHAR channelName, SIGNALING_CHANNEL_ROLE_TYPE roleType, BOOL trickleIce, BOOL useTurn, UINT32 logLevel,
RTC_CODEC audioCodec, RTC_CODEC videoCodec, PSampleConfiguration* ppSampleConfiguration)
{
STATUS retStatus = STATUS_SUCCESS;
PCHAR pAccessKey, pSecretKey, pSessionToken;
Expand Down Expand Up @@ -913,6 +912,8 @@ STATUS createSampleConfiguration(PCHAR channelName, SIGNALING_CHANNEL_ROLE_TYPE
pSampleConfiguration->iceCandidatePairStatsTimerId = MAX_UINT32;
pSampleConfiguration->pregenerateCertTimerId = MAX_UINT32;
pSampleConfiguration->signalingClientMetrics.version = SIGNALING_CLIENT_METRICS_CURRENT_VERSION;
pSampleConfiguration->audioCodec = audioCodec;
pSampleConfiguration->videoCodec = videoCodec;

ATOMIC_STORE_BOOL(&pSampleConfiguration->interrupted, FALSE);
ATOMIC_STORE_BOOL(&pSampleConfiguration->mediaThreadStarted, FALSE);
Expand Down
11 changes: 10 additions & 1 deletion samples/Samples.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,13 @@ extern "C" {
#define DEFAULT_AUDIO_AAC_BITS_PER_SAMPLE 16
#define DEFAULT_MAX_CONCURRENT_STREAMING_SESSION 10

// Codec name strings accepted as command-line arguments by the sample applications
#define AUDIO_CODEC_NAME_G711 "g711"
#define AUDIO_CODEC_NAME_OPUS "opus"
#define AUDIO_CODEC_NAME_AAC "aac"
#define VIDEO_CODEC_NAME_H264 "h264"
#define VIDEO_CODEC_NAME_H265 "h265"
#define VIDEO_CODEC_NAME_VP8 "vp8"

#define SAMPLE_MASTER_CLIENT_ID "ProducerMaster"
#define SAMPLE_VIEWER_CLIENT_ID "ConsumerViewer"
#define SAMPLE_CHANNEL_NAME (PCHAR) "ScaryTestChannel"
Expand Down Expand Up @@ -118,6 +125,8 @@ typedef struct {
PCHAR pCaCertPath;
PAwsCredentialProvider pCredentialProvider;
SIGNALING_CLIENT_HANDLE signalingClientHandle;
RTC_CODEC audioCodec;
RTC_CODEC videoCodec;
PBYTE pAudioFrameBuffer;
UINT32 audioBufferSize;
PBYTE pVideoFrameBuffer;
Expand Down Expand Up @@ -230,7 +239,7 @@ PVOID sampleReceiveAudioVideoFrame(PVOID);
PVOID getPeriodicIceCandidatePairStats(PVOID);
STATUS getIceCandidatePairStatsCallback(UINT32, UINT64, UINT64);
STATUS pregenerateCertTimerCallback(UINT32, UINT64, UINT64);
STATUS createSampleConfiguration(PCHAR, SIGNALING_CHANNEL_ROLE_TYPE, BOOL, BOOL, UINT32, RTC_CODEC, RTC_CODEC, PSampleConfiguration*);
STATUS freeSampleConfiguration(PSampleConfiguration*);
STATUS signalingClientStateChanged(UINT64, SIGNALING_CLIENT_STATE);
STATUS signalingMessageReceived(UINT64, PReceivedSignalingMessage);
Expand Down
47 changes: 40 additions & 7 deletions samples/kvsWebRTCClientMaster.c
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ INT32 main(INT32 argc, CHAR* argv[])
PCHAR pChannelName;
SignalingClientMetrics signalingClientMetrics;
signalingClientMetrics.version = SIGNALING_CLIENT_METRICS_CURRENT_VERSION;
RTC_CODEC audioCodec = RTC_CODEC_OPUS;
RTC_CODEC videoCodec = RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE;

SET_INSTRUMENTED_ALLOCATORS();
UINT32 logLevel = setLogLevel();
Expand All @@ -25,7 +27,19 @@ INT32 main(INT32 argc, CHAR* argv[])
pChannelName = argc > 1 ? argv[1] : SAMPLE_CHANNEL_NAME;
#endif

CHK_STATUS(createSampleConfiguration(pChannelName, SIGNALING_CHANNEL_ROLE_TYPE_MASTER, TRUE, TRUE, logLevel, &pSampleConfiguration));
if (argc >= 3) {
if (!STRCMP(argv[3], AUDIO_CODEC_NAME_AAC)) {
audioCodec = RTC_CODEC_AAC;
}
}

if (argc >= 4) {
if (!STRCMP(argv[4], VIDEO_CODEC_NAME_H265)) {
videoCodec = RTC_CODEC_H265;
}
}

CHK_STATUS(createSampleConfiguration(pChannelName, SIGNALING_CHANNEL_ROLE_TYPE_MASTER, TRUE, TRUE, logLevel, audioCodec, videoCodec, &pSampleConfiguration));

// Set the audio and video handlers
pSampleConfiguration->audioSource = sendAudioPackets;
Expand All @@ -44,11 +58,21 @@ INT32 main(INT32 argc, CHAR* argv[])

// Check if the samples are present

CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./h264SampleFrames/frame-0001.h264"));
DLOGI("[KVS Master] Checked sample video frame availability....available");
if (videoCodec == RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE) {
CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./h264SampleFrames/frame-0001.h264"));
DLOGI("[KVS Master] Checked H264 sample video frame availability....available");
} else if (videoCodec == RTC_CODEC_H265) {
CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./h265SampleFrames/frame-0001.h265"));
DLOGI("[KVS Master] Checked H265 sample video frame availability....available");
}

CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./opusSampleFrames/sample-001.opus"));
DLOGI("[KVS Master] Checked sample audio frame availability....available");
if (audioCodec == RTC_CODEC_OPUS) {
CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./opusSampleFrames/sample-001.opus"));
DLOGI("[KVS Master] Checked Opus sample audio frame availability....available");
} else if (audioCodec == RTC_CODEC_AAC) {
CHK_STATUS(readFrameFromDisk(NULL, &frameSize, "./aacSampleFrames/sample-001.aac"));
DLOGI("[KVS Master] Checked AAC sample audio frame availability....available");
}

// Initialize KVS WebRTC. This must be done before anything else, and must only be done once.
CHK_STATUS(initKvsWebRtc());
Expand Down Expand Up @@ -146,7 +170,11 @@ PVOID sendVideoPackets(PVOID args)

while (!ATOMIC_LOAD_BOOL(&pSampleConfiguration->appTerminateFlag)) {
fileIndex = fileIndex % NUMBER_OF_H264_FRAME_FILES + 1;
SNPRINTF(filePath, MAX_PATH_LEN, "./h264SampleFrames/frame-%04d.h264", fileIndex);
if (pSampleConfiguration->videoCodec == RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE) {
SNPRINTF(filePath, MAX_PATH_LEN, "./h264SampleFrames/frame-%04d.h264", fileIndex);
} else if (pSampleConfiguration->videoCodec == RTC_CODEC_H265) {
SNPRINTF(filePath, MAX_PATH_LEN, "./h265SampleFrames/frame-%04d.h265", fileIndex);
}

CHK_STATUS(readFrameFromDisk(NULL, &frameSize, filePath));

Expand Down Expand Up @@ -218,8 +246,13 @@ PVOID sendAudioPackets(PVOID args)

while (!ATOMIC_LOAD_BOOL(&pSampleConfiguration->appTerminateFlag)) {
fileIndex = fileIndex % NUMBER_OF_OPUS_FRAME_FILES + 1;
SNPRINTF(filePath, MAX_PATH_LEN, "./opusSampleFrames/sample-%03d.opus", fileIndex);

if (pSampleConfiguration->audioCodec == RTC_CODEC_AAC) {
SNPRINTF(filePath, MAX_PATH_LEN, "./aacSampleFrames/sample-%03d.aac", fileIndex);
} else if (pSampleConfiguration->audioCodec == RTC_CODEC_OPUS) {
SNPRINTF(filePath, MAX_PATH_LEN, "./opusSampleFrames/sample-%03d.opus", fileIndex);
}

CHK_STATUS(readFrameFromDisk(NULL, &frameSize, filePath));

// Re-alloc if needed
Expand Down
65 changes: 56 additions & 9 deletions samples/kvsWebRTCClientMasterGstSample.c
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
#include <gst/app/gstappsink.h>

extern PSampleConfiguration gSampleConfiguration;
RTC_CODEC audioCodec = DEFAULT_AUDIO_TRACK_ID;
RTC_CODEC videoCodec = DEFAULT_VIDEO_TRACK_ID;

// #define VERBOSE

Expand Down Expand Up @@ -63,7 +65,7 @@ GstFlowReturn on_new_sample(GstElement* sink, gpointer data, UINT64 trackid)
pSampleStreamingSession = pSampleConfiguration->sampleStreamingSessionList[i];
frame.index = (UINT32) ATOMIC_INCREMENT(&pSampleStreamingSession->frameIndex);

if (trackid == DEFAULT_AUDIO_TRACK_ID) {
if (trackid == audioCodec) {
pRtcRtpTransceiver = pSampleStreamingSession->pAudioRtcRtpTransceiver;
frame.presentationTs = pSampleStreamingSession->audioTimestamp;
frame.decodingTs = frame.presentationTs;
Expand Down Expand Up @@ -109,12 +111,12 @@ GstFlowReturn on_new_sample(GstElement* sink, gpointer data, UINT64 trackid)

// appsink callback for video buffers; tags the sample with the configured video codec.
// (Fused old/new diff lines left two return statements — the second was unreachable.)
GstFlowReturn on_new_sample_video(GstElement* sink, gpointer data)
{
    return on_new_sample(sink, data, videoCodec);
}

// appsink callback for audio buffers; tags the sample with the configured audio codec.
// (Fused old/new diff lines left two return statements — the second was unreachable.)
GstFlowReturn on_new_sample_audio(GstElement* sink, gpointer data)
{
    return on_new_sample(sink, data, audioCodec);
}

PVOID sendGstreamerAudioVideo(PVOID args)
Expand Down Expand Up @@ -168,13 +170,23 @@ PVOID sendGstreamerAudioVideo(PVOID args)
case SAMPLE_STREAMING_VIDEO_ONLY:
switch (pSampleConfiguration->srcType) {
case TEST_SOURCE: {
pipeline =
gst_parse_launch("videotestsrc is-live=TRUE ! queue ! videoconvert ! videoscale ! video/x-raw,width=1280,height=720 ! "
if (pSampleConfiguration->videoCodec == RTC_CODEC_H265) {
pipeline =
gst_parse_launch("videotestsrc pattern=ball is-live=TRUE ! timeoverlay ! queue ! videoconvert ! "
"video/x-raw,width=288,height=352,framerate=25/1 ! queue ! "
"x265enc speed-preset=veryfast bitrate=512 tune=zerolatency ! "
"video/x-h265,stream-format=byte-stream,alignment=au,profile=main ! appsink sync=TRUE "
"emit-signals=TRUE name=appsink-video",
&error);
} else {
pipeline =
gst_parse_launch("videotestsrc is-live=TRUE ! queue ! videoconvert ! videoscale ! video/x-raw,width=1280,height=720 ! "
"videorate ! video/x-raw,framerate=25/1 ! "
"x264enc bframes=0 speed-preset=veryfast bitrate=512 byte-stream=TRUE tune=zerolatency ! "
"video/x-h264,stream-format=byte-stream,alignment=au,profile=baseline ! appsink sync=TRUE emit-signals=TRUE "
"name=appsink-video",
&error);
}
break;
}
case DEVICE_SOURCE: {
Expand Down Expand Up @@ -209,14 +221,38 @@ PVOID sendGstreamerAudioVideo(PVOID args)
case SAMPLE_STREAMING_AUDIO_VIDEO:
switch (pSampleConfiguration->srcType) {
case TEST_SOURCE: {
pipeline =
gst_parse_launch("videotestsrc is-live=TRUE ! queue ! videoconvert ! video/x-raw,width=1280,height=720,framerate=25/1 ! "
if (videoCodec == RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE && audioCodec == RTC_CODEC_OPUS) {
pipeline =
gst_parse_launch("videotestsrc is-live=TRUE ! queue ! videoconvert ! video/x-raw,width=1280,height=720,framerate=25/1 ! "
"x264enc bframes=0 speed-preset=veryfast bitrate=512 byte-stream=TRUE tune=zerolatency ! "
"video/x-h264,stream-format=byte-stream,alignment=au,profile=baseline ! appsink sync=TRUE "
"emit-signals=TRUE name=appsink-video audiotestsrc is-live=TRUE ! "
"queue leaky=2 max-size-buffers=400 ! audioconvert ! audioresample ! opusenc ! "
"audio/x-opus,rate=48000,channels=2 ! appsink sync=TRUE emit-signals=TRUE name=appsink-audio",
&error);
} else if (videoCodec == RTC_CODEC_H265 && audioCodec == RTC_CODEC_OPUS) {
pipeline =
gst_parse_launch("videotestsrc pattern=ball is-live=TRUE ! timeoverlay ! queue ! videoconvert ! "
"video/x-raw,width=288,height=352,framerate=25/1 ! queue ! "
"x265enc speed-preset=veryfast bitrate=512 tune=zerolatency ! "
"video/x-h265,stream-format=byte-stream,alignment=au,profile=main ! appsink sync=TRUE "
"emit-signals=TRUE name=appsink-video audiotestsrc is-live=TRUE ! "
"queue leaky=2 max-size-buffers=400 ! audioconvert ! audioresample ! opusenc ! "
"audio/x-opus,rate=48000,channels=2 ! appsink sync=TRUE emit-signals=TRUE name=appsink-audio",
&error);

} else if (videoCodec == RTC_CODEC_H265 && audioCodec == RTC_CODEC_AAC) {
pipeline =
gst_parse_launch("videotestsrc pattern=ball is-live=TRUE ! timeoverlay ! queue ! videoconvert ! "
"video/x-raw,format=I420,width=1920,height=1080,framerate=25/1 ! queue ! "
"x265enc speed-preset=veryfast bitrate=512 tune=zerolatency ! "
"video/x-h265,stream-format=byte-stream,alignment=au,profile=main ! appsink sync=TRUE "
"emit-signals=TRUE name=appsink-video audiotestsrc wave=triangle is-live=TRUE ! "
"queue leaky=2 max-size-buffers=400 ! audioconvert ! audioresample ! faac ! "
"capsfilter caps=audio/mpeg,mpegversion=4,stream-format=adts,base-profile=lc,channels=2,rate=48000 ! "
"appsink sync=TRUE emit-signals=TRUE name=appsink-audio",
&error);
}
break;
}
case DEVICE_SOURCE: {
Expand Down Expand Up @@ -322,11 +358,11 @@ INT32 main(INT32 argc, CHAR* argv[])
pChannelName = argc > 1 ? argv[1] : SAMPLE_CHANNEL_NAME;
#endif

CHK_STATUS(createSampleConfiguration(pChannelName, SIGNALING_CHANNEL_ROLE_TYPE_MASTER, TRUE, TRUE, logLevel, &pSampleConfiguration));
CHK_STATUS(createSampleConfiguration(pChannelName, SIGNALING_CHANNEL_ROLE_TYPE_MASTER, TRUE, TRUE, logLevel,
audioCodec, videoCodec, &pSampleConfiguration));

pSampleConfiguration->videoSource = sendGstreamerAudioVideo;
pSampleConfiguration->mediaType = SAMPLE_STREAMING_VIDEO_ONLY;
pSampleConfiguration->receiveAudioVideoSource = receiveGstreamerAudioVideo;

#ifdef ENABLE_DATA_CHANNEL
pSampleConfiguration->onDataChannel = onDataChannel;
Expand Down Expand Up @@ -359,6 +395,17 @@ INT32 main(INT32 argc, CHAR* argv[])
if (STRCMP(argv[3], "testsrc") == 0) {
DLOGI("[KVS GStreamer Master] Using test source in GStreamer");
pSampleConfiguration->srcType = TEST_SOURCE;
if (argc > 4) {
if (!STRCMP(argv[4], AUDIO_CODEC_NAME_AAC)) {
audioCodec = RTC_CODEC_AAC;
}
}

if (argc > 5) {
if (!STRCMP(argv[5], VIDEO_CODEC_NAME_H265)) {
videoCodec = RTC_CODEC_H265;
}
}
} else if (STRCMP(argv[3], "devicesrc") == 0) {
DLOGI("[KVS GStreamer Master] Using device source in GStreamer");
pSampleConfiguration->srcType = DEVICE_SOURCE;
Expand Down
Loading

0 comments on commit 1836f5f

Please sign in to comment.