diff --git a/android/app/src/main/cpp/doc_examples_transcode_aac.c b/android/app/src/main/cpp/doc_examples_transcode_aac.c new file mode 100644 index 000000000..6ad2bfd83 --- /dev/null +++ b/android/app/src/main/cpp/doc_examples_transcode_aac.c @@ -0,0 +1,904 @@ +/* + * Copyright (c) 2013-2018 Andreas Unterweger + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * Simple audio converter + * + * @example transcode_aac.c + * Convert an input audio file to AAC in an MP4 container using FFmpeg. + * Formats other than MP4 are supported based on the output file extension. 
+ * @author Andreas Unterweger (dustsigns@gmail.com)
+ */
+
+#include <stdio.h>
+
+#include "libavformat/avformat.h"
+#include "libavformat/avio.h"
+
+#include "libavcodec/avcodec.h"
+
+#include "libavutil/audio_fifo.h"
+#include "libavutil/avassert.h"
+#include "libavutil/avstring.h"
+#include "libavutil/frame.h"
+#include "libavutil/opt.h"
+
+#include "libswresample/swresample.h"
+
+#include "mobileffmpeg.h"
+static int audio_stream_idx = -1;
+
+/* The output bit rate in bit/s */
+#define OUTPUT_BIT_RATE 96000
+/* The number of output channels */
+#define OUTPUT_CHANNELS 2
+/* The index of audio stream that will be transcoded */
+
+/**
+ * Open an input file and the required decoder.
+ * @param filename File to be opened
+ * @param[out] input_format_context Format context of opened file
+ * @param[out] input_codec_context Codec context of opened file
+ * @return Error code (0 if successful)
+ */
+static int open_input_file(const char *filename,
+                           AVFormatContext **input_format_context,
+                           AVCodecContext **input_codec_context)
+{
+    AVCodecContext *avctx;
+    AVCodec *input_codec;
+    int error;
+
+    /* Open the input file to read from it. */
+    if ((error = avformat_open_input(input_format_context, filename, NULL,
+                                     NULL)) < 0) {
+        LOGE("Could not open input file '%s' (error '%s')\n",
+             filename, av_err2str(error));
+        *input_format_context = NULL;
+        return error;
+    }
+
+    /* Get information on the input file (number of streams etc.).
+     */
+    if ((error = avformat_find_stream_info(*input_format_context, NULL)) < 0) {
+        LOGE("Could not find stream info (error '%s')\n",
+             av_err2str(error));
+        avformat_close_input(input_format_context);
+        return error;
+    }
+
+    for (audio_stream_idx = 0; audio_stream_idx < (*input_format_context)->nb_streams; audio_stream_idx++) {
+        if ((*input_format_context)->streams[audio_stream_idx]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
+            break;
+
+        LOGI("Skip non-audio input stream %d\n", audio_stream_idx);
+    }
+
+    /* Make sure that there is at least one audio stream in the input file. */
+    if (audio_stream_idx >= (*input_format_context)->nb_streams) {
+        LOGE("Could not find an audio stream in the input file\n");
+        avformat_close_input(input_format_context);
+        return AVERROR_EXIT;
+    }
+
+    /* Find a decoder for the audio stream. */
+    if (!(input_codec = avcodec_find_decoder((*input_format_context)->streams[audio_stream_idx]->codecpar->codec_id))) {
+        LOGE("Could not find input codec\n");
+        avformat_close_input(input_format_context);
+        return AVERROR_EXIT;
+    }
+
+    /* Allocate a new decoding context. */
+    avctx = avcodec_alloc_context3(input_codec);
+    if (!avctx) {
+        LOGE("Could not allocate a decoding context\n");
+        avformat_close_input(input_format_context);
+        return AVERROR(ENOMEM);
+    }
+
+    /* Initialize the stream parameters with demuxer information. */
+    error = avcodec_parameters_to_context(avctx, (*input_format_context)->streams[audio_stream_idx]->codecpar);
+    if (error < 0) {
+        avformat_close_input(input_format_context);
+        avcodec_free_context(&avctx);
+        return error;
+    }
+
+    /* Open the decoder for the audio stream to use it later. */
+    if ((error = avcodec_open2(avctx, input_codec, NULL)) < 0) {
+        LOGE("Could not open input codec (error '%s')\n",
+             av_err2str(error));
+        avcodec_free_context(&avctx);
+        avformat_close_input(input_format_context);
+        return error;
+    }
+
+    /* Save the decoder context for easier access later.
*/ + *input_codec_context = avctx; + + return 0; +} + +/** + * Open an output file and the required encoder. + * Also set some basic encoder parameters. + * Some of these parameters are based on the input file's parameters. + * @param filename File to be opened + * @param input_codec_context Codec context of input file + * @param[out] output_format_context Format context of output file + * @param[out] output_codec_context Codec context of output file + * @return Error code (0 if successful) + */ +static int open_output_file(const char *filename, + AVCodecContext *input_codec_context, + AVFormatContext **output_format_context, + AVCodecContext **output_codec_context) +{ + AVCodecContext *avctx = NULL; + AVIOContext *output_io_context = NULL; + AVStream *stream = NULL; + AVCodec *output_codec = NULL; + int error; + + /* Open the output file to write to it. */ + if ((error = avio_open(&output_io_context, filename, + AVIO_FLAG_WRITE)) < 0) { + LOGE("Could not open output file '%s' (error '%s')\n", + filename, av_err2str(error)); + return error; + } + + /* Create a new format context for the output container format. */ + if (!(*output_format_context = avformat_alloc_context())) { + LOGE("Could not allocate output format context\n"); + return AVERROR(ENOMEM); + } + + /* Associate the output file (pointer) with the container format context. */ + (*output_format_context)->pb = output_io_context; + + /* Guess the desired container format based on the file extension. */ + if (!((*output_format_context)->oformat = av_guess_format(NULL, filename, + NULL))) { + LOGE("Could not find output file format\n"); + goto cleanup; + } + + if (!((*output_format_context)->url = av_strdup(filename))) { + LOGE("Could not allocate url.\n"); + error = AVERROR(ENOMEM); + goto cleanup; + } + + /* Find the encoder to be used by its name. 
*/ + if (!(output_codec = avcodec_find_encoder((*output_format_context)->oformat->audio_codec))) { + LOGE("Could not find an encoder for %s(%d).\n", + (*output_format_context)->oformat->long_name, + (*output_format_context)->oformat->audio_codec); + goto cleanup; + } + + /* Create a new audio stream in the output file container. */ + if (!(stream = avformat_new_stream(*output_format_context, NULL))) { + LOGE("Could not create new stream\n"); + error = AVERROR(ENOMEM); + goto cleanup; + } + + avctx = avcodec_alloc_context3(output_codec); + if (!avctx) { + LOGE("Could not allocate an encoding context\n"); + error = AVERROR(ENOMEM); + goto cleanup; + } + + /* Set the basic encoder parameters. + * The input file's sample rate is used to avoid a sample rate conversion. */ + avctx->channels = OUTPUT_CHANNELS; + avctx->channel_layout = av_get_default_channel_layout(OUTPUT_CHANNELS); + avctx->sample_rate = input_codec_context->sample_rate; + avctx->sample_fmt = output_codec->sample_fmts[0]; + avctx->bit_rate = OUTPUT_BIT_RATE; + + /* Allow the use of the experimental AAC encoder. */ + avctx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL; + + /* Set the sample rate for the container. */ + stream->time_base.den = input_codec_context->sample_rate; + stream->time_base.num = 1; + + /* Some container formats (like MP4) require global headers to be present. + * Mark the encoder so that it behaves accordingly. */ + if ((*output_format_context)->oformat->flags & AVFMT_GLOBALHEADER) + avctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; + + /* Open the encoder for the audio stream to use it later. */ + if ((error = avcodec_open2(avctx, output_codec, NULL)) < 0) { + LOGE("Could not open output codec (error '%s')\n", + av_err2str(error)); + goto cleanup; + } + + error = avcodec_parameters_from_context(stream->codecpar, avctx); + if (error < 0) { + LOGE("Could not initialize stream parameters\n"); + goto cleanup; + } + + /* Save the encoder context for easier access later. 
*/ + *output_codec_context = avctx; + + return 0; + +cleanup: + avcodec_free_context(&avctx); + avio_closep(&(*output_format_context)->pb); + avformat_free_context(*output_format_context); + *output_format_context = NULL; + return error < 0 ? error : AVERROR_EXIT; +} + +/** + * Initialize one data packet for reading or writing. + * @param packet Packet to be initialized + */ +static void init_packet(AVPacket *packet) +{ + av_init_packet(packet); + /* Set the packet data and size so that it is recognized as being empty. */ + packet->data = NULL; + packet->size = 0; +} + +/** + * Initialize one audio frame for reading from the input file. + * @param[out] frame Frame to be initialized + * @return Error code (0 if successful) + */ +static int init_input_frame(AVFrame **frame) +{ + if (!(*frame = av_frame_alloc())) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate input frame\n"); + return AVERROR(ENOMEM); + } + return 0; +} + +/** + * Initialize the audio resampler based on the input and output codec settings. + * If the input and output sample formats differ, a conversion is required + * libswresample takes care of this, but requires initialization. + * @param input_codec_context Codec context of the input file + * @param output_codec_context Codec context of the output file + * @param[out] resample_context Resample context for the required conversion + * @return Error code (0 if successful) + */ +static int init_resampler(AVCodecContext *input_codec_context, + AVCodecContext *output_codec_context, + SwrContext **resample_context) +{ + int error; + + /* + * Create a resampler context for the conversion. + * Set the conversion parameters. + * Default channel layouts based on the number of channels + * are assumed for simplicity (they are sometimes not detected + * properly by the demuxer and/or decoder). 
+ */ + *resample_context = swr_alloc_set_opts(NULL, + av_get_default_channel_layout(output_codec_context->channels), + output_codec_context->sample_fmt, + output_codec_context->sample_rate, + av_get_default_channel_layout(input_codec_context->channels), + input_codec_context->sample_fmt, + input_codec_context->sample_rate, + 0, NULL); + if (!*resample_context) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate resample context\n"); + return AVERROR(ENOMEM); + } + /* + * Perform a sanity check so that the number of converted samples is + * not greater than the number of samples to be converted. + * If the sample rates differ, this case has to be handled differently + */ + av_assert0(output_codec_context->sample_rate == input_codec_context->sample_rate); + + /* Open the resampler with the specified parameters. */ + if ((error = swr_init(*resample_context)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open resample context\n"); + swr_free(resample_context); + return error; + } + return 0; +} + +/** + * Initialize a FIFO buffer for the audio samples to be encoded. + * @param[out] fifo Sample buffer + * @param output_codec_context Codec context of the output file + * @return Error code (0 if successful) + */ +static int init_fifo(AVAudioFifo **fifo, AVCodecContext *output_codec_context) +{ + /* Create the FIFO buffer based on the specified output sample format. */ + if (!(*fifo = av_audio_fifo_alloc(output_codec_context->sample_fmt, + output_codec_context->channels, 1))) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate FIFO\n"); + return AVERROR(ENOMEM); + } + return 0; +} + +/** + * Write the header of the output file container. 
+ * @param output_format_context Format context of the output file + * @return Error code (0 if successful) + */ +static int write_output_file_header(AVFormatContext *output_format_context) +{ + int error; + if ((error = avformat_write_header(output_format_context, NULL)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write output file header (error '%s')\n", + av_err2str(error)); + return error; + } + return 0; +} + +/** + * Decode one audio frame from the input file. + * @param frame Audio frame to be decoded + * @param input_format_context Format context of the input file + * @param input_codec_context Codec context of the input file + * @param[out] data_present Indicates whether data has been decoded + * @param[out] finished Indicates whether the end of file has + * been reached and all data has been + * decoded. If this flag is false, there + * is more data to be decoded, i.e., this + * function has to be called again. + * @return Error code (0 if successful) + */ +static int decode_audio_frame(AVFrame *frame, + AVFormatContext *input_format_context, + AVCodecContext *input_codec_context, + int *data_present, int *finished) +{ + /* Packet used for temporary storage. */ + AVPacket input_packet; + int error; + init_packet(&input_packet); + + /* Read one audio frame from the input file into a temporary packet. */ + if ((error = av_read_frame(input_format_context, &input_packet)) < 0) { + /* If we are at the end of the file, flush the decoder below. */ + if (error == AVERROR_EOF) + *finished = 1; + else { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not read frame (error '%s')\n", + av_err2str(error)); + return error; + } + } + + if (error != AVERROR_EOF && input_packet.stream_index != audio_stream_idx) { + goto cleanup; + } + + /* Send the audio frame stored in the temporary packet to the decoder. + * The input audio stream decoder is used to do this. 
*/ + if ((error = avcodec_send_packet(input_codec_context, &input_packet)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not send packet for decoding (error '%s')\n", + av_err2str(error)); + return error; + } + + /* Receive one frame from the decoder. */ + error = avcodec_receive_frame(input_codec_context, frame); + /* If the decoder asks for more data to be able to decode a frame, + * return indicating that no data is present. */ + if (error == AVERROR(EAGAIN)) { + error = 0; + goto cleanup; + /* If the end of the input file is reached, stop decoding. */ + } else if (error == AVERROR_EOF) { + *finished = 1; + error = 0; + goto cleanup; + } else if (error < 0) { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not decode frame (error '%s')\n", + av_err2str(error)); + goto cleanup; + /* Default case: Return decoded data. */ + } else { + *data_present = 1; + goto cleanup; + } + +cleanup: + av_packet_unref(&input_packet); + return error; +} + +/** + * Initialize a temporary storage for the specified number of audio samples. + * The conversion requires temporary storage due to the different format. + * The number of audio samples to be allocated is specified in frame_size. + * @param[out] converted_input_samples Array of converted samples. The + * dimensions are reference, channel + * (for multi-channel audio), sample. + * @param output_codec_context Codec context of the output file + * @param frame_size Number of samples to be converted in + * each round + * @return Error code (0 if successful) + */ +static int init_converted_samples(uint8_t ***converted_input_samples, + AVCodecContext *output_codec_context, + int frame_size) +{ + int error; + + /* Allocate as many pointers as there are audio channels. + * Each pointer will later point to the audio samples of the corresponding + * channels (although it may be NULL for interleaved formats). 
+ */ + if (!(*converted_input_samples = calloc(output_codec_context->channels, + sizeof(**converted_input_samples)))) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate converted input sample pointers\n"); + return AVERROR(ENOMEM); + } + + /* Allocate memory for the samples of all channels in one consecutive + * block for convenience. */ + if ((error = av_samples_alloc(*converted_input_samples, NULL, + output_codec_context->channels, + frame_size, + output_codec_context->sample_fmt, 0)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", + "Could not allocate converted input samples (error '%s')\n", + av_err2str(error)); + av_freep(&(*converted_input_samples)[0]); + free(*converted_input_samples); + return error; + } + return 0; +} + +/** + * Convert the input audio samples into the output sample format. + * The conversion happens on a per-frame basis, the size of which is + * specified by frame_size. + * @param input_data Samples to be decoded. The dimensions are + * channel (for multi-channel audio), sample. + * @param[out] converted_data Converted samples. The dimensions are channel + * (for multi-channel audio), sample. + * @param frame_size Number of samples to be converted + * @param resample_context Resample context for the conversion + * @return Error code (0 if successful) + */ +static int convert_samples(const uint8_t **input_data, + uint8_t **converted_data, const int frame_size, + SwrContext *resample_context) +{ + int error; + + /* Convert the samples using the resampler. */ + if ((error = swr_convert(resample_context, + converted_data, frame_size, + input_data , frame_size)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not convert input samples (error '%s')\n", + av_err2str(error)); + return error; + } + + return 0; +} + +/** + * Add converted input audio samples to the FIFO buffer for later processing. 
+ * @param fifo Buffer to add the samples to + * @param converted_input_samples Samples to be added. The dimensions are channel + * (for multi-channel audio), sample. + * @param frame_size Number of samples to be converted + * @return Error code (0 if successful) + */ +static int add_samples_to_fifo(AVAudioFifo *fifo, + uint8_t **converted_input_samples, + const int frame_size) +{ + int error; + + /* Make the FIFO as large as it needs to be to hold both, + * the old and the new samples. */ + if ((error = av_audio_fifo_realloc(fifo, av_audio_fifo_size(fifo) + frame_size)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not reallocate FIFO\n"); + return error; + } + + /* Store the new samples in the FIFO buffer. */ + if (av_audio_fifo_write(fifo, (void **)converted_input_samples, + frame_size) < frame_size) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write data to FIFO\n"); + return AVERROR_EXIT; + } + return 0; +} + +/** + * Read one audio frame from the input file, decode, convert and store + * it in the FIFO buffer. + * @param fifo Buffer used for temporary storage + * @param input_format_context Format context of the input file + * @param input_codec_context Codec context of the input file + * @param output_codec_context Codec context of the output file + * @param resampler_context Resample context for the conversion + * @param[out] finished Indicates whether the end of file has + * been reached and all data has been + * decoded. If this flag is false, + * there is more data to be decoded, + * i.e., this function has to be called + * again. + * @return Error code (0 if successful) + */ +static int read_decode_convert_and_store(AVAudioFifo *fifo, + AVFormatContext *input_format_context, + AVCodecContext *input_codec_context, + AVCodecContext *output_codec_context, + SwrContext *resampler_context, + int *finished) +{ + /* Temporary storage of the input samples of the frame read from the file. 
*/ + AVFrame *input_frame = NULL; + /* Temporary storage for the converted input samples. */ + uint8_t **converted_input_samples = NULL; + int data_present = 0; + int ret = AVERROR_EXIT; + + /* Initialize temporary storage for one input frame. */ + if (init_input_frame(&input_frame)) + goto cleanup; + /* Decode one frame worth of audio samples. */ + if (decode_audio_frame(input_frame, input_format_context, + input_codec_context, &data_present, finished)) + goto cleanup; + /* If we are at the end of the file and there are no more samples + * in the decoder which are delayed, we are actually finished. + * This must not be treated as an error. */ + if (*finished) { + ret = 0; + goto cleanup; + } + /* If there is decoded data, convert and store it. */ + if (data_present) { + /* Initialize the temporary storage for the converted input samples. */ + if (init_converted_samples(&converted_input_samples, output_codec_context, + input_frame->nb_samples)) + goto cleanup; + + /* Convert the input samples to the desired output sample format. + * This requires a temporary storage provided by converted_input_samples. */ + if (convert_samples((const uint8_t**)input_frame->extended_data, converted_input_samples, + input_frame->nb_samples, resampler_context)) + goto cleanup; + + /* Add the converted input samples to the FIFO buffer for later processing. */ + if (add_samples_to_fifo(fifo, converted_input_samples, + input_frame->nb_samples)) + goto cleanup; + ret = 0; + } + ret = 0; + +cleanup: + if (converted_input_samples) { + av_freep(&converted_input_samples[0]); + free(converted_input_samples); + } + av_frame_free(&input_frame); + + return ret; +} + +/** + * Initialize one input frame for writing to the output file. + * The frame will be exactly frame_size samples large. 
+ * @param[out] frame Frame to be initialized + * @param output_codec_context Codec context of the output file + * @param frame_size Size of the frame + * @return Error code (0 if successful) + */ +static int init_output_frame(AVFrame **frame, + AVCodecContext *output_codec_context, + int frame_size) +{ + int error; + + /* Create a new frame to store the audio samples. */ + if (!(*frame = av_frame_alloc())) { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not allocate output frame\n"); + return AVERROR_EXIT; + } + + /* Set the frame's parameters, especially its size and format. + * av_frame_get_buffer needs this to allocate memory for the + * audio samples of the frame. + * Default channel layouts based on the number of channels + * are assumed for simplicity. */ + (*frame)->nb_samples = frame_size; + (*frame)->channel_layout = output_codec_context->channel_layout; + (*frame)->format = output_codec_context->sample_fmt; + (*frame)->sample_rate = output_codec_context->sample_rate; + + /* Allocate the samples of the created frame. This call will make + * sure that the audio frame can hold as many samples as specified. */ + if ((error = av_frame_get_buffer(*frame, 0)) < 0) { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not allocate output frame samples (error '%s')\n", + av_err2str(error)); + av_frame_free(frame); + return error; + } + + return 0; +} + +/* Global timestamp for the audio frames. */ +static int64_t pts = 0; + +/** + * Encode one frame worth of audio to the output file. 
+ * @param frame Samples to be encoded + * @param output_format_context Format context of the output file + * @param output_codec_context Codec context of the output file + * @param[out] data_present Indicates whether data has been + * encoded + * @return Error code (0 if successful) + */ +static int encode_audio_frame(AVFrame *frame, + AVFormatContext *output_format_context, + AVCodecContext *output_codec_context, + int *data_present) +{ + /* Packet used for temporary storage. */ + AVPacket output_packet; + int error; + init_packet(&output_packet); + + /* Set a timestamp based on the sample rate for the container. */ + if (frame) { + frame->pts = pts; + pts += frame->nb_samples; + } + + /* Send the audio frame stored in the temporary packet to the encoder. + * The output audio stream encoder is used to do this. */ + error = avcodec_send_frame(output_codec_context, frame); + /* The encoder signals that it has nothing more to encode. */ + if (error == AVERROR_EOF) { + error = 0; + goto cleanup; + } else if (error < 0) { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not send packet for encoding (error '%s')\n", + av_err2str(error)); + return error; + } + + /* Receive one encoded frame from the encoder. */ + error = avcodec_receive_packet(output_codec_context, &output_packet); + /* If the encoder asks for more data to be able to provide an + * encoded frame, return indicating that no data is present. */ + if (error == AVERROR(EAGAIN)) { + error = 0; + goto cleanup; + /* If the last frame has been encoded, stop encoding. */ + } else if (error == AVERROR_EOF) { + error = 0; + goto cleanup; + } else if (error < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not encode frame (error '%s')\n", + av_err2str(error)); + goto cleanup; + /* Default case: Return encoded data. */ + } else { + *data_present = 1; + } + + /* Write one audio frame from the temporary packet to the output file. 
*/ + if (*data_present && + (error = av_write_frame(output_format_context, &output_packet)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write frame (error '%s')\n", + av_err2str(error)); + goto cleanup; + } + +cleanup: + av_packet_unref(&output_packet); + return error; +} + +/** + * Load one audio frame from the FIFO buffer, encode and write it to the + * output file. + * @param fifo Buffer used for temporary storage + * @param output_format_context Format context of the output file + * @param output_codec_context Codec context of the output file + * @return Error code (0 if successful) + */ +static int load_encode_and_write(AVAudioFifo *fifo, + AVFormatContext *output_format_context, + AVCodecContext *output_codec_context) +{ + /* Temporary storage of the output samples of the frame written to the file. */ + AVFrame *output_frame; + /* Use the maximum number of possible samples per frame. + * If there is less than the maximum possible frame size in the FIFO + * buffer use this number. Otherwise, use the maximum possible frame size. */ + const int frame_size = FFMIN(av_audio_fifo_size(fifo), + output_codec_context->frame_size); + int data_written; + + /* Initialize temporary storage for one output frame. */ + if (init_output_frame(&output_frame, output_codec_context, frame_size)) + return AVERROR_EXIT; + + /* Read as many samples from the FIFO buffer as required to fill the frame. + * The samples are stored in the frame temporarily. */ + if (av_audio_fifo_read(fifo, (void **)output_frame->data, frame_size) < frame_size) { + __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not read data from FIFO\n"); + av_frame_free(&output_frame); + return AVERROR_EXIT; + } + + /* Encode one frame worth of audio samples. 
*/ + if (encode_audio_frame(output_frame, output_format_context, + output_codec_context, &data_written)) { + av_frame_free(&output_frame); + return AVERROR_EXIT; + } + av_frame_free(&output_frame); + return 0; +} + +/** + * Write the trailer of the output file container. + * @param output_format_context Format context of the output file + * @return Error code (0 if successful) + */ +static int write_output_file_trailer(AVFormatContext *output_format_context) +{ + int error; + if ((error = av_write_trailer(output_format_context)) < 0) { + __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write output file trailer (error '%s')\n", + av_err2str(error)); + return error; + } + return 0; +} + +JNIEXPORT int JNICALL Java_com_arthenica_mobileffmpeg_Config_runTranscode(JNIEnv *env, jclass object, jstring inFilename, jstring outFilename) +{ + AVFormatContext *input_format_context = NULL, *output_format_context = NULL; + AVCodecContext *input_codec_context = NULL, *output_codec_context = NULL; + SwrContext *resample_context = NULL; + AVAudioFifo *fifo = NULL; + int ret = AVERROR_EXIT; + + const char *in_filename = (*env)->GetStringUTFChars(env, inFilename, 0); + const char *out_filename = (*env)->GetStringUTFChars(env, outFilename, 0); + + __android_log_print(ANDROID_LOG_INFO, "transcode_aac", "%s %s\n", in_filename, out_filename); + + /* Open the input file for reading. */ + if (open_input_file(in_filename, &input_format_context, + &input_codec_context)) + goto cleanup; + + /* Open the output file for writing. */ + if (open_output_file(out_filename, input_codec_context, + &output_format_context, &output_codec_context)) + goto cleanup; + + /* Initialize the resampler to be able to convert audio sample formats. */ + if (init_resampler(input_codec_context, output_codec_context, + &resample_context)) + goto cleanup; + /* Initialize the FIFO buffer to store audio samples to be encoded. 
*/ + if (init_fifo(&fifo, output_codec_context)) + goto cleanup; + /* Write the header of the output file container. */ + if (write_output_file_header(output_format_context)) + goto cleanup; + + /* Loop as long as we have input samples to read or output samples + * to write; abort as soon as we have neither. */ + while (1) { + /* Use the encoder's desired frame size for processing. */ + const int output_frame_size = output_codec_context->frame_size; + int finished = 0; + + /* Make sure that there is one frame worth of samples in the FIFO + * buffer so that the encoder can do its work. + * Since the decoder's and the encoder's frame size may differ, we + * need to FIFO buffer to store as many frames worth of input samples + * that they make up at least one frame worth of output samples. */ + while (av_audio_fifo_size(fifo) < output_frame_size) { + /* Decode one frame worth of audio samples, convert it to the + * output sample format and put it into the FIFO buffer. */ + if (read_decode_convert_and_store(fifo, input_format_context, + input_codec_context, + output_codec_context, + resample_context, &finished)) + goto cleanup; + + /* If we are at the end of the input file, we continue + * encoding the remaining audio samples to the output file. */ + if (finished) + break; + } + + /* If we have enough samples for the encoder, we encode them. + * At the end of the file, we pass the remaining samples to + * the encoder. */ + while (av_audio_fifo_size(fifo) >= output_frame_size || + (finished && av_audio_fifo_size(fifo) > 0)) + /* Take one frame worth of audio samples from the FIFO buffer, + * encode it and write it to the output file. */ + if (load_encode_and_write(fifo, output_format_context, + output_codec_context)) + goto cleanup; + + /* If we are at the end of the input file and have encoded + * all remaining samples, we can exit this loop and finish. */ + if (finished) { + int data_written; + /* Flush the encoder as it may have delayed frames. 
*/ + do { + data_written = 0; + if (encode_audio_frame(NULL, output_format_context, + output_codec_context, &data_written)) + goto cleanup; + } while (data_written); + break; + } + } + + /* Write the trailer of the output file container. */ + if (write_output_file_trailer(output_format_context)) + goto cleanup; + ret = 0; + +cleanup: + if (fifo) + av_audio_fifo_free(fifo); + swr_free(&resample_context); + if (output_codec_context) + avcodec_free_context(&output_codec_context); + if (output_format_context) { + avio_closep(&output_format_context->pb); + avformat_free_context(output_format_context); + } + if (input_codec_context) + avcodec_free_context(&input_codec_context); + if (input_format_context) + avformat_close_input(&input_format_context); + + return ret; +} diff --git a/android/app/src/main/cpp/structured_storage.c b/android/app/src/main/cpp/structured_storage.c new file mode 100644 index 000000000..95802de7f --- /dev/null +++ b/android/app/src/main/cpp/structured_storage.c @@ -0,0 +1,290 @@ +/* + * Copyright (c) 2018 Taner Sener + * + * This file is part of MobileFFmpeg. + * + * MobileFFmpeg is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * MobileFFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with MobileFFmpeg. If not, see . 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "mobileffmpeg.h" + +static jclass android_net_Uri = NULL; /* global ref */ + +static jmethodID android_net_Uri_parse = NULL; +static jmethodID android_content_Context_getContentResolver = NULL; +static jmethodID android_content_ContentResolver_openFileDescriptor = NULL; +static jmethodID android_content_ContentResolver_query = NULL; +static jmethodID android_os_ParcelFileDescriptor_getFd = NULL; +static jmethodID android_database_Cursor_moveToFirst = NULL; +static jmethodID android_database_Cursor_getString = NULL; +static jmethodID android_database_Cursor_getColumnIndex = NULL; +static jmethodID android_database_Cursor_close = NULL; + +static jstring DISPLAY_NAME = NULL; +static jobject android_app_Application_global_instance = NULL; +static jobject android_content_ContentResolver_global_instance = NULL; + +// LOG with source filename and line +#define LOGD_NL(fmt, ...) LOGD("%s:%d " fmt, strrchr(__FILE__, '/')+1, __LINE__, __VA_ARGS__) +#define LOGE_NL(fmt, ...) LOGE("%s:%d " fmt, strrchr(__FILE__, '/')+1, __LINE__, __VA_ARGS__) + +#define JNI_CHECK_EXTRA(j_object, extra_cleanup, format, ...) { \ + if (!(j_object) || (*env)->ExceptionCheck(env)) { \ + (*env)->ExceptionClear(env); \ + LOGE_NL("cannot get (" #j_object ") from " format, __VA_ARGS__); \ + extra_cleanup; \ + (*env)->PopLocalFrame(env, NULL); \ + return JNI_ERR; \ + } \ +} + +#define JNI_CHECK(j_object, format, ...) 
JNI_CHECK_EXTRA(j_object, ;, format, __VA_ARGS__) + +static int get_static_method_id(JNIEnv *env, const char *class_name, const char *name, const char *signature, jclass *global_class_reference, jmethodID *method_id) { + jclass class_reference = (*env)->FindClass(env, class_name); + JNI_CHECK(class_reference, "%s", class_name); + *global_class_reference = (*env)->NewGlobalRef(env, class_reference); + JNI_CHECK(*global_class_reference, "%p %s", class_reference, class_name); + (*env)->DeleteLocalRef(env, class_reference); + + *method_id = (*env)->GetStaticMethodID(env, *global_class_reference, name, signature); + JNI_CHECK(*method_id, "%p class %s static method %s for %s", *global_class_reference, class_name, name, signature); + return JNI_OK; +} + +static int get_method_id(JNIEnv *env, const char *class_name, const char *name, const char *signature, jmethodID *method_id) { + jclass class_reference = (*env)->FindClass(env, class_name); + JNI_CHECK(class_reference, "%s", class_name); + + *method_id = (*env)->GetMethodID(env, class_reference, name, signature); + JNI_CHECK(*method_id, "%p class %s method %s for %s", class_reference, class_name, name, signature); + (*env)->DeleteLocalRef(env, class_reference); + return JNI_OK; +} + +/** + * Kudos to Harlan Chen, https://stackoverflow.com/a/46871051 + + static jobject getGlobalContext(JNIEnv *env) + { + + jclass activityThread = (*env)->FindClass(env,"android/app/ActivityThread"); + jmethodID currentActivityThread = (*env)->GetStaticMethodID(env,activityThread, "currentActivityThread", "()Landroid/app/ActivityThread;"); + jobject at = (*env)->CallStaticObjectMethod(env,activityThread, currentActivityThread); + + jmethodID getApplication = (*env)->GetMethodID(env,activityThread, "getApplication", "()Landroid/app/Application;"); + jobject context = (*env)->CallObjectMethod(env,at, getApplication); + return context; + } + */ +int get_global_app_context(JNIEnv *env) { + + if (android_app_Application_global_instance != NULL) + 
return JNI_OK; + + jclass android_app_ActivityThread; + jmethodID android_app_ActivityThread_currentActivityThread; + + if (get_static_method_id(env, "android/app/ActivityThread", "currentActivityThread", "()Landroid/app/ActivityThread;", &android_app_ActivityThread, &android_app_ActivityThread_currentActivityThread) != JNI_OK) + return JNI_ERR; + + jobject activity_thread_instance = (*env)->CallStaticObjectMethod(env, android_app_ActivityThread, android_app_ActivityThread_currentActivityThread); + JNI_CHECK(activity_thread_instance, "%p %s", android_app_ActivityThread, "android/app/ActivityThread"); + + jmethodID android_app_ActivityThread_getApplication = (*env)->GetMethodID(env, android_app_ActivityThread, "getApplication", "()Landroid/app/Application;"); + JNI_CHECK(android_app_ActivityThread_getApplication, "%p class %s method %s for %s", android_app_ActivityThread, "android/app/ActivityThread", "getApplication", "()Landroid/app/Application;"); + + jobject context = (*env)->CallObjectMethod(env, activity_thread_instance, android_app_ActivityThread_getApplication); + JNI_CHECK(context, "%p", android_app_ActivityThread); + + android_app_Application_global_instance = (*env)->NewGlobalRef(env, context); + JNI_CHECK(android_app_Application_global_instance, "NewGlobalRef %p", context); + + return JNI_OK; +} + +/** + * Kudos to Stefan Haustein, https://stackoverflow.com/a/25005243 + + Cursor cursor = getContentResolver().query(uri, null, null, null, null); + try { + if (cursor != null && cursor.moveToFirst()) { + result = cursor.getString(cursor.getColumnIndex(DocumentsContract.Document.COLUMN_DISPLAY_NAME)); + } + } finally { + cursor.close(); + } +**/ + +static int get_filename_from_content(const char *content, char **filename) { + + *filename = NULL; + + if (!android_content_ContentResolver_global_instance) { + LOGE_NL("have not initialized %s instance", "android.content.ContentResolver"); + return JNI_ERR; + } + + JavaVM *java_vm = av_jni_get_java_vm(NULL); + 
JNIEnv *env; + if ((*java_vm)->GetEnv(java_vm, (void **)&env, JNI_VERSION_1_6) != JNI_OK || !env) { + LOGE_NL("cannot get env for %p", java_vm); + return JNI_ERR; + } + + JNI_CHECK((*env)->PushLocalFrame(env, 10) == 0, "%p", env); + + jstring uriString = (*env)->NewStringUTF(env, content); + JNI_CHECK(uriString, "%s", content); + + jobject uri = (*env)->CallStaticObjectMethod(env, android_net_Uri, android_net_Uri_parse, uriString); + JNI_CHECK(uri, "%p", uriString); + + jobject cursor = (*env)->CallObjectMethod(env, android_content_ContentResolver_global_instance, android_content_ContentResolver_query, uri, NULL, NULL, NULL, NULL); + JNI_CHECK(cursor, "%p (%s, null, null, null, null)", android_content_ContentResolver_global_instance, content); + + jboolean move_res = (*env)->CallBooleanMethod(env, cursor, android_database_Cursor_moveToFirst); + JNI_CHECK_EXTRA(move_res, (*env)->CallVoidMethod(env, cursor, android_database_Cursor_close), "%p moveToFirst()", cursor); + + jint column_idx = (*env)->CallIntMethod(env, cursor, android_database_Cursor_getColumnIndex, DISPLAY_NAME); + JNI_CHECK_EXTRA(column_idx >= 0, (*env)->CallVoidMethod(env, cursor, android_database_Cursor_close), "%p getColumnIndex(DISPLAY_NAME)", cursor); + + jstring j_str = (jstring)(*env)->CallObjectMethod(env, cursor, android_database_Cursor_getString, column_idx); + JNI_CHECK_EXTRA(j_str, (*env)->CallVoidMethod(env, cursor, android_database_Cursor_close), "%p getString (%d)", cursor, column_idx); + (*env)->CallVoidMethod(env, cursor, android_database_Cursor_close); + + const char *c_str = (*env)->GetStringUTFChars(env, j_str, 0); + JNI_CHECK(c_str, "GetStringUTFChars %p", j_str); + + LOGD("get_filename_from_content " "recovered name '%s' from '%s'", c_str, content); + *filename = strdup(c_str); + + (*env)->ReleaseStringUTFChars(env, j_str, c_str); + + (*env)->PopLocalFrame(env, NULL); + return JNI_OK; +} + +__thread char *last_content = NULL; +__thread char *last_filename = NULL; + +int 
match_ext_from_content(const char *content, const char *extensions) { + if (last_content && strcmp(last_content, content) == 0) { + return av_match_ext(last_filename, extensions); + } + if (last_content) + free(last_content); + last_content = strdup(content); + if (last_filename) + free(last_filename); + if (get_filename_from_content(content, &last_filename) != JNI_OK) + return 0; + return av_match_ext(last_filename, extensions); +} + +int get_fd_from_content(const char *content, int access) { + + int fd = -1; // it's lucky that JNI_ERR == -1 + + JavaVM *java_vm = av_jni_get_java_vm(NULL); + JNIEnv *env; + if ((*java_vm)->GetEnv(java_vm, (void **)&env, JNI_VERSION_1_6) != JNI_OK || !env) { + LOGE_NL("cannot get env for %p", java_vm); + return JNI_ERR; + } + + JNI_CHECK((*env)->PushLocalFrame(env, 10) == 0, "%p", env); + + if (!android_net_Uri) { + if (get_static_method_id(env, "android/net/Uri", "parse", "(Ljava/lang/String;)Landroid/net/Uri;", &android_net_Uri, &android_net_Uri_parse) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/content/Context", "getContentResolver", "()Landroid/content/ContentResolver;", &android_content_Context_getContentResolver) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/content/ContentResolver", "openFileDescriptor", "(Landroid/net/Uri;Ljava/lang/String;)Landroid/os/ParcelFileDescriptor;", &android_content_ContentResolver_openFileDescriptor) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/content/ContentResolver", "query", "(Landroid/net/Uri;[Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;)Landroid/database/Cursor;", &android_content_ContentResolver_query) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/database/Cursor", "moveToFirst", "()Z", &android_database_Cursor_moveToFirst) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/database/Cursor", "getString", "(I)Ljava/lang/String;", &android_database_Cursor_getString) != 
JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/database/Cursor", "getColumnIndex", "(Ljava/lang/String;)I", &android_database_Cursor_getColumnIndex) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/database/Cursor", "close", "()V", &android_database_Cursor_close) != JNI_OK) + return JNI_ERR; + + if (get_method_id(env, "android/os/ParcelFileDescriptor", "getFd", "()I", &android_os_ParcelFileDescriptor_getFd) != JNI_OK) + return JNI_ERR; + + DISPLAY_NAME = (*env)->NewStringUTF(env, "_display_name"); // DocumentsContract.Document.COLUMN_DISPLAY_NAME + JNI_CHECK(DISPLAY_NAME, "DocumentsContract.Document.COLUMN_DISPLAY_NAME=%s", "_display_name"); + DISPLAY_NAME = (*env)->NewGlobalRef(env, DISPLAY_NAME); + JNI_CHECK(DISPLAY_NAME, "DocumentsContract.Document.COLUMN_DISPLAY_NAME=%s", "_display_name"); + + if (get_global_app_context(env) != JNI_OK) + return JNI_ERR; + + jobject contentResolver = (*env)->CallObjectMethod(env, android_app_Application_global_instance, android_content_Context_getContentResolver); + JNI_CHECK(contentResolver, "getContentResolver from %p", android_app_Application_global_instance); + + android_content_ContentResolver_global_instance = (*env)->NewGlobalRef(env, contentResolver); + JNI_CHECK(android_content_ContentResolver_global_instance, "NewGlobalRef %p", contentResolver); + } + + const char *fmode = "r"; + if (access & (O_WRONLY | O_RDWR)) { + fmode = "w"; + } + + LOGD("get_fd_from_content" " try \"%s\" for \"%s\"", content, fmode); + + jstring uriString = (*env)->NewStringUTF(env, content); + JNI_CHECK(uriString, "%s", content); + + jstring fmodeString = (*env)->NewStringUTF(env, fmode); + JNI_CHECK(fmodeString, "%s", fmode); + + jobject uri = (*env)->CallStaticObjectMethod(env, android_net_Uri, android_net_Uri_parse, uriString); + JNI_CHECK(uri, "%p", uriString); + + jobject parcelFileDescriptor = (*env)->CallObjectMethod(env, android_content_ContentResolver_global_instance, 
android_content_ContentResolver_openFileDescriptor, uri, fmodeString); + JNI_CHECK(parcelFileDescriptor, "%p (%s, %s)", android_content_ContentResolver_global_instance, content, fmode); + + fd = (*env)->CallIntMethod(env, parcelFileDescriptor, android_os_ParcelFileDescriptor_getFd); + JNI_CHECK(fd >= 0, "%p", parcelFileDescriptor); + + LOGI("get_fd_from_content" " opened \"%s\" for \"%s\" as %d", fmode, content, fd); + + (*env)->PopLocalFrame(env, NULL); + return dup(fd); +} diff --git a/android/app/src/main/java/com/arthenica/mobileffmpeg/Config.java b/android/app/src/main/java/com/arthenica/mobileffmpeg/Config.java index ca3294686..6d90800ab 100644 --- a/android/app/src/main/java/com/arthenica/mobileffmpeg/Config.java +++ b/android/app/src/main/java/com/arthenica/mobileffmpeg/Config.java @@ -687,4 +687,11 @@ static void setLastReturnCode(int newLastReturnCode) { */ native static String getNativeLastCommandOutput(); + /** + *

Run transcode_aac from doc/examples. + * + * @return zero if transcoding was successful + */ + public native static int runTranscode(final String inFilename, final String outFilename); + } diff --git a/android/jni/Android.mk b/android/jni/Android.mk index 6c925c128..5553293cc 100644 --- a/android/jni/Android.mk +++ b/android/jni/Android.mk @@ -43,6 +43,7 @@ else ifeq ($(TARGET_PLATFORM),android-17) else LOCAL_SRC_FILES := mobileffmpeg.c mobileffprobe.c mobileffmpeg_exception.c fftools_cmdutils.c fftools_ffmpeg.c fftools_ffprobe.c fftools_ffmpeg_opt.c fftools_ffmpeg_hw.c fftools_ffmpeg_filter.c endif +LOCAL_SRC_FILES += doc_examples_transcode_aac.c structured_storage.c LOCAL_CFLAGS := -Wall -Werror -Wno-unused-parameter -Wno-switch -Wno-sign-compare LOCAL_LDLIBS := -llog -lz -landroid LOCAL_SHARED_LIBRARIES := libavfilter libavformat libavcodec libavutil libswresample libavdevice libswscale diff --git a/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/PagerAdapter.java b/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/PagerAdapter.java index b63180732..421cb1f61 100644 --- a/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/PagerAdapter.java +++ b/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/PagerAdapter.java @@ -26,7 +26,7 @@ import androidx.fragment.app.FragmentPagerAdapter; public class PagerAdapter extends FragmentPagerAdapter { - private static final int NUMBER_OF_TABS = 7; + private static final int NUMBER_OF_TABS = 8; private final Context context; @@ -59,6 +59,9 @@ public Fragment getItem(final int position) { case 6: { return PipeTabFragment.newInstance(); } + case 7: { + return ScopedStorageTabFragment.newInstance(); + } default: { return null; } @@ -94,6 +97,9 @@ public CharSequence getPageTitle(final int position) { case 6: { return context.getString(R.string.pipe_tab); } + case 7: { + return context.getString(R.string.scoped_storage_tab); + } default: { return null; } diff --git 
a/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/ScopedStorageTabFragment.java b/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/ScopedStorageTabFragment.java new file mode 100644 index 000000000..4deb5ccba --- /dev/null +++ b/android/test-app/src/main/java/com/arthenica/mobileffmpeg/test/ScopedStorageTabFragment.java @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2018 Taner Sener + * + * This file is part of MobileFFmpeg. + * + * MobileFFmpeg is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * MobileFFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with MobileFFmpeg. If not, see . 
+ */ + +package com.arthenica.mobileffmpeg.test; + +import android.content.Intent; +import android.net.Uri; +import android.os.Bundle; +import android.text.method.ScrollingMovementMethod; +import android.util.AndroidRuntimeException; +import android.util.Log; +import android.view.View; +import android.widget.Button; +import android.widget.EditText; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.fragment.app.Fragment; + +import com.arthenica.mobileffmpeg.Config; +import com.arthenica.mobileffmpeg.FFprobe; +import com.arthenica.mobileffmpeg.LogCallback; +import com.arthenica.mobileffmpeg.LogMessage; + +import java.util.concurrent.Callable; + +import static android.app.Activity.RESULT_OK; + +public class ScopedStorageTabFragment extends Fragment { + + private EditText commandText; + private TextView outputText; + private Uri videoUri; + private Uri outUri; + private static final int REQUEST_SAF_FFPROBE = 11; + private static final int REQUEST_SAF_TRANSCODE_IN = 12; + private static final int REQUEST_SAF_TRANSCODE_OUT = 13; + + public ScopedStorageTabFragment() { + super(R.layout.fragment_command_tab); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + + commandText = view.findViewById(R.id.commandText); + commandText.setVisibility(View.GONE); + + Button runFFmpegButton = view.findViewById(R.id.runFFmpegButton); + runFFmpegButton.setText(R.string.command_run_transcode_button_text); + runFFmpegButton.setOnClickListener(new View.OnClickListener() { + + @Override + public void onClick(View v) { + Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT) + .setType("video/*") + .addCategory(Intent.CATEGORY_OPENABLE); + startActivityForResult(intent, REQUEST_SAF_TRANSCODE_IN); + } + }); + + Button runFFprobeButton = view.findViewById(R.id.runFFprobeButton); + 
runFFprobeButton.setOnClickListener(new View.OnClickListener() { + + @Override + public void onClick(View v) { + Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT) + .setType("video/*") + .addCategory(Intent.CATEGORY_OPENABLE); + startActivityForResult(intent, REQUEST_SAF_FFPROBE); + } + }); + + outputText = view.findViewById(R.id.outputText); + outputText.setMovementMethod(new ScrollingMovementMethod()); + + Log.d(MainActivity.TAG, "Last command output was: " + Config.getLastCommandOutput()); + } + + @Override + public void onResume() { + super.onResume(); + setActive(); + } + + public static ScopedStorageTabFragment newInstance() { + return new ScopedStorageTabFragment(); + } + + public void enableLogCallback() { + Config.enableLogCallback(new LogCallback() { + + @Override + public void apply(final LogMessage message) { + MainActivity.addUIAction(new Callable() { + + @Override + public Object call() { + appendLog(message.getText()); + return null; + } + }); + + throw new AndroidRuntimeException("I am test exception thrown by test application"); + } + }); + } + + private void runFFprobe() { + clearLog(); + + final String ffprobeCommand = "-hide_banner " + videoUri; + + Log.d(MainActivity.TAG, "Testing FFprobe COMMAND synchronously."); + + Log.d(MainActivity.TAG, String.format("FFprobe process started with arguments\n\'%s\'", ffprobeCommand)); + + int result = FFprobe.execute(ffprobeCommand); + + Log.d(MainActivity.TAG, String.format("FFprobe process exited with rc %d", result)); + + if (result != 0) { + Popup.show(requireContext(), "Command failed. 
Please check output for the details."); + } + videoUri = null; + } + + private void runTranscode() { + clearLog(); + + Log.d(MainActivity.TAG, "Testing transcode(" + videoUri + ", " + outUri + ")"); + + int result = Config.runTranscode(videoUri.toString(), outUri.toString()); + + Log.d(MainActivity.TAG, String.format("Transcode exited with rc %d", result)); + + if (result != 0) { + Popup.show(requireContext(), "Command failed. Please check output for the details."); + } + + videoUri = outUri; + outUri = null; + if (result == 0) { + runFFprobe(); + } + } + + private void setActive() { + Log.i(MainActivity.TAG, "ScopedStorage Tab Activated"); + enableLogCallback(); + } + + public void appendLog(final String logMessage) { + outputText.append(logMessage); + } + + public void clearLog() { + outputText.setText(""); + } + + @Override + public void onActivityResult(int requestCode, int resultCode, Intent data) { + if (requestCode == REQUEST_SAF_FFPROBE && resultCode == RESULT_OK && data != null) { + videoUri = data.getData(); + runFFprobe(); + } else if (requestCode == REQUEST_SAF_TRANSCODE_IN && resultCode == RESULT_OK && data != null) { + videoUri = data.getData(); + Intent intent = new Intent(Intent.ACTION_CREATE_DOCUMENT) + .setType("audio/*") + .putExtra(Intent.EXTRA_TITLE, "transcode.aac") + .addCategory(Intent.CATEGORY_OPENABLE); + startActivityForResult(intent, REQUEST_SAF_TRANSCODE_OUT); + } else if (requestCode == REQUEST_SAF_TRANSCODE_OUT && resultCode == RESULT_OK && data != null) { + outUri = data.getData(); + runTranscode(); + } else { + super.onActivityResult(requestCode, resultCode, data); + } + } +} diff --git a/android/test-app/src/main/res/values/strings.xml b/android/test-app/src/main/res/values/strings.xml index 0b9e9e633..0740328c7 100644 --- a/android/test-app/src/main/res/values/strings.xml +++ b/android/test-app/src/main/res/values/strings.xml @@ -6,9 +6,11 @@ AUDIO SUBTITLE VID.STAB + SAF PIPE Enter command RUN FFMPEG + TRANSCODE RUN FFPROBE 
ENCODE CREATE diff --git a/src/ffmpeg/libavformat/file.c b/src/ffmpeg/libavformat/file.c index 8303436be..714dd74b7 100644 --- a/src/ffmpeg/libavformat/file.c +++ b/src/ffmpeg/libavformat/file.c @@ -412,3 +412,60 @@ const URLProtocol ff_pipe_protocol = { }; #endif /* CONFIG_PIPE_PROTOCOL */ + +#ifdef ANDROID +extern int get_fd_from_content(const char *content, int access); +static int saf_open(URLContext *h, const char *filename, int flags) +{ + FileContext *c = h->priv_data; + int access; + int fd; + struct stat st; + + if (flags & AVIO_FLAG_WRITE && flags & AVIO_FLAG_READ) { + access = O_CREAT | O_RDWR; + if (c->trunc) + access |= O_TRUNC; + } else if (flags & AVIO_FLAG_WRITE) { + access = O_CREAT | O_WRONLY; + if (c->trunc) + access |= O_TRUNC; + } else { + access = O_RDONLY; + } +#ifdef O_BINARY + access |= O_BINARY; +#endif + + fd = get_fd_from_content(filename, access); + if (fd < 0) + return AVERROR(errno); + c->fd = fd; + + h->is_streamed = !fstat(fd, &st) && S_ISFIFO(st.st_mode); + + /* Buffer writes more than the default 32k to improve throughput especially + * with networked file systems */ + if (!h->is_streamed && flags & AVIO_FLAG_WRITE) + h->min_packet_size = h->max_packet_size = 262144; + + if (c->seekable >= 0) + h->is_streamed = !c->seekable; + + return 0; +} + +const URLProtocol ff_saf_protocol = { + .name = "content", + .url_open = saf_open, + .url_read = file_read, + .url_write = file_write, + .url_seek = file_seek, + .url_close = file_close, + .url_get_file_handle = file_get_handle, + .url_check = file_check, + .priv_data_size = sizeof(FileContext), + .priv_data_class = &file_class +}; + +#endif // ANDROID \ No newline at end of file diff --git a/src/ffmpeg/libavformat/format.c b/src/ffmpeg/libavformat/format.c index c47490c8e..4c4c3dd72 100644 --- a/src/ffmpeg/libavformat/format.c +++ b/src/ffmpeg/libavformat/format.c @@ -35,6 +35,8 @@ * Format register and lookup */ +extern int match_ext_from_content(const char *filename, const char 
*extensions); + int av_match_ext(const char *filename, const char *extensions) { const char *ext; @@ -42,6 +44,11 @@ int av_match_ext(const char *filename, const char *extensions) if (!filename) return 0; +#ifdef ANDROID + if (av_strstart(filename, "content:", NULL)) + return match_ext_from_content(filename, extensions); +#endif + ext = strrchr(filename, '.'); if (ext) return av_match_name(ext + 1, extensions); diff --git a/src/ffmpeg/libavformat/protocols.c b/src/ffmpeg/libavformat/protocols.c index f1b8eab0f..efa346fe6 100644 --- a/src/ffmpeg/libavformat/protocols.c +++ b/src/ffmpeg/libavformat/protocols.c @@ -32,6 +32,7 @@ extern const URLProtocol ff_data_protocol; extern const URLProtocol ff_ffrtmpcrypt_protocol; extern const URLProtocol ff_ffrtmphttp_protocol; extern const URLProtocol ff_file_protocol; +extern const URLProtocol ff_saf_protocol; extern const URLProtocol ff_ftp_protocol; extern const URLProtocol ff_gopher_protocol; extern const URLProtocol ff_hls_protocol; diff --git a/tools/ndk/Android.lts.mk b/tools/ndk/Android.lts.mk index 295518a8b..a75fa71ba 100644 --- a/tools/ndk/Android.lts.mk +++ b/tools/ndk/Android.lts.mk @@ -43,6 +43,7 @@ else ifeq ($(TARGET_PLATFORM),android-17) else LOCAL_SRC_FILES := mobileffmpeg.c mobileffprobe.c mobileffmpeg_exception.c fftools_cmdutils.c fftools_ffmpeg.c fftools_ffprobe.c fftools_ffmpeg_opt.c fftools_ffmpeg_hw.c fftools_ffmpeg_filter.c endif +LOCAL_SRC_FILES += doc_examples_transcode_aac.c structured_storage.c LOCAL_CFLAGS := -Wall -Werror -Wno-unused-parameter -Wno-switch -Wno-sign-compare LOCAL_LDLIBS := -llog -lz -landroid LOCAL_SHARED_LIBRARIES := libavfilter libavformat libavcodec libavutil libswresample libavdevice libswscale diff --git a/tools/ndk/Android.mk b/tools/ndk/Android.mk index 6c925c128..5553293cc 100644 --- a/tools/ndk/Android.mk +++ b/tools/ndk/Android.mk @@ -43,6 +43,7 @@ else ifeq ($(TARGET_PLATFORM),android-17) else LOCAL_SRC_FILES := mobileffmpeg.c mobileffprobe.c mobileffmpeg_exception.c 
fftools_cmdutils.c fftools_ffmpeg.c fftools_ffprobe.c fftools_ffmpeg_opt.c fftools_ffmpeg_hw.c fftools_ffmpeg_filter.c endif +LOCAL_SRC_FILES += doc_examples_transcode_aac.c structured_storage.c LOCAL_CFLAGS := -Wall -Werror -Wno-unused-parameter -Wno-switch -Wno-sign-compare LOCAL_LDLIBS := -llog -lz -landroid LOCAL_SHARED_LIBRARIES := libavfilter libavformat libavcodec libavutil libswresample libavdevice libswscale