In the previous article: A Preliminary Study on Android Video and Audio Development

Demo address: Github.com/ColorfulHor…

Simple process

In the last blog post, we completed a simple streamer (pushing a stream). In this article, we will implement a simple player that pulls a stream. In fact, it is just the previous push-stream process in reverse:

  1. Connect to the streaming media server and fetch the stream
  2. Decode the bitstream to get the raw data
  3. Convert the YUV data to RGB data for rendering and display
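
For orientation, these three steps correspond to the FFmpeg calls used throughout this article. A minimal sketch of the mapping (error handling omitted; the variable names here are only illustrative, the full code below uses its own):

// 1. Connect to the streaming media server and fetch the stream
avformat_open_input(&formatContext, url, nullptr, nullptr);
avformat_find_stream_info(formatContext, nullptr);
// 2. Decode the bitstream to get the raw (YUV) frames
av_read_frame(formatContext, packet);
avcodec_send_packet(codecContext, packet);
avcodec_receive_frame(codecContext, yuvFrame);
// 3. Convert YUV to RGBA and draw it on an ANativeWindow
sws_scale(sws_context, yuvFrame->data, yuvFrame->linesize, 0, height,
          rgbaFrame->data, rgbaFrame->linesize);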

JNI binding

The Java part

class LyjPlayer {

    companion object {
        // Some state constants for callbacks
        const val STATE_CONNECTED = 0
        const val STATE_START = STATE_CONNECTED + 1
        const val STATE_STOP = STATE_START + 1

        const val ERROR_CONNECT_TIMEOUT = 0
        const val ERROR_STREAM = ERROR_CONNECT_TIMEOUT + 1
        const val NONE_VIDEO_STREAM = ERROR_STREAM + 1
        const val UNKNOW = NONE_VIDEO_STREAM + 1

        init {
            LibLoader.loadLib("lyjplayer")
        }
    }

    init {
        initPlayer()
    }

    external fun initPlayer()
    // Set the surface to display the screen
    external fun setSurface(surface: Surface)

    external fun setVideoCallBack(callback: VideoCallBack)

    external fun startPlay(url: String): Int

    external fun stopPlay(): Int

    external fun release()
}

Native part

Here, a map is used to store multiple player instances. The string returned by the Java object's toString method is used as the key to identify the corresponding native-layer object, giving a one-to-one mapping between the two layers.

In addition, the native layer needs an ANativeWindow (which can be thought of as the native-layer counterpart of a Surface) to display the picture, so in the player_set_surface method the Surface from the Java layer is passed in to create the corresponding instance.

register_jni.cpp

template<class T>
int arrayLen(T &array) {
    return (sizeof(array) / sizeof(array[0]));
}

#ifdef __cplusplus
extern "C" {
#endif

const char *cls_player = "com/lyj/learnffmpeg/LyjPlayer";

// Set of instances
map<string, LyjPlayer *> player_map;

// Generate a key to identify the native layer object based on the Java Object, where the key is the memory address string of the Java bean
const string getKey(JNIEnv *env, jobject obj) {
    jclass cls = env->GetObjectClass(obj);
    jmethodID mid = env->GetMethodID(cls, "toString", "()Ljava/lang/String;");
    jstring jstr = static_cast<jstring>(env->CallObjectMethod(obj, mid));
    const char *chars = env->GetStringUTFChars(jstr, nullptr);
    string key(chars);
    // Release the UTF chars to avoid leaking the copy made by the JVM
    env->ReleaseStringUTFChars(jstr, chars);
    return key;
}

void player_init_play(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = new LyjPlayer();
    // Save the JVM instance for callbacks
    env->GetJavaVM(&player->vm);
    player_map[key] = player;
    player->init();
}

// Set the surface
void player_set_surface(JNIEnv *env, jobject obj, jobject surface) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        // Create ANativeWindow based on the incoming surface to display the screen
        ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
        if (!window) {
            LOGE("window null");
        } else {
            player->window = window;
        }
    }
}

// Set the callback
void player_set_callback(JNIEnv *env, jobject obj, jobject callback) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        if (player->callback) {
            env->DeleteGlobalRef(player->callback);
        }
        player->callback = env->NewGlobalRef(callback);
    }
}

// Start playing
int player_start_play(JNIEnv *env, jobject obj, jstring url) {
    const char *path = nullptr;
    path = env->GetStringUTFChars(url, nullptr);
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        player->stopPlay();
        player->startPlay(path);
    } else {
        LOGE("cant not find player");
    }
    env->ReleaseStringUTFChars(url, path);
    return 0;
}

// Stop playing to release resources
int player_stop_play(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        player->stopPlay();
    }
    return 0;
}

void player_release(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        env->DeleteGlobalRef(player->callback);
        player->release();
        player_map.erase(key);
        delete player;
    }
}

// Method mapping
JNINativeMethod player_methods[] = {
        {"initPlayer"."()V",                                    (void *) player_init_play},
        {"setSurface"."(Landroid/view/Surface;) V",              (void *) player_set_surface},
        {"setVideoCallBack"."(Lcom/lyj/learnffmpeg/VideoCallBack;) V", (void *) player_set_callback},
        {"startPlay"."(Ljava/lang/String;) I",                  (void *) player_start_play},
        {"stopPlay"."()I",                                    (void *) player_stop_play},
        {"release"."()V",                                    (void *) player_release}
};
// Register JNI methods
int jniRegisterNativeMethods(JNIEnv *env, const char *className, const JNINativeMethod *methods,
                             int count) {
    int res = -1;
    jclass cls = env->FindClass(className);
    if (cls != nullptr) {
        int ret = env->RegisterNatives(cls, methods, count);
        if (ret == JNI_OK) {
            res = 0;
        }
    }
    env->DeleteLocalRef(cls);
    return res;
}

JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env = nullptr;
    jint result = -1;
    if (vm->GetEnv((void **) (&env), JNI_VERSION_1_6) != JNI_OK) {
        return result;
    }
    jniRegisterNativeMethods(env, cls_player, player_methods, arrayLen(player_methods));
    return JNI_VERSION_1_6;
}

JNIEXPORT void JNI_OnUnload(JavaVM *jvm, void *reserved) {}

#ifdef __cplusplus
}
#endif
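The native layer above saves the JavaVM and a global reference to the Java callback object, but the code that actually calls back into Java lives in lyjplayer.cpp in the demo and is not listed in this article. A minimal sketch of what LyjPlayer::callbackState could look like, assuming the Kotlin VideoCallBack interface exposes a method such as onStateChange(Int) (the method name is hypothetical) and that PlayState is a plain enum whose values match the state constants in LyjPlayer:

void LyjPlayer::callbackState(JNIEnv *env, PlayState state) {
    if (!callback || !env) {
        return;
    }
    jclass cls = env->GetObjectClass(callback);
    // "onStateChange" is a hypothetical callback method taking one of the state constants defined in LyjPlayer
    jmethodID mid = env->GetMethodID(cls, "onStateChange", "(I)V");
    if (mid) {
        env->CallVoidMethod(callback, mid, static_cast<jint>(state));
    } else {
        env->ExceptionClear();
    }
    env->DeleteLocalRef(cls);
}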

Main player logic

Simple ideas

  1. Start a thread that continuously fetches the stream, decodes the bitstream, and puts the decoded data into a queue
  2. Calculate the duration of each frame from the stream information, and start a scheduled task that continuously takes RGB data from the queue and draws it (a minimal sketch follows this list)
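
A minimal sketch of this structure, condensed from the full code below (a producer loop that decodes into a blocking queue, and a timer that consumes from it):

// Producer: the decode thread keeps reading packets and pushes converted RGBA frames into the queue
task = thread([this] {
    while (playing) {
        if (av_read_frame(formatContext, packet) < 0) continue;
        decodeFrame();           // decodes, converts to RGBA and pushes into the queue
        av_packet_unref(packet);
    }
});
// Consumer: draw one frame from the queue every `duration` milliseconds
timer.setInterval([this] {
    render();
}, duration);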

Code implementation

lyjplayer.h

#ifndef LYJPLAYER_H // include guard; name assumed, added so the #endif at the end of this header has a match
#define LYJPLAYER_H

#ifdef __cplusplus
extern "C" {
#endif

#include <libavformat/avformat.h>
#include <libswscale/swscale.h>

#ifdef __cplusplus
}
#endif

#include <jni.h>
#include <android/native_window.h>
#include <atomic>
#include <thread>
// LinkedBlockingQueue and Timer are helper classes defined elsewhere in the demo project

using namespace std;

struct FrameData {
    AVFrame *frame;
    // The actual data of the frame
    uint8_t *buffer;
};

class LyjPlayer {
private:
    const char *url;
    int width = 0;
    int height = 0;
    atomic_bool playing = {false};
    // AVFormatContext, used to demux container formats such as FLV, AVI, RMVB, MP4
    AVFormatContext *formatContext = nullptr;
    AVCodecContext *codecContext = nullptr;
    int buffer_size;
    AVFrame *frame = nullptr, *temp = nullptr;
    AVPacket *packet = nullptr;
    SwsContext *sws_context = nullptr;
    uint8_t *buffer = nullptr;
    ANativeWindow_Buffer windowBuffer;
    thread task;
    // Record the frame number
    int index;
    // Buffer received by the network
    LinkedBlockingQueue<FrameData> queue;
    // Timer
    Timer timer;

    int decodeFrame();

    int render();

    int destroyPlay();

    void callbackState(JNIEnv *env, PlayState state);

    void callbackError(JNIEnv *env, PlayError error);
public:
    JavaVM *vm = nullptr;
    jobject callback = nullptr;
    ANativeWindow *window = nullptr;

    LyjPlayer();

    int init();

    void startPlay(const char *url);

    int stopPlay();

    void release();

    virtual ~LyjPlayer();

};
#endif
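LinkedBlockingQueue and Timer in this header are small helper classes from the demo project and are not listed in this article. A minimal sketch of what the blocking queue might look like (an assumed implementation; the demo's version may differ):

#include <condition_variable>
#include <mutex>
#include <queue>

template<typename T>
class LinkedBlockingQueue {
private:
    std::queue<T> data;
    std::mutex mtx;
    std::condition_variable cond;
public:
    // Producer side: called from the decode thread
    void push(const T &item) {
        std::lock_guard<std::mutex> lock(mtx);
        data.push(item);
        cond.notify_one();
    }
    // Consumer side: blocks until a frame is available, called from the timer task
    T pop() {
        std::unique_lock<std::mutex> lock(mtx);
        cond.wait(lock, [this] { return !data.empty(); });
        T item = data.front();
        data.pop();
        return item;
    }
};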

lyjplayer.cpp

Initialization

int LyjPlayer::init() {
    // This is called because MediaCodec hardware decoding uses JNI internally (av_jni_set_java_vm is declared in libavcodec/jni.h)
    av_jni_set_java_vm(vm, 0);
    return 0;
}

Start playing

Timer is a simple timer task that draws one frame every 40 ms, keeping the frame rate stable. The 40 ms interval is calculated from the frame rate and the time base of the video stream. In this case the time base of the stream is 1/1000, which is equivalent to dividing one second into 1000 parts, so at 25 fps each frame lasts 1000 / 25 = 40 ms.
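
Timer is likewise a helper class from the demo and is not listed here. A minimal sketch of a setInterval-style timer under that assumption (the demo's implementation may differ):

#include <atomic>
#include <chrono>
#include <functional>
#include <thread>

class Timer {
private:
    std::atomic_bool running{false};
    std::thread worker;
public:
    // Run `callback` every `intervalMs` milliseconds on a background thread
    void setInterval(std::function<void()> callback, int intervalMs) {
        running = true;
        worker = std::thread([this, callback, intervalMs] {
            while (running) {
                std::this_thread::sleep_for(std::chrono::milliseconds(intervalMs));
                if (running) {
                    callback();
                }
            }
        });
    }
    // Stop the periodic task and wait for the worker thread to exit
    void stop() {
        running = false;
        if (worker.joinable()) {
            worker.join();
        }
    }
};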

To use MediaCodec hardware decoding, you only need to select the corresponding decoder.

void LyjPlayer::startPlay(const char *url) {
    this->url = url;
    playing = true;
    if (task.joinable()) {
        task.join();
    }
    // Fetch the stream decoder thread
    task = thread([this] {
        JNIEnv *env = nullptr;
        int ret = vm->AttachCurrentThread(&env, nullptr);
        avformat_network_init();
        formatContext = avformat_alloc_context();
        // Open the file
        LOGE("Connecting");
        ret = avformat_open_input(&formatContext, this->url, nullptr, nullptr);
        if (ret < 0) {
            LOGE("Failed to open file, code:%d msg:%s", ret, av_err2str(ret));
            callbackError(env, PlayError::CONNECT_TIMEOUT);
            vm->DetachCurrentThread();
            destroyPlay();
            return ret;
        }
        callbackState(env, PlayState::CONNECTED);
        LOGE("Successful connection to streaming media.");
        ret = avformat_find_stream_info(formatContext, nullptr);
        if (ret < 0) {
            LOGE("Failed to find flow %s", av_err2str(ret));
            callbackError(env, PlayError::ERROR_STREAM);
            vm->DetachCurrentThread();
            destroyPlay();
            return ret;
        }
        int index = -1;
        for (int i = 0; i < formatContext->nb_streams; i++) {
            // Find the video stream. If there is audio, there is more than one stream, so you need to find it
            if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                index = i;
                break;
            }
        }
        if (index == -1) {
            LOGE("No video stream found.");
            callbackError(env, PlayError::NONE_VIDEO_STREAM);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        AVStream *videoStream = formatContext->streams[index];
        AVCodecParameters *params = videoStream->codecpar;
        LOGE("AVCodecParameters id:%d, width:%d, height%d", params->codec_id, params->width,
             params->height);
        // Find the decoder
        AVCodecID codecId = videoStream->codecpar->codec_id;
        AVCodec *codec = nullptr;
        // Use H.264 hardware decoding (MediaCodec)
        if (codecId == AV_CODEC_ID_H264) {
            codec = avcodec_find_decoder_by_name("h264_mediacodec");
            if (codec == nullptr) {
                LOGE("can not find mediacodec");
                codec = avcodec_find_decoder(codecId);
            } else {
                LOGE("Use a hard solution."); }}if (codec == nullptr) {
            LOGE("No decoder found.");
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        codecContext = avcodec_alloc_context3(codec);
        // Copy the bit stream configuration to the decoder
        avcodec_parameters_to_context(codecContext, videoStream->codecpar);
        ret = avcodec_open2(codecContext, codec, nullptr);
        if (ret < 0) {
            LOGE("Failed to initialize decoder :%s", av_err2str(ret));
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        this->width = codecContext->width;
        this->height = codecContext->height;
        buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
        temp = av_frame_alloc();
        packet = av_packet_alloc();
        // Create a format conversion mode to convert YUV data to RGBA
        sws_context = sws_getContext(width, height, codecContext->pix_fmt, width,
                                     height, AV_PIX_FMT_RGBA, SWS_BICUBIC,
                                     nullptr, nullptr, nullptr);
        // Set the window parameters
        if (ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888) < 0) {
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            LOGE("Failed to initialize play window");
            return -1;
        }
        // Get the frame rate
        double fps = av_q2d(videoStream->avg_frame_rate);
        AVRational timebase = videoStream->time_base;
        // Calculate the duration of each frame in milliseconds
        int duration = static_cast<int>(timebase.den / timebase.num / fps / (timebase.den / 1000));
        LOGE("videoStream FPS %lf, duration %d", fps, duration);
        // Draw regularly, keeping the frame rate
        timer.setInterval([this] {
            // Draw a frame
            render();
        }, duration);
        while (playing) {
            // Read a packet from the stream
            ret = av_read_frame(formatContext, packet);
            if (ret < 0) {
                continue;
            }
            if (packet->stream_index == index) {
                // Decode a frame
                decodeFrame();
            }
            av_packet_unref(packet);
        }
        vm->DetachCurrentThread();
        return 0;
    });
}

DecodeFrame decodes data

int LyjPlayer::decodeFrame() {
    int ret = avcodec_send_packet(codecContext, packet);
    if (ret == AVERROR(EAGAIN)) {
        ret = 0;
    } else if (ret < 0) {
        LOGE("avcodec_send_packet err code: %d, msg:%s", ret, av_err2str(ret));
        av_packet_free(&packet);
        vm->DetachCurrentThread();
        destroyPlay();
        return -1;
    }
    LOGE("send a packet");
    while (ret >= 0) {
        ret = avcodec_receive_frame(codecContext, temp);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            // All frames from this packet have been consumed
            return 0;
        } else if (ret < 0) {
            LOGE("avcodec_receive_frame error %s", av_err2str(ret));
            av_packet_free(&packet);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        AVFrame *frame = av_frame_alloc();
        uint8_t *buffer = static_cast<uint8_t *>(av_malloc(buffer_size));
        av_image_fill_arrays(frame->data, frame->linesize, buffer, AV_PIX_FMT_RGBA, width, height,
                             1);
        // Convert frame data to RGBA format
        sws_scale(sws_context, temp->data, temp->linesize, 0, codecContext->height,
                  frame->data, frame->linesize);
        FrameData frameData = {frame, buffer};
        queue.push(frameData);
    }
    return ret;
}

Displaying each frame

int LyjPlayer::render() {
    int ret = 0;
    JNIEnv *env = nullptr;
    FrameData frameData = queue.pop();
    AVFrame *frame = frameData.frame;
    uint8_t *buffer = frameData.buffer;
    // Notify the callback when the first frame is about to be drawn
    if (index == 0) {
        ret = vm->AttachCurrentThread(&env, nullptr);
        callbackState(env, PlayState::START);
    }
    index++;
    ret = ANativeWindow_lock(window, &windowBuffer, nullptr);
    if (ret < 0) {
        LOGE("cannot lock window");
    } else {
        uint8_t *bufferBits = (uint8_t *) windowBuffer.bits;
        // Copy the RGBA data line by line into the byte buffer of the ANativeWindow
        for (int h = 0; h < height; h++) {
            // RGBA has 4 channels, so each pixel takes 4 bytes, hence stride * 4
            memcpy(bufferBits + h * windowBuffer.stride * 4,
                   buffer + h * frame->linesize[0], static_cast<size_t>(frame->linesize[0]));
        }
        ANativeWindow_unlockAndPost(window);
    }
    av_free(buffer);
    av_frame_free(&frame);
    if (env) {
        vm->DetachCurrentThread();
    }
    return ret;
}

That is basically the whole process. The code above is the main code for pulling and playing the stream; the comments explain it fairly clearly, so I won't repeat the details here, and the rest of the code can be found in the demo. You can see that the APIs used for pushing and pulling streams are largely the same; mainly the order of the calls changes.