
Recording video from a webcam on Linux with the GStreamer framework

2013-01-14 09:38
author: CarlsonLee (carlsonlee.freec@hotmail.com). This code is part of freecamera; the freecamera source is available at http://gitorious.org/freecamera
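The listing below is the video-recording middleware of freecamera. It is written against the GStreamer 0.10 API and builds, roughly, the following recording pipeline (camgrub is a custom freecamera element, not a stock GStreamer plugin; it hands every raw frame to a callback so the frame can be shown on screen and added to the review buffer while it is also being encoded):

    v4l2src -> ffmpegcolorspace -> camgrub -> theoraenc -> oggmux -> filesink
    pulsesrc -> vorbisenc ------------------------------------^

Recording runs in its own thread with its own GMainLoop. cammw_start_record_video() spawns that thread and waits on a GMutex until the pipeline is playing; cammw_stop_record_video() quits the loop to end the recording.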

#include <glib.h>
#include <string.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <sys/time.h>   /* gettimeofday(), used by getTickCount() below */
#include <pthread.h>

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>

#include "cam_midware.h"
#include "cam_global.h"
#include "cam_display.h"
#include "cam_files.h"
#include "color_space.h"
#include "review_data.h"
#include "cam_ui.h"
#include "cam_utils.h"
#include "cam_err.h"

#define SKIP_FRAMES 10

/* Shared state between the UI thread and the video-capture thread. */
static struct tag_CAM_MW_DATA
{
    gint        camera_status;    /* CAM_STATUS_RUNNING / CAM_STATUS_STOPPED */
    GMainLoop  *video_cap_loop;   /* main loop driving the capture pipeline */
    char       *filename;         /* output file of the recording */
    gint        skip_frames;      /* frames to drop before feeding the review buffer */
    GMutex     *mutex;            /* start-up handshake with cammw_start_record_video() */
} cam_video_data;

static gboolean video_bus_call (GstBus     *bus,
                       GstMessage *msg,
                       gpointer    data)
{
    GMainLoop *loop = (GMainLoop *) data;
    bus = bus;
    switch (GST_MESSAGE_TYPE (msg))
    {
    case GST_MESSAGE_EOS:
        g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR:
        {
            gchar *debug;
            GError *error;
            gst_message_parse_error (msg, &error, &debug);
            g_print ("Error**##: %s, %s\n", error->message, debug);
            g_free (debug);
            g_error_free (error);
            g_main_loop_quit (loop);
        }
        break;
    default:
        break;
    }
    return TRUE;
}

static gboolean link_video_cap_src(GstElement *src, GstElement *sink)
{
    gboolean link_ok = FALSE;
    GstCaps *caps;
    guint width  = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].width;
    guint height = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].height;

    caps = gst_caps_new_simple ("video/x-raw-yuv",
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height, NULL);
    link_ok = gst_element_link_filtered (src, sink, caps);
    gst_caps_unref (caps);
    return link_ok;
}

static gboolean link_video_cap_pp(GstElement *src, GstElement *sink)
{
    gboolean link_ok = FALSE;
    GstCaps *caps;
    guint width  = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].width;
    guint height = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].height;

    caps = gst_caps_new_simple ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height, NULL);
    link_ok = gst_element_link_filtered (src, sink, caps);
    gst_caps_unref (caps);
    return link_ok;
}

static gboolean link_video_cap_enc(GstElement *src, GstElement *sink)
{
    gboolean link_ok = FALSE;
    GstCaps *caps;
    guint width  = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].width;
    guint height = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].height;

    caps = gst_caps_new_simple ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
            "framerate", GST_TYPE_FRACTION, 30, 1,
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height, NULL);
    link_ok = gst_element_link_filtered (src, sink, caps);
    gst_caps_unref (caps);
    return link_ok;
}

/* Wall-clock time in milliseconds, used for the fps printout in raw_captured();
 * returned as long so the value is not truncated to a 32-bit int. */
long getTickCount()
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return (long)tv.tv_sec * 1000 + tv.tv_usec / 1000;
}

static gint frames = 0;
static long last_time = 0;

static void raw_captured(void* data, guint size, guint64 timestamp)
{
    guint width  = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].width;
    guint height = cam_global_data.cam_res.video_res[cam_global_data.cam_res.video_res_cur].height;

    /* Optionally burn the timestamp overlay into the raw frame. */
    if(cam_global_data.photo_timestamp)
        camui_mask_timestamp(data, width, height);

    /* After the first SKIP_FRAMES frames, keep a (possibly downscaled) copy
     * of each frame for the review buffer. */
    if(cam_video_data.skip_frames-- <= 0)
    {
        void* review_data = malloc(cam_global_data.preview_data_width*cam_global_data.preview_data_height*3/2);
        if(review_data == NULL)
            return;
        if(width != cam_global_data.preview_data_width || height != cam_global_data.preview_data_height)
        {
            resample_yv12(review_data,
                        cam_global_data.preview_data_width,
                        cam_global_data.preview_data_height,
                        data, width, height, SCALE_TYPE_BILINEAR);
        }
        else
        {
            memcpy(review_data, data, cam_global_data.preview_data_width*cam_global_data.preview_data_height*3/2);
        }
        camrev_append(review_data, cam_video_data.filename);
    }

    /* Print the frame rate measured over the last 30 frames. */
    frames++;
    if(frames%30 == 0)
    {
        g_print("fps: %ld\n", 30000/(getTickCount()-last_time));
        last_time = getTickCount();
        frames = 0;
    }

    /* Show the frame on screen and update the recorded time
     * (the buffer timestamp is in nanoseconds). */
    camdisp_show(data,
                width*height*3/2,
                width,
                height);
    cam_global_data.video_rec_time = timestamp/1000000000;
}

void* video_cap_thread(void* param)
{
    const char* cam_dev_file = cam_global_data.cam_dev_list->cam_device[cam_global_data.cam_dev_list->current_device].device;
    GstBus      *video_cap_bus = NULL;
    GstElement  *video_cap_pipeline = NULL;
    GstElement  *video_cap_bin = NULL;
    GstElement  *video_cap_src = NULL;
    GstElement  *video_cap_pp = NULL;
    GstElement  *video_cap_trans = NULL;
    GstElement  *video_cap_enc = NULL;
    GstElement  *video_cap_mux = NULL;
    GstElement  *video_cap_sink = NULL;
    GstElement  *audio_cap_src = NULL;
    GstElement  *audio_cap_enc = NULL;

    GstPad *srcpad = NULL;
    GstPad *sinkpad = NULL;

    GstPadLinkReturn lres;
    gboolean ret = FALSE;

    cam_global_data.is_capturing = TRUE;
    cam_video_data.skip_frames = SKIP_FRAMES;
    cammw_stop_preview();
    camrev_init(cam_global_data.preview_data_width, cam_global_data.preview_data_height);
    cam_video_data.video_cap_loop = g_main_loop_new (NULL, FALSE);

    video_cap_pipeline = gst_pipeline_new ("video_cap");
    if(!video_cap_pipeline)
    {
        g_print("%s, %d\n", __FUNCTION__, __LINE__);
        goto exit;
    }
    video_cap_bus = gst_pipeline_get_bus (GST_PIPELINE (video_cap_pipeline));
    gst_bus_add_watch (video_cap_bus, video_bus_call, cam_video_data.video_cap_loop);
    gst_object_unref (video_cap_bus);

    /* v4l2src -> ffmpegcolorspace -> camgrub -> theoraenc -> oggmux -> filesink,
     * plus pulsesrc -> vorbisenc feeding the same muxer.  camgrub is a custom
     * freecamera element that hands every raw frame to the "captured" callback. */
    video_cap_bin   = gst_bin_new("video_cap_bin");
    video_cap_src   = gst_element_factory_make ("v4l2src",          "video_cap_src");
    video_cap_pp    = gst_element_factory_make ("ffmpegcolorspace", "video_pp");
    video_cap_trans = gst_element_factory_make ("camgrub",          "video_enc_trans");
    video_cap_enc   = gst_element_factory_make ("theoraenc",        "video_enc");
    video_cap_mux   = gst_element_factory_make ("oggmux",           "video_mux");
    video_cap_sink  = gst_element_factory_make ("filesink",         "video_writer");
    audio_cap_src   = gst_element_factory_make ("pulsesrc",         "audio_cap_src");
    audio_cap_enc   = gst_element_factory_make ("vorbisenc",        "audio_cap_enc");
    if ( !video_cap_bin ||
        !video_cap_src ||
        !video_cap_pp ||
        !video_cap_trans ||
        !video_cap_enc ||
        !video_cap_mux ||
        !video_cap_sink ||
        !audio_cap_src ||
        !audio_cap_enc
        )
    {
        g_printerr ("One element in capture bin could not be created. Exiting.\n");
        goto exit;
    }

    g_object_set (G_OBJECT (video_cap_src), "device", cam_dev_file, NULL);
    /* Register raw_captured() as the per-frame callback of the camgrub element. */
    g_object_set (G_OBJECT (video_cap_trans), "captured", (guint)raw_captured, NULL);
    cam_video_data.filename = g_strdup(camfiles_get_video_full_name());
    g_object_set (G_OBJECT (video_cap_sink), "location", cam_video_data.filename, NULL);
    gst_bin_add_many (GST_BIN (video_cap_bin),
                     video_cap_src,
                     video_cap_pp,
                     video_cap_trans,
                     video_cap_enc,
                     video_cap_mux,
                     video_cap_sink,
                     audio_cap_src,
                     audio_cap_enc,
                     NULL);

    gst_bin_add (GST_BIN (video_cap_pipeline), video_cap_bin);

    if(!link_video_cap_src (video_cap_src, video_cap_pp))
    {
        g_print("link camera src element failed\n");
        goto exit;
    }
    if(!link_video_cap_pp (video_cap_pp, video_cap_trans))
    {
        g_print("link camera pp element failed\n");
        goto exit;
    }
    if(!link_video_cap_enc (video_cap_trans, video_cap_enc))
    {
        g_print("link camera video enc elements failed\n");
        goto exit;
    }
    if(!gst_element_link_many (audio_cap_src, audio_cap_enc, NULL))
    {
        g_print("link camera audio enc elements failed\n");
        goto exit;
    }

    /* Feed the Theora video stream into request pad sink_0 of the Ogg muxer. */
    sinkpad = gst_element_get_request_pad (video_cap_mux, "sink_0");
    srcpad = gst_element_get_static_pad (video_cap_enc, "src");
    lres = gst_pad_link (srcpad, sinkpad);
    if(lres != GST_PAD_LINK_OK)
    {
        g_print("link camera video mux elements failed\n");
        goto exit;
    }
    gst_object_unref (srcpad);

    /* Feed the Vorbis audio stream into request pad sink_1 of the same muxer. */
    sinkpad = gst_element_get_request_pad (video_cap_mux, "sink_1");
    srcpad = gst_element_get_static_pad (audio_cap_enc, "src");
    lres = gst_pad_link (srcpad, sinkpad);
    gst_object_unref (srcpad);
    if(lres != GST_PAD_LINK_OK)
    {
        g_print("link camera audio mux elements failed\n");
        goto exit;
    }

    if(!gst_element_link_many (video_cap_mux, video_cap_sink, NULL))
    {
        g_print("link camera video filesink elements failed\n");
        goto exit;
    }

    gst_element_set_state (video_cap_pipeline, GST_STATE_PLAYING);
    cam_video_data.camera_status = CAM_STATUS_RUNNING;
    /* Release cammw_start_record_video(), which is waiting on this mutex. */
    g_mutex_unlock(cam_video_data.mutex);
    camutils_play_record_sound();
    g_main_loop_run(cam_video_data.video_cap_loop);
    gst_element_set_state (video_cap_pipeline, GST_STATE_NULL);
    camutils_play_record_sound();
    ret = TRUE;
exit:
    if(video_cap_pipeline)
        gst_object_unref (GST_OBJECT (video_cap_pipeline));
    if(cam_video_data.video_cap_loop)
    {
        g_main_loop_unref(cam_video_data.video_cap_loop);
        cam_video_data.video_cap_loop = NULL;   /* avoid a second unref in cammw_start_record_video() */
    }
    if(cam_video_data.filename)
    {
        g_free(cam_video_data.filename);
        cam_video_data.filename = NULL;
    }
    cam_video_data.camera_status = CAM_STATUS_STOPPED;
    if(!ret)
    {
        camui_show_error_message(CERR_FAILED_RECORD_VIDEO);
        cam_global_data.is_capturing = FALSE;
        g_mutex_unlock(cam_video_data.mutex);
    }
    return NULL;
}

gboolean cammw_start_record_video()
{
    pthread_t thread_id;
    pthread_attr_t attr;
    if(cam_video_data.camera_status == CAM_STATUS_RUNNING)
        return FALSE;
    memset(&cam_video_data, 0, sizeof(cam_video_data));
    cam_video_data.mutex = g_mutex_new();
    /* Hold the mutex until video_cap_thread either reaches PLAYING or fails;
     * the thread releases it in both cases. */
    g_mutex_lock(cam_video_data.mutex);
    pthread_attr_init(&attr);
    pthread_create(&thread_id, NULL, video_cap_thread, NULL);
    while(!g_mutex_trylock(cam_video_data.mutex))
    {
        usleep(100000);
    }
    g_mutex_unlock(cam_video_data.mutex);
    if(cam_video_data.camera_status == CAM_STATUS_STOPPED)
    {
        /* The capture thread failed during set-up. */
        g_mutex_free(cam_video_data.mutex);
        if(cam_video_data.video_cap_loop)
            g_main_loop_unref(cam_video_data.video_cap_loop);
        memset(&cam_video_data, 0, sizeof(cam_video_data));
        return FALSE;
    }
    return TRUE;
}

gboolean cammw_stop_record_video()
{
    if(cam_video_data.camera_status != CAM_STATUS_RUNNING)
        return TRUE;
    g_mutex_free(cam_video_data.mutex);
    /* Quitting the loop returns control to video_cap_thread, which stops the
     * pipeline and cleans up after itself. */
    g_main_loop_quit (cam_video_data.video_cap_loop);
    memset(&cam_video_data, 0, sizeof(cam_video_data));
    cam_global_data.is_capturing = FALSE;
    return TRUE;
}
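
For reference, here is a minimal sketch of how these entry points might be driven from the rest of the application. The toggle handler below is hypothetical and not part of freecamera; only cammw_start_record_video() and cammw_stop_record_video() come from the code above:

/* Hypothetical caller: toggles recording on and off from the UI. */
#include <glib.h>

extern gboolean cammw_start_record_video();
extern gboolean cammw_stop_record_video();

static void on_record_toggled(gboolean active)
{
    if(active)
    {
        /* Spawns video_cap_thread and returns TRUE once the pipeline is PLAYING. */
        if(!cammw_start_record_video())
            g_warning("could not start video recording");
    }
    else
    {
        /* Quits the capture loop; video_cap_thread then stops the pipeline. */
        cammw_stop_record_video();
    }
}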