You are viewing a plain text version of this content. The canonical link for it is here.
Posted to bluesky-commits@incubator.apache.org by pi...@apache.org on 2009/11/30 12:18:38 UTC
svn commit: r885395 [20/23] - in /incubator/bluesky/trunk/RealClass/Student:
./ autom4te.cache/ src/ src/.deps/ src/pic/
Added: incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.cpp
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.cpp?rev=885395&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.cpp (added)
+++ incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.cpp Mon Nov 30 12:18:34 2009
@@ -0,0 +1,1501 @@
+/** \file en_de_screen.cpp Implementation for screen operation:capture,encoder,decoder,sender,receiver
+*
+*
+*Licensed to the Apache Software Foundation (ASF) under one
+*or more contributor license agreements. See the NOTICE file
+*distributed with this work for additional information
+*regarding copyright ownership. The ASF licenses this file
+*to you under the Apache License, Version 2.0 (the
+*"License"); you may not use this file except in compliance
+*with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+*Unless required by applicable law or agreed to in writing,
+*software distributed under the License is distributed on an
+*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+*KIND, either express or implied. See the License for the
+*specific language governing permissions and limitations
+*under the License.
+*/
+#include "en_de_screen.h"
+
+extern int delay_time;
+
+extern void PErrorText(const char* error);
+//CSEncoder class.
+//! Constructor: zero every codec and X11 handle so Init()/InitScreen()
+//! can detect first use and the destructor frees only what was allocated.
+CSEncoder::CSEncoder()
+{
+ m_pFrameBuf = 0;
+ m_pFrame = 0;
+ m_pCodec = 0;
+ m_pCodecCtx = 0;
+ m_bInit = false;
+ m_bInitScreen = false;
+
+ m_image = 0;
+ m_display = 0;
+ m_d = 0;
+ m_width = 0;
+ m_height = 0;
+ m_screen_num = 0;
+
+}
+
+//! Destructor: releases encoder buffers, closes the codec context, and
+//! tears down the X11 capture resources in reverse order of acquisition.
+CSEncoder::~CSEncoder()
+{
+ m_bInitScreen = false;
+ m_bInit = false;
+
+ if (m_pFrameBuf)
+ {
+ free( m_pFrameBuf);
+ m_pFrameBuf = 0;
+ }
+
+ // NOTE(review): m_pFrame comes from avcodec_alloc_frame() (av_malloc);
+ // freeing it with free() is likely mismatched — should presumably be
+ // av_free(). TODO confirm against the ffmpeg version in use.
+ if (m_pFrame)
+ {
+ free( m_pFrame);
+ m_pFrame = 0;
+ }
+ if (m_pCodecCtx)
+ {
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+
+ // free for image
+ if (m_image)
+ {
+ XDestroyImage( m_image);
+ m_image = 0;
+ }
+
+ if (m_display)
+ {
+ if (m_d)
+ {
+ // m_d is the root window; it is only cleared, never destroyed.
+ XClearWindow(m_display, m_d);
+ m_d = 0;
+ }
+
+ XCloseDisplay( m_display);
+ m_display = 0;
+ }
+
+}
+
+//! Initializes the screen grabber and the video encoder.
+//! \param nCodecID ffmpeg codec id to encode with (default MPEG4).
+//! \return true on success; false if the screen, codec, or buffers
+//!         could not be set up. Safe to call again after failure.
+bool CSEncoder::Init(enum CodecID nCodecID /*=CODEC_ID_MPEG4*/)
+{
+ m_bInit = false;
+ /*Init for encode*/
+ avcodec_init();
+ avcodec_register_all();
+
+ // Capture geometry is fixed at compile time (S_CODEC_width/height).
+ if (!InitScreen(S_CODEC_width, S_CODEC_height))
+ return false;
+
+ //new a frame object.
+ // NOTE(review): frame allocated via avcodec_alloc_frame() but released
+ // with free(); av_free() is presumably required — TODO confirm.
+ if (m_pFrame)
+ {
+ free( m_pFrame);
+ m_pFrame = 0;
+ }
+ m_pFrame = avcodec_alloc_frame();
+
+ /* find the mpeg4 video encoder */
+ m_pCodec = avcodec_find_encoder(nCodecID);
+ if (!m_pCodec)
+ {
+ PErrorText("codec not found\n");
+ // fprintf(stderr, );
+ return false;
+ }
+
+ if (m_pCodecCtx)
+ {
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+
+ m_pCodecCtx = avcodec_alloc_context();
+ /* resolution must be a multiple of two */
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+ /* frames per second */
+ m_pCodecCtx->frame_rate = S_CODEC_framerate;
+ m_pCodecCtx->frame_rate_base = S_CODEC_frame_rate_base;
+ m_pCodecCtx->gop_size = S_CODEC_gop_size; /* emit one intra frame every ten frames */
+
+ m_pCodecCtx->bit_rate = 512 * 1024;
+ m_pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
+ m_pCodecCtx->codec_type = CODEC_TYPE_VIDEO;
+
+ /* open it */
+ if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+ {
+ fprintf(stderr, "could not open codec\n");
+ return false;
+ }
+
+ if (m_pFrameBuf)
+ {
+ free( m_pFrameBuf);
+ m_pFrameBuf = 0;
+ }
+
+ // Backing store for the YUV420P frame the encoder reads from.
+ int image_size = avpicture_get_size(PIX_FMT_YUV420P, m_pCodecCtx->width,
+ m_pCodecCtx->height);
+
+ m_pFrameBuf = (uint8_t*) malloc(image_size);
+ if (m_pFrameBuf == 0)
+ {
+ PErrorText("FrameBuf malloc failed!");
+ return false;
+ }
+ /*Init for encode*/
+
+ avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
+ m_pCodecCtx->width, m_pCodecCtx->height);
+ m_pFrame->type = FF_BUFFER_TYPE_SHARED;
+
+ m_bInit = true;
+ return true;
+}
+
+//! Converts a captured XImage to YUV420P and encodes one video frame.
+//! \param image    captured frame (pixel data is normalized in place).
+//! \param pOutBuf  output buffer; a ScreenHeader is written first,
+//!                 followed by the encoded bitstream.
+//! \param nOutsize capacity of pOutBuf; must be >= S_En_OutBufSize.
+//! \return total bytes written (header + bitstream) on success;
+//!         -1 if not initialized, -2 on buffer/conversion error.
+//! NOTE(review): several paths `return false` (== 0) from this
+//! int-returning function, which callers cannot distinguish from
+//! "0 bytes encoded" — should presumably be a negative error code.
+int CSEncoder::EncodeProcess(XImage *image, uint8_t *pOutBuf, int nOutsize)
+{
+
+ if (!m_bInit)
+ return -1;
+
+ if (nOutsize < S_En_OutBufSize)
+ {
+ return -2;
+ }
+
+ //colorconvert
+ // NOTE(review): pixels are read/written through unsigned long*, which is
+ // 8 bytes on LP64 — each access spans two 4-byte pixels. This assumes a
+ // 32-bit platform (or relies on masking); uint32_t* would be correct.
+ // Also assumes image->data stride == width*4 (no row padding) — TODO
+ // confirm against bytes_per_line.
+ int k, j;
+ unsigned long r32, g32, b32, color32;
+
+ for (k = 0; k < m_pCodecCtx->height; k++)
+ {
+ for (j = 0; j < m_pCodecCtx->width; j++)
+ {
+ color32 = *((unsigned long*) (image->data + k * m_pCodecCtx->width
+ * 4 + j * 4));
+ r32 = color32 & (image->red_mask);
+ g32 = color32 & (image->green_mask);
+ b32 = color32 & (image->blue_mask);
+ r32 = ((r32 >> 16) & 255) << 16;
+ g32 = ((g32 >> 8) & 255) << 8;
+ b32 = ((b32) & 255);
+ color32 = r32 | g32 | b32;
+ color32 = color32 & 16777215;
+ *((unsigned long*) (image->data + k * m_pCodecCtx->width * 4 + j
+ * 4)) = color32;
+ }
+ }
+ // Determine the ffmpeg source pixel format from the X visual's masks.
+ GetColorInfo(image, &c_info);
+ switch (image->bits_per_pixel)
+ {
+ case 8:
+ input_pixfmt = PIX_FMT_PAL8;
+ break;
+ case 16:
+ if (image->red_mask == 0xF800 && image->green_mask == 0x07E0
+ && image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB565;
+ }
+ else if (image->red_mask == 0x7C00 && image->green_mask == 0x03E0
+ && image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB555;
+ }
+ else
+ {
+ // NOTE(review): falls through with input_pixfmt unchanged
+ // after printing — no error return here, unlike case 24/32.
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth %i not supported ... aborting\n",
+ image->bits_per_pixel);
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): color masks: r 0x%.6lX g 0x%.6lX b 0x%.6lX\n",
+ image->red_mask, image->green_mask, image->blue_mask);
+ }
+ break;
+ case 24:
+ if (image->red_mask == 0xFF0000 && image->green_mask == 0xFF00
+ && image->blue_mask == 0xFF)
+ {
+ input_pixfmt = PIX_FMT_BGR24;
+ }
+ else if (image->red_mask == 0xFF && image->green_mask == 0xFF00
+ && image->blue_mask == 0xFF0000)
+ {
+ input_pixfmt = PIX_FMT_RGB24;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth not supported ... aborting\n");
+ PErrorText("xtoffmpeg.XImageToFFMPEG()");
+ return false;
+ }
+ break;
+ case 32:
+ if (c_info.alpha_mask == 0xFF000000 && image->green_mask == 0xFF00)
+ {
+ // byte order is relevant here, not endianness
+ // endianness is handled by avcodec, but atm no such thing
+ // as having ABGR, instead of ARGB in a word. Since we
+ // need this for Solaris/SPARC, but need to do the conversion
+ // for every frame we do it outside of this loop, cf. below
+ // this matches both ARGB32 and ABGR32
+ input_pixfmt = PIX_FMT_RGBA32;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+ return false;
+ }
+ break;
+ default:
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+ return false;
+ }
+
+ avpicture_fill(&m_pic_rgb, (uint8_t *) image->data, input_pixfmt,
+ m_pCodecCtx->width, m_pCodecCtx->height);
+ avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
+ m_pCodecCtx->width, m_pCodecCtx->height);
+
+ if (img_convert((AVPicture*) m_pFrame, PIX_FMT_YUV420P, &m_pic_rgb,
+ input_pixfmt, m_pCodecCtx->width, m_pCodecCtx->height) < 0)
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): pixel format conversion not handled ... aborting");
+ return -2;
+ }
+
+ // Prepend the frame geometry so the receiver can resize its decoder.
+ m_ScreenHeader.width = m_width;
+ m_ScreenHeader.height = m_height;
+ memcpy((char *) pOutBuf, &m_ScreenHeader, sizeof(ScreenHeader));
+
+ int ret;
+ // NOTE(review): the writable region after the header is only
+ // nOutsize - sizeof(ScreenHeader) bytes, but the full nOutsize is
+ // passed — a maximal packet could overrun pOutBuf. TODO confirm.
+ ret = avcodec_encode_video(m_pCodecCtx, pOutBuf + sizeof(ScreenHeader),
+ nOutsize, m_pFrame);
+
+ if (ret <= 0)
+ return ret;
+
+ return ret + sizeof(ScreenHeader);
+
+}
+
+//! Opens the X display and caches the root window of the default screen
+//! as the capture source. Idempotent: returns true if already done.
+//! \param width,height capture size stored in m_width/m_height.
+//! \return always true after the first call (errors only printed).
+bool CSEncoder::InitScreen(int width, int height)
+{
+ if (m_bInitScreen)
+ return true;
+ // NOTE(review): XOpenDisplay may return NULL (no $DISPLAY); the result
+ // is used unchecked below — TODO confirm callers guarantee an X server.
+ m_display = XOpenDisplay(NULL);
+
+ m_width = width;
+ m_height = height;
+ m_screen_num = DefaultScreen(m_display);
+
+ m_d = RootWindow(m_display, m_screen_num);
+
+ XWindowAttributes win_attr;
+
+ // Attributes are fetched only as a sanity check; the struct is unused.
+ if (!XGetWindowAttributes(m_display, m_d, &win_attr))
+
+ perror("Can't get window attributes!\n");
+
+ m_image = 0;
+ m_bInitScreen = true;
+ return true;
+}
+
+//! Reports the encoder's frame size (codec context width/height).
+//! \param[out] width,height filled only on success.
+//! \return false if Init() has not completed.
+bool CSEncoder::GetScreenSize(int &width, int &height)
+{
+ if (!m_bInit)
+ return false;
+
+ width = m_pCodecCtx->width;
+ height = m_pCodecCtx->height;
+
+ return true;
+}
+
+//Mouse capture.
+// 16x20 1-bit bitmaps of a stock arrow cursor, one uint16_t row each
+// (MSB = leftmost pixel): the black outline and the white fill. Used by
+// paintMousePointer() to composite a cursor into captured frames.
+uint16_t mousePointerBlack[] =
+{ 0, 49152, 40960, 36864, 34816, 33792, 33280, 33024, 32896, 32832, 33728,
+ 37376, 43264, 51456, 1152, 1152, 576, 576, 448, 0 };
+uint16_t mousePointerWhite[] =
+{ 0, 0, 16384, 24576, 28672, 30720, 31744, 32256, 32512, 32640, 31744, 27648,
+ 17920, 1536, 768, 768, 384, 384, 0, 0 };
+/*
+ * the following function finds out where the mouse pointer is
+ */
+//! Queries the current mouse pointer position on the root window.
+//! \param[out] x,y pointer position, or (-1,-1) if the query fails
+//!             (e.g. pointer on another screen). No-op before Init().
+void CSEncoder::getCurrentPointer(int *x, int *y)
+{
+ Window mrootwindow, childwindow;
+ int dummy;
+
+ if (!m_bInit)
+ return;
+
+ mrootwindow = DefaultRootWindow(m_display);
+
+ if (XQueryPointer(m_display, mrootwindow, &mrootwindow, &childwindow, x, y,
+ &dummy, &dummy, (unsigned int *) &dummy))
+ {
+ // empty
+ // if the XQueryPointer was successful, we have everything we need in the variables
+ // passed as result pointers
+ }
+ else
+ {
+ *x = -1;
+ *y = -1;
+ }
+
+ // NOTE(review): clearing the root window here repaints the whole
+ // desktop background every frame — purpose unclear; verify it is needed.
+ XClearWindow(m_display, mrootwindow);
+}
+
+/*
+ * paint the dummy mouse pointer into a given frame
+ */
+//! Composites the dummy arrow cursor into a captured frame at (*x,*y),
+//! since XGetImage does not include the hardware cursor.
+//! Black bitmap bits clear the RGB channels; white bits saturate them.
+//! Assumes 32 bpp pixels (writes through uint32_t*).
+void CSEncoder::paintMousePointer(int *x, int *y, XImage *image)
+{
+ // only paint a mouse pointer into the dummy frame if the position of the mouse
+ // is within the rectangle defined by the capture frame
+
+ if (*x >= 0 && *x < S_CODEC_width - 25 && //25 is width and height of cursor .
+ *y >= 0 && *y < S_CODEC_height - 25)
+ {
+ int line;
+ uint8_t *im_data = (uint8_t *) image->data;
+
+ // move the cursor to the right starting position
+ im_data += (image->bytes_per_line * (*y)); // shift to right line
+ im_data += (image->bits_per_pixel / 8 * (*x)); // shift to right pixel
+
+ uint32_t *cursor;
+ int width;
+ uint16_t bm_b, bm_w, mask;
+
+ // the dummy mouse pointer is 20 pixels high ...
+ for (line = 0; line < 20; line++)
+ {
+ bm_b = mousePointerBlack[line];
+ bm_w = mousePointerWhite[line];
+
+ // scan each bitmap row MSB-first.
+ mask = (0x0001 << 15);
+
+ // ... and 16 pixels wide
+ for (cursor = (uint32_t*) im_data, width = 0; ((width + *x)
+ < S_CODEC_width && width < 16); cursor++, width++)
+ {
+ if ((bm_b & mask) > 0)
+ {
+ // black outline: zero out all color channels.
+ *cursor &= ((~image->red_mask) & (~image->green_mask)
+ & (~image->blue_mask));
+ }
+ else if ((bm_w & mask) > 0)
+ {
+ // white fill: saturate all color channels.
+ *cursor |= (image->red_mask | image->green_mask
+ | image->blue_mask);
+ }
+ mask >>= 1;
+ }
+ im_data += image->bytes_per_line;
+ }
+
+ }
+}
+
+//Mouse capture.
+
+//! Grabs one full-screen frame from the root window and paints the
+//! cursor into it. The returned XImage is owned by this object and is
+//! destroyed on the next Capture() call or in the destructor.
+//! \param[out] image set to the freshly captured frame on success.
+//! \return false if the screen is not initialized or XGetImage failed.
+//! NOTE(review): callers that test the result with `< 0` (e.g. a thread
+//! loop checking `status < 0`) will never see a failure, since
+//! false == 0 — the return convention should be checked at call sites.
+bool CSEncoder::Capture(XImage **image)
+{
+ int x, y;
+ if (!m_bInitScreen)
+ return false;
+
+ if (m_image)
+ {
+ m_image->f.destroy_image(m_image);
+ m_image = 0;
+ }
+ getCurrentPointer(&x, &y);
+ m_image = XGetImage(m_display, m_d, 0, 0, m_width, m_height, AllPlanes,
+ ZPixmap);
+
+ if (m_image == 0)
+ {
+ PErrorText("GetImage error");
+ return false;
+ }
+ paintMousePointer(&x, &y, m_image);
+
+ *image = m_image;
+ return true;
+}
+
+//! Decomposes the XImage channel masks into shift/depth/max-value info.
+//! For each of R/G/B (and alpha, when bits_per_pixel > depth): the shift
+//! is the number of trailing zero bits in the mask, the bit depth is the
+//! number of consecutive set bits above them. Assumes contiguous masks.
+//! \param image  source of red_mask/green_mask/blue_mask.
+//! \param[out] ci filled in place; no-op when ci is NULL.
+void CSEncoder::GetColorInfo(XImage *image, ColorInfo *ci /* return struct */)
+{
+ unsigned long red_mask, green_mask, blue_mask, alpha_mask;
+ // the shifts are unsigned longs as well
+
+ if (!ci)
+ return;
+
+ // setting shifts and bit_depths to zero
+ ci->red_shift = ci->green_shift = ci->blue_shift = ci->alpha_shift = 0;
+ ci->red_bit_depth = ci->green_bit_depth = ci->blue_bit_depth
+ = ci->alpha_bit_depth = 0;
+
+ red_mask = image->red_mask;
+ if (red_mask > 0)
+ {
+ // shift red_mask to the right till all empty bits have been
+ // shifted out and count how many they were
+ while ((red_mask & 0x01) == 0)
+ {
+ red_mask >>= 1;
+ ci->red_shift++;
+ }
+ // count how many bits are set in the mask = depth
+ while ((red_mask & 0x01) == 1)
+ {
+ red_mask >>= 1;
+ ci->red_bit_depth++;
+ }
+ }
+
+ ci->red_max_val = (1 << ci->red_bit_depth) - 1;
+
+ green_mask = image->green_mask;
+ if (green_mask > 0)
+ {
+ while ((green_mask & 0x01) == 0)
+ {
+ green_mask >>= 1;
+ ci->green_shift++;
+ }
+ while ((green_mask & 0x01) == 1)
+ {
+ green_mask >>= 1;
+ ci->green_bit_depth++;
+ }
+ }
+ ci->green_max_val = (1 << ci->green_bit_depth) - 1;
+
+ blue_mask = image->blue_mask;
+ if (blue_mask > 0)
+ {
+ while ((blue_mask & 0x01) == 0)
+ {
+ blue_mask >>= 1;
+ ci->blue_shift++;
+ }
+ while ((blue_mask & 0x01) == 1)
+ {
+ blue_mask >>= 1;
+ ci->blue_bit_depth++;
+ }
+ }
+ ci->blue_max_val = (1 << ci->blue_bit_depth) - 1;
+
+ /* over all max values */
+ // whatever they are good for
+ ci->max_val = max(ci->red_max_val, ci->green_max_val);
+ ci->max_val = max(ci->blue_max_val, ci->max_val);
+ ci->bit_depth = max(ci->red_bit_depth, ci->green_bit_depth);
+ ci->bit_depth = max(ci->blue_bit_depth, ci->bit_depth);
+ // Padding bits beyond the visual depth are treated as the alpha channel.
+ if (image->bits_per_pixel > image->depth)
+ {
+
+ ci->alpha_mask = ~(image->red_mask | image->blue_mask
+ | image->green_mask);
+ alpha_mask = ci->alpha_mask;
+ if (alpha_mask > 0)
+ {
+ while ((alpha_mask & 0x01) == 0)
+ {
+ alpha_mask >>= 1;
+ ci->alpha_shift++;
+ }
+ while ((alpha_mask & 0x01) == 1)
+ {
+ alpha_mask >>= 1;
+ ci->alpha_bit_depth++;
+ }
+ }
+ ci->alpha_max_val = (1 << ci->alpha_bit_depth) - 1;
+ }
+}
+//CSDecoder class.
+//! Constructor: zero all decoder and X11 display state; real setup
+//! happens in Init() and CreateXImage().
+CSDecoder::CSDecoder()
+{
+ m_pCodec = 0;
+ m_pCodecCtx = 0;
+ m_pOutFrame = 0;
+ m_bInit = false;
+
+ m_display = 0;
+ m_win = 0;
+ m_d = 0;
+ m_image = 0;
+ m_parent = 0;
+
+ m_width = 0;
+ m_height = 0;
+
+}
+
+//! Destructor: releases the display image, the playback window, the X
+//! connection, and finally the codec context and its extradata.
+CSDecoder::~CSDecoder()
+{
+ m_bInit = false;
+
+ if (m_image)
+ {
+ XDestroyImage( m_image);
+ m_image = 0;
+ }
+
+ if (m_display)
+ {
+ // NOTE(review): m_win is only cleared, not XDestroyWindow()ed —
+ // presumably relying on XCloseDisplay to reap it; verify.
+ if (m_win)
+ {
+ XClearWindow(m_display, m_win);
+ m_win = 0;
+ }
+ if (m_d)
+ {
+ XClearWindow(m_display, m_d);
+ m_d = 0;
+ }
+
+ XCloseDisplay( m_display);
+ m_display = 0;
+ }
+
+ // NOTE(review): m_pOutFrame from avcodec_alloc_frame() is released with
+ // free(); av_free() is presumably the matching call — TODO confirm.
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+
+}
+
+//! Creates the playback window as a child of `parent` at (x,y) with the
+//! given size, paints an optional splash image ("pic/screen.bmp"), and
+//! grabs an XImage used as the decode target. Requires Init() first.
+//! \return true on success, false when not initialized or XGetImage
+//!         fails.
+//! NOTE(review): on img_convert failure this returns -1 from a bool
+//! function (converts to true!) and leaves m_imagemutex locked — both
+//! look like genuine bugs. Also, the first gdk_pixbuf_new_from_file()
+//! probe result is leaked when the file exists (it is loaded twice).
+bool CSDecoder::CreateXImage(Drawable parent, int x, int y, int width,
+ int height)
+{
+ int screen_num;
+
+ GdkPixbuf *original_pixbuf;
+
+ gint original_width, original_height;
+ GdkColorspace original_color;
+ gboolean original_alpha;
+ gboolean pixbuf_has_alpha;
+ XSetWindowAttributes win_attr;
+ XImage *p_image = NULL;
+ if (!m_bInit)
+ return false;
+
+ CloseXImage();
+
+ m_imagemutex.Lock();
+
+ m_display = XOpenDisplay(NULL);
+ screen_num = DefaultScreen(m_display);
+ m_gc = DefaultGC(m_display, screen_num);
+ m_d = RootWindow(m_display, screen_num);
+
+ // win_attr is passed uninitialized but with a zero value-mask, so no
+ // attribute is actually read from it.
+ m_win = XCreateWindow(m_display, parent, x, y, width, height, 1,
+ XDefaultDepth(m_display, screen_num), InputOutput, CopyFromParent,
+ 0, &win_attr);
+
+ if (gdk_pixbuf_new_from_file("pic/screen.bmp", NULL) == NULL)
+ {
+ // No splash image available: inherit the parent's background.
+ XSetWindowBackgroundPixmap(m_display, m_win, ParentRelative);
+ XMapWindow(m_display, m_win);
+ }
+ else
+ {
+ // Load the splash bitmap and convert it RGB24 -> RGBA32 into a
+ // pixmap used as the window background.
+ original_pixbuf = gdk_pixbuf_new_from_file("pic/screen.bmp", NULL);
+ pixbuf_has_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_color = gdk_pixbuf_get_colorspace(original_pixbuf);
+ original_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_width = gdk_pixbuf_get_width(original_pixbuf);
+ original_height = gdk_pixbuf_get_height(original_pixbuf);
+ printf("original_alpha = %d\n", original_alpha);
+ printf("original_color = %d\n", original_color);
+ printf("original_width = %d\n", original_width);
+ printf("original_height = %d\n", original_height);
+ printf("n_channles = %d\n", gdk_pixbuf_get_n_channels(original_pixbuf));
+
+ Pixmap pixmap = XCreatePixmap(m_display, m_win, original_width,
+ original_height, XDefaultDepth(m_display, screen_num));
+ XSetWindowBackgroundPixmap(m_display, m_win, pixmap);
+
+ p_image = XGetImage(m_display, m_d, 0, 0, original_width,
+ original_height, AllPlanes, ZPixmap);
+ if (!p_image)
+ {
+ // NOTE(review): exits the whole process with the mutex held.
+ printf("error\n");
+ exit(10);
+ }
+
+ AVPicture pic_rgb24, pic_rgb32;
+ if (m_display && p_image && pixmap)
+ {
+ avpicture_fill(&pic_rgb32, (uint8_t*) p_image->data,
+ PIX_FMT_RGBA32, original_width, original_height);
+ avpicture_fill(&pic_rgb24, gdk_pixbuf_get_pixels(original_pixbuf),
+ PIX_FMT_RGB24, original_width, original_height);
+
+ if (img_convert(&pic_rgb32, PIX_FMT_RGBA32, &pic_rgb24,
+ PIX_FMT_RGB24, original_width, original_height) < 0)
+ {
+ printf("Error pixel format conversion");
+ return -1;
+ }
+
+ XPutImage(m_display, pixmap, m_gc, p_image, 0, 0, 0, 0,
+ original_width, original_height);
+
+ }
+
+ XMapWindow(m_display, m_win);
+ XFreePixmap(m_display, pixmap);
+ gdk_pixbuf_unref(original_pixbuf);
+ XDestroyImage(p_image);
+ }
+
+ // Grab the image that DecodeProcess() will render decoded frames into.
+ m_image = XGetImage(m_display, m_d, 0, 0, m_width, m_height, AllPlanes,
+ ZPixmap);
+ if (!m_image)
+ {
+ printf("error\n");
+ m_imagemutex.Unlock();
+ return false;
+ }
+
+ m_imagemutex.Unlock();
+ m_parent = parent;
+ return true;
+}
+
+//! Tears down the playback window, display image, and X connection
+//! created by CreateXImage(). Serialized against DecodeProcess() via
+//! m_imagemutex. No-op before Init().
+void CSDecoder::CloseXImage()
+{
+ if (!m_bInit)
+ return;
+
+ m_imagemutex.Lock();
+ if (m_image)
+ {
+ XDestroyImage( m_image);
+ // m_image->f.destroy_image(m_image);
+ m_image = 0;
+ }
+
+ if (m_display)
+ {
+ if (m_win)
+ {
+ XUnmapWindow(m_display, m_win);
+ XClearWindow(m_display, m_win);
+ m_win = 0;
+ }
+
+ if (m_d)
+ {
+ XClearWindow(m_display, m_d);
+ m_d = 0;
+ }
+
+ XCloseDisplay( m_display);
+ m_display = 0;
+
+ }
+
+ m_imagemutex.Unlock();
+
+}
+
+//! Closes the decoder: frees the output frame, the codec extradata, and
+//! the codec context. Marks the object uninitialized so Init()/
+//! ResetCodec() can rebuild it.
+void CSDecoder::CloseCodec()
+{
+ m_bInit = false;
+
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+
+}
+//! Rebuilds the decoder for a new frame geometry (sender resolution
+//! changed), keeping the same codec id, then re-grabs the display image.
+//! \param width,height new frame size.
+//! \return true on success.
+//! NOTE(review): reads m_pCodecCtx->codec_id before the NULL check —
+//! crashes if called before Init(). On XGetImage failure it calls
+//! m_imagemutex.Unlock() without a matching Lock(). Verify call order.
+bool CSDecoder::ResetCodec(const int width, const int height)
+{
+ CodecID nCodecID = m_pCodecCtx->codec_id;
+ CloseCodec();
+ m_bInit = false;
+
+ m_width = width;
+ m_height = height;
+
+ // find the video decoder
+ m_pCodec = avcodec_find_decoder(nCodecID);
+ if (!m_pCodec)
+ {
+ PErrorText("Codec not found");
+ return false;
+ }
+
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ m_pOutFrame = avcodec_alloc_frame();
+
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+ m_pCodecCtx = avcodec_alloc_context();
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+ /* frames per second */
+ m_pCodecCtx->frame_rate = CSEncoder::S_CODEC_framerate;
+ m_pCodecCtx->frame_rate_base = CSEncoder::S_CODEC_frame_rate_base;
+ m_pCodecCtx->gop_size = CSEncoder::S_CODEC_gop_size; /* emit one intra frame every ten frames */
+
+ m_pCodecCtx->bit_rate = 512 * 1024;
+ m_pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
+ m_pCodecCtx->codec_type = CODEC_TYPE_VIDEO;
+
+ // NOTE(review): extradata buffer is S_De_ExtraHeaderSize bytes but the
+ // advertised size is S_De_ExtraHeaderSize / 8 — inconsistent (same in
+ // Init()); presumably one of the two is wrong. TODO confirm.
+ m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+ m_pCodecCtx->extradata = malloc(S_De_ExtraHeaderSize);
+ if (m_pCodecCtx->extradata == 0)
+ {
+ return false;
+ }
+ m_pCodecCtx->extradata_size = S_De_ExtraHeaderSize / 8;
+
+ // we dont send complete frames
+ if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
+ m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
+
+ // open it
+ if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+ {
+ PErrorText("could not open codec");
+ return false;
+ }
+
+ if (m_image)
+ {
+ XDestroyImage( m_image);
+ m_image = 0;
+ }
+
+ m_image = XGetImage(m_display, m_parent, 0, 0, m_width, m_height,
+ AllPlanes, ZPixmap);
+ if (!m_image)
+ {
+ PErrorText("GetImage error");
+ m_imagemutex.Unlock();
+ return false;
+ }
+
+ m_bInit = true;
+ return true;
+
+}
+
+//! Initializes the video decoder for the given frame size and codec.
+//! Idempotent: returns true immediately if already initialized.
+//! \param width,height expected incoming frame size.
+//! \param nCodecID ffmpeg codec id (matches the sender's encoder).
+//! \return true on success.
+bool CSDecoder::Init(int width, int height, enum CodecID nCodecID)
+{
+
+ if (m_bInit)
+ return true;
+
+ avcodec_init();
+ avcodec_register_all();
+
+ if (!m_imagemutex.IsInitialized())
+ {
+ if (m_imagemutex.Init() < 0)
+ return false;
+ }
+
+ m_width = width;
+ m_height = height;
+
+ // find the video decoder
+ m_pCodec = avcodec_find_decoder(nCodecID);
+ if (!m_pCodec)
+ {
+ PErrorText("Codec not found");
+ return false;
+ }
+
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ m_pOutFrame = avcodec_alloc_frame();
+
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+ m_pCodecCtx = avcodec_alloc_context();
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+
+ // put sample parameters
+ /* resolution must be a multiple of two */
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+ /* frames per second */
+ m_pCodecCtx->frame_rate = CSEncoder::S_CODEC_framerate;
+ m_pCodecCtx->frame_rate_base = CSEncoder::S_CODEC_frame_rate_base;
+ m_pCodecCtx->gop_size = CSEncoder::S_CODEC_gop_size; /* emit one intra frame every ten frames */
+
+ m_pCodecCtx->bit_rate = 512 * 1024;
+ m_pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
+ m_pCodecCtx->codec_type = CODEC_TYPE_VIDEO;
+
+ // NOTE(review): buffer is S_De_ExtraHeaderSize bytes, advertised size
+ // is S_De_ExtraHeaderSize / 8 — inconsistent; TODO confirm intent.
+ m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+ m_pCodecCtx->extradata = malloc(S_De_ExtraHeaderSize);
+ if (m_pCodecCtx->extradata == 0)
+ {
+ return false;
+ }
+ m_pCodecCtx->extradata_size = S_De_ExtraHeaderSize / 8;
+
+ // we dont send complete frames
+ if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
+ m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
+
+ // open it
+ if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+ {
+ PErrorText("could not open codec");
+ return false;
+ }
+
+ m_bInit = true;
+ return true;
+}
+
+//! Decodes a buffer of encoded video (possibly several chunks of up to
+//! S_De_INBUF_SIZE bytes), converts the first decoded picture of each
+//! chunk to the window's pixel format, and blits it to m_win.
+//! \param encodeddata_v      encoded bitstream (header already removed).
+//! \param encodeddatasize_v  byte count of encodeddata_v.
+//! \return 0 on success, -1 not initialized, -2 decode error,
+//!         -3 pixel-format conversion error.
+//! NOTE(review): the unsupported-depth branches below `return false`
+//! (== 0, indistinguishable from success) and do so WITHOUT unlocking
+//! m_imagemutex — any later caller deadlocks. These paths should unlock
+//! and return a negative code.
+int CSDecoder::DecodeProcess(uint8_t *encodeddata_v,
+ const int encodeddatasize_v)
+{
+ bool isPaintPic;
+ int realsize, i;
+ int got_picture;
+ if (!m_bInit)
+ {
+ return -1;
+ }
+
+ int left = 0;
+ int len;
+ m_imagemutex.Lock();
+ for (i = 0;; i++)
+ {
+ // Size of the i-th chunk; the last chunk may be short.
+ if (encodeddatasize_v - i * S_De_INBUF_SIZE >= S_De_INBUF_SIZE)
+ realsize = S_De_INBUF_SIZE;
+ else
+ realsize = encodeddatasize_v - i * S_De_INBUF_SIZE;
+
+ if (realsize <= 0)
+ break;
+
+ left = 0;
+ isPaintPic = false;
+ while (realsize > 0)
+ {
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+ len = avcodec_decode_video(m_pCodecCtx, m_pOutFrame, &got_picture,
+ (encodeddata_v + i * S_De_INBUF_SIZE + left), realsize);
+
+ if (len < 0)
+ {
+ PErrorText("Error while decoding");
+ m_imagemutex.Unlock();
+ return -2;
+ }
+
+ // Re-derive the output pixel format from the window image's
+ // channel masks (same logic as CSEncoder::EncodeProcess).
+ if (m_image != 0)
+ {
+
+ m_encoder.GetColorInfo(m_image, &c_info);
+ switch (m_image->bits_per_pixel)
+ {
+ case 8:
+ input_pixfmt = PIX_FMT_PAL8;
+ break;
+ case 16:
+ if (m_image->red_mask == 0xF800 && m_image->green_mask
+ == 0x07E0 && m_image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB565;
+ }
+ else if (m_image->red_mask == 0x7C00 && m_image->green_mask
+ == 0x03E0 && m_image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB555;
+ }
+ else
+ {
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth %i not supported ... aborting\n",
+ m_image->bits_per_pixel);
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): color masks: r 0x%.6lX g 0x%.6lX b 0x%.6lX\n",
+ m_image->red_mask, m_image->green_mask,
+ m_image->blue_mask);
+ }
+ break;
+ case 24:
+ if (m_image->red_mask == 0xFF0000 && m_image->green_mask
+ == 0xFF00 && m_image->blue_mask == 0xFF)
+ {
+ input_pixfmt = PIX_FMT_BGR24;
+ }
+ else if (m_image->red_mask == 0xFF && m_image->green_mask
+ == 0xFF00 && m_image->blue_mask == 0xFF0000)
+ {
+ input_pixfmt = PIX_FMT_RGB24;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth not supported ... aborting");
+ PErrorText("xtoffmpeg.XImageToFFMPEG(): color masks");
+ return false;
+ }
+ break;
+ case 32:
+ if (c_info.alpha_mask == 0xFF000000 && m_image->green_mask
+ == 0xFF00)
+ {
+ // byte order is relevant here, not endianness
+ // endianness is handled by avcodec, but atm no such thing
+ // as having ABGR, instead of ARGB in a word. Since we
+ // need this for Solaris/SPARC, but need to do the conversion
+ // for every frame we do it outside of this loop, cf. below
+ // this matches both ARGB32 and ABGR32
+ input_pixfmt = PIX_FMT_RGBA32;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting\n");
+ return false;
+ }
+ break;
+ default:
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting\n");
+ return false;
+ }
+ }
+
+ if (got_picture)
+ {
+
+ // Only the first complete picture per chunk is painted.
+ if (!isPaintPic)
+ {
+ if (m_display && m_image && m_win)
+ {
+ avpicture_fill(&pic_rgb, (uint8_t*) m_image->data,
+ input_pixfmt, m_width, m_height);
+
+ if (img_convert(&pic_rgb, input_pixfmt,
+ (AVPicture*) m_pOutFrame, PIX_FMT_YUV420P,
+ m_width, m_height) < 0)
+ {
+ PErrorText("Error pixel format conversion");
+ m_imagemutex.Unlock();
+ return -3;
+ }
+
+ XPutImage(m_display, m_win, m_gc, m_image, 0, 0, 0, 0,
+ m_width, m_height);
+ }
+ }
+ isPaintPic = true;
+
+ }
+ // NOTE(review): if len == 0 with no picture, realsize does not
+ // shrink and this inner loop can spin forever — TODO confirm
+ // avcodec_decode_video never returns 0 for this codec.
+ realsize -= len;
+ left += len;
+ }
+ }
+
+ m_imagemutex.Unlock();
+ return 0;
+}
+
+//CScreenSender class.
+
+//! Constructor: sizes the RTP send thread's buffer and clears flags.
+//! NOTE(review): m_pOutBuf is never initialized here, yet Init() tests
+//! `m_pOutBuf == 0` and the destructor frees it when non-zero — if Init()
+//! is never called, the destructor frees an indeterminate pointer.
+//! `m_pOutBuf = 0;` should be added (kept as-is in this doc-only pass).
+CScreenSender::CScreenSender() :
+ m_sendthread(SendBufSize)
+{
+ stop = false;
+ m_bIsRecord = false;
+ m_bInit = 0;
+
+ m_hFile = 0;
+
+}
+
+//! Destructor: stops the capture thread first (it uses m_pOutBuf), then
+//! closes the recording file and releases the encode buffer.
+CScreenSender::~CScreenSender()
+{
+ //first stop thread, because m_pOutBuf is being used by Thread();
+ Stop();
+
+ //close file.
+ if (m_hFile)
+ {
+ fclose( m_hFile);
+ m_hFile = 0;
+ }
+
+ //free buffer.
+ // NOTE(review): relies on m_pOutBuf being zeroed, which the constructor
+ // does not do — see constructor note.
+ if (m_pOutBuf != 0)
+ free( m_pOutBuf);
+}
+
+//! Initializes the sender: RTP send thread on nPort, the output buffer,
+//! and the MPEG4 screen encoder. Idempotent once successful.
+//! \return true on success.
+bool CScreenSender::Init(int nPort)
+{
+ if (m_bInit)
+ return true;
+
+ //init sendthread.
+ if (!m_sendthread.Init(nPort))
+ return false;
+
+ // NOTE(review): m_pOutBuf is uninitialized before the first Init() call
+ // (see constructor), so this check reads an indeterminate value.
+ if (m_pOutBuf == 0)
+ {
+ m_pOutBuf = (uint8_t*) malloc(CSEncoder::S_En_OutBufSize);
+ if (m_pOutBuf == 0)
+ {
+ return false;
+ }
+
+ }
+
+ //Init sencoder.
+ if (!m_sencoder.Init(CODEC_ID_MPEG4))
+ {
+ return false;
+ }
+
+ m_bInit = true;
+ return m_bInit;
+}
+
+//! Starts the send thread and the capture/encode thread, optionally
+//! opening szFile for local recording.
+//! \param szFile    recording path (may be 0).
+//! \param bIsRecord when true and szFile given, frames are also written
+//!                  to the file.
+//! \return 0 on success (or already running); -1 not initialized,
+//!         -2 mutex init failed, -3 fopen failed, -6 thread start failed.
+int CScreenSender::Start(char* szFile /* =0 */, bool bIsRecord /* =false */)
+{
+ if (!m_bInit)
+ return -1;
+
+ if (JThread::IsRunning())
+ return 0;
+
+ if (!stopmutex.IsInitialized())
+ {
+ if (stopmutex.Init() < 0)
+ return -2;
+ }
+
+ stop = false;
+
+ if (!m_recordmutex.IsInitialized())
+ {
+ if (m_recordmutex.Init() < 0)
+ return -2;
+ }
+
+ m_bIsRecord = bIsRecord;
+
+ if (bIsRecord && szFile != 0)
+ {
+ if (m_hFile)
+ {
+ fclose( m_hFile);
+ m_hFile = 0;
+ }
+
+ m_hFile = fopen(szFile, "wb");
+ if (m_hFile == 0)
+ {
+ return -3;
+ }
+
+ }
+
+ if (m_sendthread.Start() < 0)
+ {
+ return -6;
+ }
+
+ if (JThread::Start() < 0)
+ {
+ // NOTE(review): the send thread keeps running on this path.
+ return -6;
+ }
+
+ return 0;
+}
+
+//! Requests the capture thread to stop, waits up to ~1s (100 x 10ms),
+//! then kills it as a last resort; closes the record file and stops the
+//! send thread.
+void CScreenSender::Stop()
+{
+ if (!IsRunning())
+ return;
+
+ stopmutex.Lock();
+ stop = true;
+ stopmutex.Unlock();
+
+ int count = 0;
+ while (1)
+ {
+ if (count >= 100)
+ {
+ // Grace period expired: force-terminate the thread.
+ if (JThread::IsRunning())
+ {
+ JThread::Kill();
+ }
+ break;
+ }
+
+ if (JThread::IsRunning())
+ {
+ count++;
+ usleep(10000);
+ continue;
+ }
+
+ break;
+ }
+
+ stop = false;
+
+ //close file.
+ if (m_hFile)
+ {
+ fclose( m_hFile);
+ m_hFile = 0;
+ }
+
+ m_bIsRecord = false;
+
+ m_sendthread.Stop();
+
+}
+
+//! Toggles local recording; the capture thread samples this flag under
+//! m_recordmutex once per loop iteration.
+void CScreenSender::Record(bool bInRecord /* =true */)
+{
+ if (!m_bInit)
+ return;
+
+ m_recordmutex.Lock();
+ m_bIsRecord = bInRecord;
+ m_recordmutex.Unlock();
+
+}
+
+//! Capture/encode loop body (JThread entry point): paces itself to the
+//! encoder frame rate, captures the screen, encodes, sends over RTP,
+//! and optionally records to file, duplicating frames to keep the
+//! recording in real time.
+//! \return always 0.
+void *CScreenSender::Thread()
+{
+ XImage * pImage;
+ int OutBufSzie;
+
+ JThread::ThreadStarted();
+
+ bool stopthread;
+
+ stopmutex.Lock();
+ stopthread = stop;
+ stopmutex.Unlock();
+
+ bool isrecord;
+ m_recordmutex.Lock();
+ isrecord = m_bIsRecord;
+ m_recordmutex.Unlock();
+
+ int status;
+
+ int64_t pre_time, cur_time;
+ useconds_t delay, delay1;
+
+ // delay1 = microseconds per frame derived from the codec frame rate.
+ if (m_sencoder.m_pCodecCtx->frame_rate != 0)
+ {
+ if (m_sencoder.m_pCodecCtx->frame_rate_base != 0)
+ delay1 = 1000000 * m_sencoder.m_pCodecCtx->frame_rate_base
+ / m_sencoder.m_pCodecCtx->frame_rate;
+ else
+ delay1 = 1000000 / m_sencoder.m_pCodecCtx->frame_rate;
+ }
+ else
+ {
+ delay1 = 1000000;
+ }
+
+ // sleep for sync
+ if (delay_time > 0)
+ {
+ sleep( delay_time);
+ }
+
+ // for Utiltiy rate of CPU
+ cur_time = av_gettime();
+ pre_time = cur_time - delay1;
+
+ // for compensate.
+ int64_t nFrame = 0;
+ int64_t rec_time = 0;
+ int64_t rec_start;
+ rec_start = av_gettime();
+
+ while (!stopthread)
+ {
+ // Sleep off the remainder of the frame period.
+ delay = cur_time - pre_time;
+ if (delay < delay1)
+ {
+ usleep(delay1 - delay);
+ }
+ pre_time = av_gettime();
+
+ // NOTE(review): Capture() returns bool, so `status < 0` is never
+ // true and capture failures are silently treated as success.
+ if ((status = m_sencoder.Capture(&pImage)) < 0)
+ {
+ stopthread = true;
+ continue;
+ }
+ else
+ {
+ OutBufSzie = CSEncoder::S_En_OutBufSize;
+ if ((status = m_sencoder.EncodeProcess(pImage, m_pOutBuf,
+ OutBufSzie)) < 0)
+ {
+ stopthread = true;
+ continue;
+ }
+ else
+ {
+ if (status > 0)
+ {
+ //static int iiii=0;
+ m_sendthread.SendData(m_pOutBuf, status);
+ //iiii ++;
+ //printf("\nscreen send(%d): %d bytes\n", iiii, status);
+
+ if (m_hFile != 0 && isrecord)
+ {
+ // Recording: strip the ScreenHeader; raw bitstream only.
+ fwrite(m_pOutBuf + sizeof(ScreenHeader), status
+ - sizeof(ScreenHeader), 1, m_hFile);
+ nFrame++;
+
+ //add up rec_time;
+ // Re-encode and duplicate frames until the file's
+ // frame count catches up with wall-clock time.
+ rec_time = av_gettime() - rec_start;
+ int i;
+ for (i = 0; rec_time > nFrame * delay1; nFrame++, i++)
+ {
+ // NOTE(review): "%d" with int64_t nFrame is a
+ // format mismatch; should be PRId64/"%lld".
+ printf("\nScreen Frame=%d\n", nFrame);
+ if ((status = m_sencoder.EncodeProcess(pImage,
+ m_pOutBuf, OutBufSzie)) < 0)
+ {
+ printf("\nscreen: encode vital error.");
+ stopthread = true;
+ printf(
+ "\nscreen capture thread stoped by EncodeProcess error!\n");
+ return 0;
+ }
+ if (status > 0)
+ fwrite(m_pOutBuf + sizeof(ScreenHeader), status
+ - sizeof(ScreenHeader), 1, m_hFile);
+ }
+
+ }
+ else
+ {
+ // Not recording: keep the compensation baseline fresh.
+ rec_start = av_gettime();
+ nFrame = 0;
+ }
+
+ }
+
+ // Refresh the record and stop flags for the next iteration.
+ m_recordmutex.Lock();
+ isrecord = m_bIsRecord;
+ m_recordmutex.Unlock();
+
+ stopmutex.Lock();
+ stopthread = stop;
+ stopmutex.Unlock();
+ }
+ }
+
+ cur_time = av_gettime();
+ // printf("\ncur-pre=%d\n", cur-pre);
+ }
+
+ printf("\nscreen capture thread stoped!\n");
+ return 0;
+}
+
+//! Adds an RTP destination for the outgoing screen stream.
+//! \return false if not initialized or the send thread rejects it.
+bool CScreenSender::AddDestination(const RTPIPv4Address &des)
+{
+ if (!m_bInit)
+ return false;
+ if (m_sendthread.AddDestination(des) < 0)
+ return false;
+
+ return true;
+}
+//! Removes all RTP destinations. No-op before Init().
+void CScreenSender::ClearDestinations()
+{
+ if (!m_bInit)
+ return;
+ m_sendthread.ClearDestinations();
+}
+
+//CScreenReceiver class.
+
+//! Constructor: clears init flags; decoder setup happens in Init().
+CScreenReceiver::CScreenReceiver()
+{
+ m_bInit = false;
+ m_ScreenInit = false;
+ // m_playback = 0;
+}
+
+//! Destructor: nothing to release here; the member decoder cleans
+//! itself up and the RTP session is closed via Stop()/Destroy().
+CScreenReceiver::~CScreenReceiver()
+{
+
+}
+//! Initializes the screen decoder at a default 800x600 MPEG4 setup;
+//! ProcessFrame() resizes it when the sender's geometry differs.
+//! \return true on success; idempotent.
+bool CScreenReceiver::Init()
+{
+ if (m_bInit)
+ return m_bInit;
+
+ //init video decoder.
+ if (!m_sdecoder.Init(800, 600, CODEC_ID_MPEG4))
+ {
+ return false;
+ }
+
+ m_bInit = true;
+ return m_bInit;
+
+}
+//! Creates the playback window inside `parent`; thin forwarder to
+//! CSDecoder::CreateXImage.
+bool CScreenReceiver::CreateXImage(Drawable parent, int x, int y, int width,
+ int height)
+{
+ bool ret;
+ //init screen decoder.
+
+ ret = m_sdecoder.CreateXImage(parent, x, y, width, height);
+
+ return ret;
+}
+
+//! Tears down the playback window; forwards to CSDecoder::CloseXImage.
+void CScreenReceiver::CloseXImage()
+{
+ m_sdecoder.CloseXImage();
+}
+
+#define MAX_PACKET_SIZE 10240
+
+//! Creates the RTP receive session on nPort (poll thread enabled, 30fps
+//! timestamp unit, 10KB max packet).
+//! \return 0 on success (or already active), -2 on session failure.
+int CScreenReceiver::Start(int nPort)
+{
+ if (IsActive())
+ return 0;
+
+ //init rtpsession.
+ RTPSessionParams sessParams1;
+ sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+ sessParams1.SetUsePollThread(1); //background thread to call virtual callbacks - set by default, but just to be sure
+ sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+ //setup transmission parameters
+ RTPUDPv4TransmissionParams transParams1;
+ transParams1.SetPortbase(nPort);
+ //CREATE THE SESSION
+ int status1 = Create(sessParams1, &transParams1);
+ if (status1)
+ {
+ // ReportError(status1);
+ return -2; //unable to create the session
+ }
+
+ return 0;
+}
+
+//! Destroys the RTP session created by Start().
+void CScreenReceiver::Stop()
+{
+ Destroy();
+}
+
+//! Handles one reassembled frame: reads the leading ScreenHeader,
+//! resizes the decoder if the sender's geometry changed, then decodes
+//! and displays the remaining bitstream.
+//! NOTE(review): framedata is reinterpreted as ScreenHeader* without a
+//! length check (framelen could be < sizeof(ScreenHeader)), and the
+//! ResetCodec() result is ignored — decoding proceeds even on failure.
+void CScreenReceiver::ProcessFrame(unsigned char* framedata, int framelen)
+{
+ pScreen = (ScreenHeader*) framedata;
+
+ if (pScreen->width != m_sdecoder.m_width || pScreen->height
+ != m_sdecoder.m_height)
+ {
+ m_sdecoder.ResetCodec(pScreen->width, pScreen->height);
+ }
+ m_sdecoder.DecodeProcess(framedata + sizeof(ScreenHeader), framelen
+ - sizeof(ScreenHeader));
+
+}
Added: incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.h?rev=885395&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.h (added)
+++ incubator/bluesky/trunk/RealClass/Student/src/en_de_screen.h Mon Nov 30 12:18:34 2009
@@ -0,0 +1,372 @@
+/** \file en_de_screen.h Classes for screen operation:capture,encoder,decoder,sender,receiver
+*
+*
+*Licensed to the Apache Software Foundation (ASF) under one
+*or more contributor license agreements. See the NOTICE file
+*distributed with this work for additional information
+*regarding copyright ownership. The ASF licenses this file
+*to you under the Apache License, Version 2.0 (the
+*"License"); you may not use this file except in compliance
+*with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+*Unless required by applicable law or agreed to in writing,
+*software distributed under the License is distributed on an
+*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+*KIND, either express or implied. See the License for the
+*specific language governing permissions and limitations
+*under the License.
+*/
+//en_de_screen.h
+
+#include "fecrtpsession.h"
+#include "en_de_sendthread.h"
+
+// Linux sys.
+#include <iostream>
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/mman.h>
+#include <linux/types.h>
+#include <linux/videodev.h>
+// FFmpeg
+#include <ffmpeg/avcodec.h>
+#include <ffmpeg/avformat.h>
+//Time
+#include <time.h>
+// X11
+#include <X11/Intrinsic.h>
+#include <X11/XWDFile.h>
+// Jthread and JMutex
+#include <jthread.h>
+#include <jmutex.h>
+#include <gdk-pixbuf/gdk-pixbuf.h>
+
+#define max(x,y) (x > y ? x : y)
+
+#if !defined(_EN_DE_SCREEN_H__INCLUDED_)
+#define _EN_DE_SCREEN_H__INCLUDED_
+
+//!Screen Header Information
+/*!
+\param width Screen width
+\param height Screen height
+*/
//! Header prepended to every encoded screen frame.
/*!
 Carries the pixel geometry of the frame so the receiver can detect a
 sender resolution change and reset its decoder.
*/
struct ScreenHeader
{
	int width;  //!< frame width in pixels
	int height; //!< frame height in pixels
};
+//!Color Info
//! Per-channel color layout of an XImage.
/*!
 Describes, for each of the red/green/blue/alpha channels, how far the
 channel is shifted within a pixel, how many bits it occupies, and the
 largest value it can hold; plus the overall maxima and the alpha mask.
*/
struct ColorInfo
{
	unsigned long red_shift;       //!< bit offset of the red channel
	unsigned long green_shift;     //!< bit offset of the green channel
	unsigned long blue_shift;      //!< bit offset of the blue channel
	unsigned long alpha_shift;     //!< bit offset of the alpha channel
	unsigned long max_val;         //!< largest single-channel value
	unsigned long bit_depth;       //!< widest single-channel depth
	unsigned long red_max_val;     //!< (1 << red_bit_depth) - 1
	unsigned long green_max_val;   //!< (1 << green_bit_depth) - 1
	unsigned long blue_max_val;    //!< (1 << blue_bit_depth) - 1
	unsigned long alpha_max_val;   //!< (1 << alpha_bit_depth) - 1
	unsigned long red_bit_depth;   //!< bits used by the red channel
	unsigned long green_bit_depth; //!< bits used by the green channel
	unsigned long blue_bit_depth;  //!< bits used by the blue channel
	unsigned long alpha_bit_depth; //!< bits used by the alpha channel
	u_int32_t alpha_mask;          //!< pixel bits not covered by R/G/B
};
+
+//!Screen encoder.
+class CSEncoder
+{
+ friend class CSDecoder;
+ friend class CScreenSender;
+
+private:
+ //!buffer size
+ enum SENCODERBUFSIZE
+ {
+ S_En_OutBufSize = 400 * 1024
+ };
+ //!Screen codec params
+ enum SCREEN_CODEC_PARA
+ {
+ S_CODEC_width = 800,
+ S_CODEC_height = 600,
+ S_CODEC_framerate = 1,
+ S_CODEC_frame_rate_base = 1,
+ S_CODEC_gop_size = 4,
+ S_CODEC_max_b_frames
+ };
+public:
+ //!Constructor
+ CSEncoder();
+ //!Destructor
+ virtual ~CSEncoder();
+ //!Get Screen Size
+ /*!
+ \param width Screen width
+ \param height Screen height
+ */
+ bool GetScreenSize(int &width, int &height);
+ //!capture Screen
+ /*!
+ \param image store in XImage struct
+ */
+ bool Capture(XImage **image);
+ //!Encode the captured Screen image
+ /*!
+ \param image pointer to captured Screen image
+ \param pOutBuf store the encoded Screen image
+ \param nOutsize the encoded data size
+ */
+ int EncodeProcess(XImage *image, uint8_t *pOutBuf, int nOutsize);
+ //!Initialise
+ /*!
+ \param nCodecID the Codec ID
+ */
+ bool Init(enum CodecID nCodecID = CODEC_ID_MPEG4);
+ //!Get image Color Info
+ /*!
+ \param image the source XImage Struct
+ \param ci the return ColorInfo Struct
+ */
+ void GetColorInfo(XImage *image, ColorInfo *ci /* return struct */);
+
+private:
+ //!paint Mouse Pointer
+ void paintMousePointer(int *x, int *y, XImage *image);
+ //!get Current Pointer
+ void getCurrentPointer(int *x, int *y);
+ //!Initialise
+ /*!
+ \param width Screen width
+ \param height Screen height
+ */
+ bool InitScreen(int width, int height);
+
+private:
+
+ //!Display Instance
+ Display *m_display;
+ //!Root window
+ Drawable m_d;
+ //!image to be displayed
+ XImage *m_image;
+ //!Store color infor
+ ColorInfo c_info;
+ //!screen number
+ int m_screen_num;
+
+ unsigned int m_width, m_height;
+ //!mark init screen status
+ bool m_bInitScreen;
+ //!pixel format
+ int input_pixfmt;
+ //!Screen Header information
+ ScreenHeader m_ScreenHeader;
+
+private:
+ //!init status
+ bool m_bInit;
+
+ //!AVCodec Instance to store the codec
+ AVCodec *m_pCodec;
+ //!AVCodecContext Instance to store the codec content
+ AVCodecContext *m_pCodecCtx;
+ //!AVFrame Instance to store decoded frame
+ AVFrame *m_pFrame;
+ //!AVPicture Instance to store decoded picture
+ AVPicture m_pic_rgb;
+ //!frame buffer
+ uint8_t *m_pFrameBuf;
+
+};
+
+//!Screen decoder.
+class CSDecoder
+{
+ friend class CScreenReceiver;
+private:
+ enum VDECODERBUFSIZE
+ {
+ S_De_ExtraHeaderSize = 10000, S_De_INBUF_SIZE = 1024
+ };
+public:
+ //!Constructor
+ CSDecoder();
+ //!Destructor
+ virtual ~CSDecoder();
+public:
+ //!Decode a screen frame
+ /*!
+ \param encodeddata_v pointer to encodeddata
+ \param encodeddatasize_v the encodeddata size
+ */
+ int DecodeProcess(uint8_t *encodeddata_v, const int encodeddatasize_v);
+ //!Initialise
+ /*!
+ \param width the screen width
+ \param height the screen height
+ \param nCodecID the Codec ID
+ */
+ bool Init(int width, int height, enum CodecID nCodecID);
+
+public:
+ //!Show the decoded screen (image)
+ /*!
+ \param parent the Drawable parent window
+ \param x the coordinate
+ \param y the coordinate
+ \param width image width
+ \param height image height
+ */
+ bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+ //!close image display
+ void CloseXImage();
+
+private:
+ //!close the codec
+ void CloseCodec();
+ //!reset codec params
+ /*!
+ \param width the image width
+ \param height the image height
+ */
+ bool ResetCodec(const int width, const int height);
+ //!Drawable root window
+ Drawable m_parent;
+ //!Display Instance
+ Display *m_display;
+ //!For dislay image
+ GC m_gc;
+ //!Root window
+ Window m_d;
+ //!window for display image
+ Window m_win;
+ //!image to be displayed
+ XImage *m_image;
+ //!mutex variable for image display
+ JMutex m_imagemutex;
+ //!Store color infor
+ ColorInfo c_info;
+ //!for call encoder getColorInfo
+ CSEncoder m_encoder;
+ //!pixel format
+ int input_pixfmt;
+
+private:
+ //!mark init status
+ bool m_bInit;
+ //!AVCodec Instance to store the codec
+ AVCodec *m_pCodec;
+ //!AVCodecContext Instance to store the codec content
+ AVCodecContext *m_pCodecCtx;
+ //!AVFrame Instance to store decoded frame
+ AVFrame *m_pOutFrame;
+ //!AVPicture Instance to store decoded picture
+ AVPicture pic_rgb;
+ //!codec width
+ int m_width;
+ //!codec height
+ int m_height;
+
+};
+
+//!screen sender.
+class CScreenSender: private JThread
+{
+private:
+ enum SCREENSENDBUFSIZE
+ {
+ SendBufSize = 2
+ };
+public:
+ //!Constructor
+ CScreenSender();
+ //!Destructor
+ ~CScreenSender();
+ //!Initialise
+ bool Init(int nHostPort);
+ //!Add Destination for sending
+ bool AddDestination(const RTPIPv4Address &des);
+ //!Clear Destinations
+ void ClearDestinations();
+ //!Start sender thread
+ int Start(char* szFile = 0, bool bIsRecord = false);
+ //!stop the thread
+ void Stop();
+ //!record screen
+ void Record(bool bInRecord = true);
+private:
+ //!work thread
+ void *Thread();
+ //!stoo status
+ bool stop;
+ //!mutex for stoping thread
+ JMutex stopmutex;
+private:
+ //!record status
+ bool m_bIsRecord;
+ //!mutex for record
+ JMutex m_recordmutex;
+ //!call screen encoder
+ CSEncoder m_sencoder;
+ //!output buffer
+ uint8_t *m_pOutBuf;
+ //!store the recorded screen data
+ FILE* m_hFile;
+ //!call send thread
+ CSendThread m_sendthread;
+ //!init status
+ int m_bInit;
+};
+
+//!screen receiver
+class CScreenReceiver: public CFECRtpSession
+{
+public:
+ //!Constructor
+ CScreenReceiver();
+ //!Destructor
+ virtual ~CScreenReceiver();
+ //Initialise
+ bool Init();
+ //!start receive video data
+ int Start(int nHostPort);
+ //!stop receive the thread
+ void Stop();
+
+public:
+ //!show the image
+ bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+ //!close the image display
+ void CloseXImage();
+
+private:
+ //!Deal with the received screen data
+ /*!
+ \param framedata pointer to frame data
+ \param framelen length of frame
+ */
+ virtual void ProcessFrame(unsigned char* framedata, int framelen);
+
+private:
+ //!screen init
+ bool m_ScreenInit;
+ //!receiver init
+ bool m_bInit;
+ //!call screen decoder
+ CSDecoder m_sdecoder;
+public:
+ //!screen header information
+ ScreenHeader* pScreen;
+};
+
+#endif // !defined(_EN_DE_SCREEN_H__INCLUDED_)
Added: incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.cpp
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.cpp?rev=885395&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.cpp (added)
+++ incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.cpp Mon Nov 30 12:18:34 2009
@@ -0,0 +1,251 @@
+/** \file en_de_sendthread.cpp Implementation for the send data class
+*
+*
+*Licensed to the Apache Software Foundation (ASF) under one
+*or more contributor license agreements. See the NOTICE file
+*distributed with this work for additional information
+*regarding copyright ownership. The ASF licenses this file
+*to you under the Apache License, Version 2.0 (the
+*"License"); you may not use this file except in compliance
+*with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+*Unless required by applicable law or agreed to in writing,
+*software distributed under the License is distributed on an
+*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+*KIND, either express or implied. See the License for the
+*specific language governing permissions and limitations
+*under the License.
+*/
+#include "en_de_sendthread.h"
+
+extern void PErrorText(const char* error);
+//CSendThread class.
+
+CSendThread::CSendThread(int nSendBufSize)
+{
+ stop = false;
+ m_bInit = 0;
+
+ if (nSendBufSize <= 2)
+ {
+ m_nSendBufSize = 2;
+ }
+ else if (nSendBufSize >= 25)
+ {
+ m_nSendBufSize = 25;
+ }
+ else
+ m_nSendBufSize = nSendBufSize;
+
+}
+
+CSendThread::~CSendThread()
+{
+ Stop();
+
+}
+
+#define MAX_PACKET_SIZE 10*1024
+bool CSendThread::Init(int nPort)
+{
+ if (m_bInit)
+ return true;
+
+ //init rtpsession.
+ RTPSessionParams sessParams1;
+ sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+ sessParams1.SetUsePollThread(0); //background thread to call virtual callbacks - set by default, but just to be sure
+ sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+ //setup transmission parameters
+ RTPUDPv4TransmissionParams transParams1;
+ transParams1.SetPortbase(nPort);
+ //CREATE THE SESSION
+ int status1 = m_fecrtpsession.Create(sessParams1, &transParams1);
+ if (status1)
+ {
+ return false; //unable to create the session
+ }
+
+ //must set for fec SendFECPacket.
+ m_fecrtpsession.SetDefaultMark(true);
+ m_fecrtpsession.SetDefaultPayloadType(1);
+ m_fecrtpsession.SetDefaultTimestampIncrement(0);
+
+ m_bInit = true;
+ return m_bInit;
+}
+
+int CSendThread::Start()
+{
+ if (!m_bInit)
+ return -1;
+
+ if (JThread::IsRunning())
+ return 0;
+
+ if (!stopmutex.IsInitialized())
+ {
+ if (stopmutex.Init() < 0)
+ return -2;
+ }
+
+ stop = false;
+
+ if (!m_senddatamutex.IsInitialized())
+ {
+ if (m_senddatamutex.Init() < 0)
+ return -2;
+ }
+
+ if (JThread::Start() < 0)
+ {
+ return -6;
+ }
+
+ return 0;
+}
+
+void CSendThread::Stop()
+{
+ if (!IsRunning())
+ return;
+
+ stopmutex.Lock();
+ stop = true;
+ stopmutex.Unlock();
+
+ //wait for two minute;
+ sleep(1);
+ if (JThread::IsRunning())
+ {
+ JThread::Kill();
+ }
+ stop = false;
+
+}
+
+int CSendThread::SendData(uint8_t *data, int datalen)
+{
+ SENDBUFDATA *pData;
+ if (!m_bInit)
+ return -1;
+
+ pData = new SENDBUFDATA;
+ if (pData == 0)
+ {
+ PErrorText("Error: CSendThread::SendData new SENDBUFDATA");
+ return -2;
+ }
+
+ pData->data = new uint8_t[datalen];
+ if (pData->data == 0)
+ {
+ delete pData;
+ PErrorText("Error: CSendThread::SendData new uint8_t");
+ return -3;
+ }
+
+ memcpy(pData->data, data, datalen);
+ pData->datalen = datalen;
+
+ m_senddatamutex.Lock();
+ m_senddata.insert(m_senddata.end(), pData);
+ m_senddatamutex.Unlock();
+
+ return 0;
+}
+
+void *CSendThread::Thread()
+{
+ JThread::ThreadStarted();
+
+ SENDBUFDATA *senddata = new SENDBUFDATA[m_nSendBufSize];
+ bool stopthread;
+
+ stopmutex.Lock();
+ stopthread = stop;
+ stopmutex.Unlock();
+
+ int size;
+ SENDBUFDATA* p;
+ SendDatalist::iterator itera;
+ int index;
+ while (!stopthread)
+ {
+
+ m_senddatamutex.Lock();
+ size = m_senddata.size();
+ if (size == 0)
+ {
+ m_senddatamutex.Unlock();
+
+ usleep(50000);
+
+ stopmutex.Lock();
+ stopthread = stop;
+ stopmutex.Unlock();
+ continue;
+ }
+ if (size > m_nSendBufSize)
+ {
+ for (itera = m_senddata.begin(); itera != m_senddata.end(); itera++)
+ {
+ p = *itera;
+ delete p->data;
+ delete p;
+ p = 0;
+ }
+ m_senddata.clear();
+ size = 0;
+ }
+ else
+ {
+ for (itera = m_senddata.begin(), index = 0; itera
+ != m_senddata.end(); itera++, index++)
+ {
+ p = *itera;
+ senddata[index].data = p->data;
+ senddata[index].datalen = p->datalen;
+ delete p;
+ p = 0;
+ }
+ m_senddata.clear();
+ size = index;
+ }
+
+ m_senddatamutex.Unlock();
+
+ for (index = 0; index < size; index++)
+ {
+ m_fecrtpsession.SendFECPacket(senddata[index].data,
+ senddata[index].datalen, 5000);
+ delete senddata[index].data;
+ }
+
+ stopmutex.Lock();
+ stopthread = stop;
+ stopmutex.Unlock();
+
+ }
+ printf("\nSendthread stoped.\n");
+ return 0;
+}
+
+bool CSendThread::AddDestination(const RTPIPv4Address &des)
+{
+ if (!m_bInit)
+ return false;
+ if (m_fecrtpsession.AddDestination(des) < 0)
+ return false;
+
+ return true;
+}
+
+void CSendThread::ClearDestinations()
+{
+ if (!m_bInit)
+ return;
+ m_fecrtpsession.ClearDestinations();
+}
Added: incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.h?rev=885395&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.h (added)
+++ incubator/bluesky/trunk/RealClass/Student/src/en_de_sendthread.h Mon Nov 30 12:18:34 2009
@@ -0,0 +1,104 @@
+/** \file en_de_sendthread.h Class for send data
+*
+*
+*Licensed to the Apache Software Foundation (ASF) under one
+*or more contributor license agreements. See the NOTICE file
+*distributed with this work for additional information
+*regarding copyright ownership. The ASF licenses this file
+*to you under the Apache License, Version 2.0 (the
+*"License"); you may not use this file except in compliance
+*with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+*Unless required by applicable law or agreed to in writing,
+*software distributed under the License is distributed on an
+*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+*KIND, either express or implied. See the License for the
+*specific language governing permissions and limitations
+*under the License.
+*/
+// rtp
+#include "fecrtpsession.h"
+// Linux sys.
+#include <iostream>
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/mman.h>
+#include <linux/types.h>
+#include <linux/videodev.h>
+// FFmpeg
+#include <ffmpeg/avcodec.h>
+#include <ffmpeg/avformat.h>
+
+#include <time.h>
+
+// X11
+#include <X11/Intrinsic.h>
+#include <X11/XWDFile.h>
+// Jthread and JMutex
+#include <jthread.h>
+#include <jmutex.h>
+
+#if !defined(_EN_DE_SENDTHREAD_H__INCLUDED_)
+#define _EN_DE_SENDTHREAD_H__INCLUDED_
+//!Send buffer
//! One queued outgoing packet.
struct SENDBUFDATA
{
	uint8_t* data; // owned heap buffer, allocated with new[] in SendData()
	int datalen;   // size of data in bytes
};
// FIFO of queued packets, produced by SendData() and drained by Thread().
typedef std::list<SENDBUFDATA*> SendDatalist;
+
+//!SendThread class.
//! Background thread that drains a packet queue into an RTP session.
class CSendThread: private JThread
{
public:
	//! Constructor; nSendBufSize is clamped to [2, 25].
	CSendThread(int nSendBufSize);
	//! Destructor; stops the worker thread.
	~CSendThread();
	//! Create the RTP session on the given port base.
	bool Init(int nPort);
	//! Add a destination address.
	/*!
	\param des destination IPv4 address/port
	*/
	bool AddDestination(const RTPIPv4Address &des);
	//! Remove all destinations.
	void ClearDestinations();
	//! Queue a copy of the given buffer for sending.
	/*!
	\param data pointer to the data buffer (copied, caller keeps ownership)
	\param datalen data size in bytes
	*/
	int SendData(uint8_t *data, int datalen);
	//! Start the worker thread.
	int Start();
	//! Stop the worker thread.
	void Stop();
private:
	//! Worker thread body.
	void *Thread();
	//! Set when the thread should exit.
	bool stop;
	//! Protects the stop flag.
	JMutex stopmutex;
private:
	//! RTP session performing the actual transmission.
	CFECRtpSession m_fecrtpsession;
	//! Init status (nonzero once Init() succeeded).
	int m_bInit;
	//! Maximum number of packets drained per loop iteration.
	int m_nSendBufSize;
	//! Queued packets waiting to be sent.
	SendDatalist m_senddata;
	//! Protects m_senddata.
	JMutex m_senddatamutex;
};
+
+#endif // !defined(_EN_DE_SENDTHREAD_H__INCLUDED_)
Added: incubator/bluesky/trunk/RealClass/Student/src/en_de_video.cpp
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Student/src/en_de_video.cpp?rev=885395&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Student/src/en_de_video.cpp (added)
+++ incubator/bluesky/trunk/RealClass/Student/src/en_de_video.cpp Mon Nov 30 12:18:34 2009
@@ -0,0 +1,1710 @@
+/** \file en_de_video.cpp Implementation for video operation:capture,encoder,decoder,sender,receiver
+*
+*
+*Licensed to the Apache Software Foundation (ASF) under one
+*or more contributor license agreements. See the NOTICE file
+*distributed with this work for additional information
+*regarding copyright ownership. The ASF licenses this file
+*to you under the Apache License, Version 2.0 (the
+*"License"); you may not use this file except in compliance
+*with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+*Unless required by applicable law or agreed to in writing,
+*software distributed under the License is distributed on an
+*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+*KIND, either express or implied. See the License for the
+*specific language governing permissions and limitations
+*under the License.
+*/
+
+//V4l classes.
+#include <deque>
+#include <unistd.h>
+#include <string>
+#include <sstream>
+#include <iostream>
+#include <iomanip>
+
+#include <pthread.h>
+
+#include <gtk/gtk.h>
+#include <gdk-pixbuf/gdk-pixbuf.h>
+#include "en_de_video.h"
// Buffer holding the most recent encoder/decoder error message.
static char g_error_txt[550];
/// Record an error message into g_error_txt, prefixed with
/// "\nEn_De Error:". Messages that do not fit are truncated; the old
/// strlen()<=500 pre-check silently discarded long messages entirely.
void PErrorText(const char* error)
{
	if (error == 0)
		return;
	// snprintf never overruns the buffer and always NUL-terminates.
	snprintf(g_error_txt, sizeof(g_error_txt), "\nEn_De Error:%s", error);
}
// Definitions of globals shared with other translation units (those
// declare them `extern`). `extern` combined with an initializer is a
// definition anyway and only draws a compiler warning, so it is dropped.
int v4l_dev_num = 0; // index of the V4L capture device in use
int delay_time = 0;  // inter-frame delay used by the capture loops
+
+extern "C"
+{
+#include <sys/types.h>
+#include <sys/soundcard.h>
+#include <sys/time.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+}
+
+V4L::V4L()
+{
+
+}
+
//! Close the capture device if it was opened.
V4L::~V4L()
{
	// Only clean up when openDevice() actually opened the device node.
	if (fd != -1)
	{
		close( fd);

		// NOTE(review): `capability` is never assigned in the code
		// visible here (openDevice() fills a *local* video_capability) --
		// confirm it is allocated elsewhere before trusting this delete.
		delete this->capability;
	}
}
+void V4L::setInfo(char *device, char *input, int sample)
+{
+ this->device = device;
+ this->input = input;
+ this->sample = sample;
+}
+
+bool V4L::openDevice()
+{
+ bool ret = true;
+
+ if (!strcmp(this->input, "PAL"))
+ {
+ this->width = VideoWidth;
+ this->height = VideoHeight;
+ this->fps = 25;
+ }
+ else if (!strcmp(this->input, "NTSC"))
+ {
+ this->width = 720;
+ this->height = 480;
+ this->fps = 30;
+ }
+
+ this->fd = open(device, O_RDWR);
+ if (fd == -1)
+ {
+ perror("Unable to open video device");
+ return false;
+ }
+
+ {
+ video_capability capability;
+ if (ioctl(fd, VIDIOCGCAP, &capability) != -1)
+ {
+ printf(" query was successful\n");
+ }
+ else
+ {
+ perror("query");
+ }
+
+ if ((capability.type & VID_TYPE_CAPTURE) != 0)
+ {
+ printf("this device can capture video to memory\n");
+ }
+ else
+ {
+ perror("can't capture video to memtory");
+ }
+
+ struct video_channel queryChannel;
+ int i = 0;
+ while (i < capability.channels)
+ {
+ queryChannel.channel = i;
+ if (ioctl(fd, VIDIOCGCHAN, &queryChannel) != -1)
+ { // ioctl success, queryChannel contains information about this channel
+ printf(" information about this channel:%d, %s\n",
+ queryChannel.channel, queryChannel.name);
+ }
+
+ ++i;
+ }
+
+ struct video_channel selectedChannel;
+ selectedChannel.channel = 0;
+ selectedChannel.norm = VIDEO_MODE_PAL;
+ if (ioctl(fd, VIDIOCSCHAN, &selectedChannel) == -1)
+ {
+ return false;
+ }
+
+ struct video_window captureWindow;
+ memset(&captureWindow, 0, sizeof(captureWindow));
+ captureWindow.width = VideoWidth;
+ captureWindow.height = VideoHeight;
+
+ memset(&captureWindow, 0, sizeof(captureWindow));
+ if (ioctl(fd, VIDIOCGWIN, &captureWindow) == -1)
+ {
+ return false;
+ }
+
+ width = captureWindow.width;
+ height = captureWindow.height;
+ printf("width=%d,height=%d\n", width, height);
+
+ }
+
+ int depth;
+ int palette;
+ struct video_picture imageProperties;
+
+ if (ioctl(fd, VIDIOCGPICT, &imageProperties) == -1)
+ { // failed to retrieve default image properties
+ return false;
+ }
+ depth = imageProperties.depth;
+ palette = imageProperties.palette;
+ printf("\nVideo:depth=%d,palette=%d\n", depth, palette);
+ if (ioctl(fd, VIDIOCSPICT, &imageProperties) == -1)
+ { // failed to retrieve default image properties
+ return false;
+ }
+
+ return ret;
+}
+
+
//! \return the current capture frame width in pixels.
int V4L::getWidth()
{
	return width;
}
+
//! \return the current capture frame height in pixels.
int V4L::getHeight()
{
	return height;
}
+
+int V4L::mappedMemorySize(bool init)
+{
+ static video_mbuf buf;
+ if (init == true)
+ {
+ init = 1;
+ ioctl(fd, VIDIOCGMBUF, &buf);
+ frame_maps = buf.frames;
+ }
+ return buf.size;
+}
+
+bool V4L::initialiseCapture(int format)
+{
+ map = mmap(0, mappedMemorySize(true), PROT_READ | PROT_WRITE, MAP_SHARED,
+ fd, 0);
+
+ if (map != NULL)
+ {
+ for (int i = 0; i < frame_maps; i++)
+ {
+ frame[i].frame = i;
+ frame[i].width = getWidth();
+ frame[i].height = getHeight();
+ frame[i].format = format;
+ }
+ frames = 0;
+ frame_next = 0;
+
+ int retry = 0;
+ while (ioctl(fd, VIDIOCMCAPTURE, &frame[0]) == -1 && retry++
+ < frame_maps + 1)
+ ;
+
+ return true;
+ }
+ else
+ {
+ return false;
+ }
+}
//! Return a pointer to the next captured frame (double-buffered).
/*!
 Queues a capture into the following buffer and then syncs on the
 current one, so capturing and processing overlap.
 \return pointer into the mmap()ed capture area; owned by the driver
         mapping, not by the caller.
*/
void *V4L::getNextFrame()
{
	unsigned char * ret = NULL;

	// Rotate through the driver's frame buffers.
	int current = frame_next;
	frame_next = (frame_next + 1) % frame_maps;

	// Start capturing the next frame asynchronously...
	if (ioctl(fd, VIDIOCMCAPTURE, &frame[frame_next]) == -1)
		; //cout << "Frame 1 Failed to initialise" << endl;
	// ...and block until the current frame has finished capturing.
	if (ioctl(fd, VIDIOCSYNC, &frame[current].frame) == -1)
		; //cout << "Frame 0 Failed to sync" << endl;
	// Each buffer occupies an equal slice of the mapped area.
	ret = (unsigned char *) map + current * (mappedMemorySize() / frame_maps);

	frames++;

	return (void *) ret;
}
+
+//V4l classes.
+
+
+
+
+int CVDecoder::input_pixfmt = PIX_FMT_RGBA32;
+
// Profiling helpers: STARTTIME opens a block (note the deliberately
// unbalanced '{') and records av_gettime(); ENDTIME computes the
// elapsed microseconds, tracks the min/max and the count of intervals
// above 40 ms, prints the statistics, and closes the block. The two
// macros MUST be used as a matched pair in the same scope.
#define STARTTIME { static int MaxTime=0; \
 static int MinTime=1000; \
 static int largecount=0; \
 int IntervalTime=0; \
 int64_t start_time=av_gettime();

#define ENDTIME IntervalTime = av_gettime()-start_time; \
 if(IntervalTime>40000) \
 largecount++; \
 if(MaxTime<IntervalTime) \
 MaxTime=IntervalTime; \
 if(MinTime>IntervalTime) \
 MinTime=IntervalTime; \
 printf("\nMaxTime=%d, MinTime=%d, largecount=%d, IntervalTime=%d\n", MaxTime, MinTime, largecount, IntervalTime);}
+
+//CVDecoder class.
+
+CVDecoder::CVDecoder()
+{
+ m_pCodec = 0;
+ m_pCodecCtx = 0;
+ m_pOutFrame = 0;
+ m_bInit = false;
+
+ m_display = 0;
+ m_win = 0;
+ m_image = 0;
+
+}
+
//! Release the codec, the decoded-frame buffer, and all X resources.
CVDecoder::~CVDecoder()
{
	m_bInit = false;

	if (m_pOutFrame)
	{
		// NOTE(review): m_pOutFrame comes from avcodec_alloc_frame()
		// (FFmpeg's allocator); av_free() would be the matching release.
		// Confirm free() is safe with this FFmpeg build before porting.
		free( m_pOutFrame);
		m_pOutFrame = 0;
	}
	if (m_pCodecCtx)
	{
		if (m_pCodecCtx->extradata)
		{
			// extradata is malloc()'d in Init(), so free() matches here.
			free(m_pCodecCtx->extradata);
			m_pCodecCtx->extradata = 0;
			m_pCodecCtx->extradata_size = 0;
		}
		avcodec_close( m_pCodecCtx);
		m_pCodecCtx = 0;
	}

	if (m_image)
	{
		XDestroyImage( m_image);
		//m_image->f.destroy_image(m_image);
		m_image = 0;
	}

	if (m_display)
	{
		if (m_win)
		{
			XClearWindow(m_display, m_win);
			m_win = 0;
		}
		if (m_d)
		{
			// NOTE(review): m_d is the root window; clearing it touches
			// the whole desktop -- confirm this is intentional.
			XClearWindow(m_display, m_d);
			m_d = 0;
		}

		XCloseDisplay( m_display);
		m_display = 0;
	}

}
+
//! Derive the per-channel color layout (shift/depth/max) of an XImage.
/*!
 For each of the R/G/B masks, counts the trailing zero bits (shift) and
 the set bits (depth), then derives the channel maxima and, when the
 pixel carries more bits than the color depth, the alpha layout from
 the bits not covered by the RGB masks.
 \param image the XImage whose masks are inspected
 \param ci receives the color layout (filled in for the caller)
*/
void CVDecoder::GetColorInfo(XImage *image, ColorInfo *ci /* return struct */)
{
	unsigned long red_mask, green_mask, blue_mask, alpha_mask;
	// the shifts are unsigned longs as well

	if (!ci)
		return;

	// setting shifts and bit_depths to zero
	ci->red_shift = ci->green_shift = ci->blue_shift = ci->alpha_shift = 0;
	ci->red_bit_depth = ci->green_bit_depth = ci->blue_bit_depth
			= ci->alpha_bit_depth = 0;

	red_mask = image->red_mask;
	if (red_mask > 0)
	{
		// shift red_mask to the right till all empty bits have been
		// shifted out and count how many they were
		while ((red_mask & 0x01) == 0)
		{
			red_mask >>= 1;
			ci->red_shift++;
		}
		// count how many bits are set in the mask = depth
		while ((red_mask & 0x01) == 1)
		{
			red_mask >>= 1;
			ci->red_bit_depth++;
		}
	}

	ci->red_max_val = (1 << ci->red_bit_depth) - 1;

	// Same trailing-zeros / set-bits walk for the green channel.
	green_mask = image->green_mask;
	if (green_mask > 0)
	{
		while ((green_mask & 0x01) == 0)
		{
			green_mask >>= 1;
			ci->green_shift++;
		}
		while ((green_mask & 0x01) == 1)
		{
			green_mask >>= 1;
			ci->green_bit_depth++;
		}
	}
	ci->green_max_val = (1 << ci->green_bit_depth) - 1;

	// And for the blue channel.
	blue_mask = image->blue_mask;
	if (blue_mask > 0)
	{
		while ((blue_mask & 0x01) == 0)
		{
			blue_mask >>= 1;
			ci->blue_shift++;
		}
		while ((blue_mask & 0x01) == 1)
		{
			blue_mask >>= 1;
			ci->blue_bit_depth++;
		}
	}
	ci->blue_max_val = (1 << ci->blue_bit_depth) - 1;

	/* over all max values */
	// whatever they are good for
	ci->max_val = max(ci->red_max_val, ci->green_max_val);
	ci->max_val = max(ci->blue_max_val, ci->max_val);
	ci->bit_depth = max(ci->red_bit_depth, ci->green_bit_depth);
	ci->bit_depth = max(ci->blue_bit_depth, ci->bit_depth);
	if (image->bits_per_pixel > image->depth)
	{
		/* alpha? */
		// this seems to not reflect X's ignorance of alpha in its
		// masks
		ci->alpha_mask = ~(image->red_mask | image->blue_mask
				| image->green_mask);
		alpha_mask = ci->alpha_mask;
		if (alpha_mask > 0)
		{
			while ((alpha_mask & 0x01) == 0)
			{
				alpha_mask >>= 1;
				ci->alpha_shift++;
			}
			while ((alpha_mask & 0x01) == 1)
			{
				alpha_mask >>= 1;
				ci->alpha_bit_depth++;
			}
		}
		ci->alpha_max_val = (1 << ci->alpha_bit_depth) - 1;
	}
}
+
+bool CVDecoder::CreateXImage(Drawable parent, int x, int y, int width,
+ int height)
+{
+ int screen_num;
+
+ GdkPixbuf *original_pixbuf;
+
+ gint original_width, original_height;
+ GdkColorspace original_color;
+ gboolean original_alpha;
+ gboolean pixbuf_has_alpha;
+ XSetWindowAttributes win_attr;
+ XImage *p_image = NULL;
+ if (!m_bInit)
+ return false;
+
+ CloseXImage();
+
+ m_imagemutex.Lock();
+
+ m_display = XOpenDisplay(NULL);
+ screen_num = DefaultScreen(m_display);
+ m_gc = DefaultGC(m_display, screen_num);
+ m_d = RootWindow(m_display, screen_num);
+
+ m_win = XCreateWindow(m_display, parent, x, y, width, height, 1,
+ XDefaultDepth(m_display, screen_num), InputOutput, CopyFromParent,
+ 0, &win_attr);
+
+ if (gdk_pixbuf_new_from_file("pic/teacher2.bmp", NULL) == NULL)
+ {
+ XSetWindowBackgroundPixmap(m_display, m_win, ParentRelative);
+ XMapWindow(m_display, m_win);
+ }
+
+ else
+ {
+ original_pixbuf = gdk_pixbuf_new_from_file("pic/teacher2.bmp", NULL);
+ pixbuf_has_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_color = gdk_pixbuf_get_colorspace(original_pixbuf);
+ original_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_width = gdk_pixbuf_get_width(original_pixbuf);
+ original_height = gdk_pixbuf_get_height(original_pixbuf);
+ printf("original_alpha = %d\n", original_alpha);
+ printf("original_color = %d\n", original_color);
+ printf("original_width = %d\n", original_width);
+ printf("original_height = %d\n", original_height);
+ printf("n_channles = %d\n", gdk_pixbuf_get_n_channels(original_pixbuf));
+
+ Pixmap pixmap = XCreatePixmap(m_display, m_win, original_width,
+ original_height, XDefaultDepth(m_display, screen_num));
+ XSetWindowBackgroundPixmap(m_display, m_win, pixmap);
+
+ p_image = XGetImage(m_display, m_d, 0, 0, original_width,
+ original_height, AllPlanes, ZPixmap);
+ if (!p_image)
+ {
+ printf("error\n");
+ exit(10);
+ }
+
+ AVPicture pic_rgb24, pic_rgb32;
+ if (m_display && p_image && pixmap)
+ {
+ avpicture_fill(&pic_rgb32, (uint8_t*) p_image->data,
+ PIX_FMT_RGBA32, original_width, original_height);
+ avpicture_fill(&pic_rgb24, gdk_pixbuf_get_pixels(original_pixbuf),
+ PIX_FMT_RGB24, original_width, original_height);
+
+ if (img_convert(&pic_rgb32, PIX_FMT_RGBA32, &pic_rgb24,
+ PIX_FMT_RGB24, original_width, original_height) < 0)
+ {
+ printf("Error pixel format conversion");
+ return -1;
+ }
+
+ XPutImage(m_display, pixmap, m_gc, p_image, 0, 0, 0, 0,
+ original_width, original_height);
+
+ }
+
+ XMapWindow(m_display, m_win);
+ XFreePixmap(m_display, pixmap);
+ gdk_pixbuf_unref(original_pixbuf);
+ XDestroyImage(p_image);
+ }
+
+ m_image = XGetImage(m_display, m_d, 0, 0, m_width, m_height, AllPlanes,
+ ZPixmap);
+
+ if (!m_image)
+ {
+ printf("XGetImage error\n");
+ m_imagemutex.Unlock();
+ return false;
+ }
+
+ m_imagemutex.Unlock();
+ return true;
+}
+
//! Destroy the display image/window and close the X connection.
/*!
 Safe to call repeatedly; serialised against DecodeProcess() via
 m_imagemutex so the decoder never draws into a dying window.
*/
void CVDecoder::CloseXImage()
{
	if (!m_bInit)
		return;

	m_imagemutex.Lock();

	if (m_image)
	{
		XDestroyImage( m_image);
		m_image = 0;
	}

	if (m_display)
	{
		if (m_win)
		{
			XUnmapWindow(m_display, m_win);
			XClearWindow(m_display, m_win);
			m_win = 0;
		}

		if (m_d)
		{
			// NOTE(review): m_d is the root window; clearing it touches
			// the whole desktop -- confirm this is intentional.
			XClearWindow(m_display, m_d);
			m_d = 0;
		}

		XCloseDisplay( m_display);
		m_display = 0;
	}

	m_imagemutex.Unlock();
	return;
}
+
//! Initialise the FFmpeg video decoder for a width x height stream.
/*!
 Registers the codecs, creates the image mutex, allocates the output
 frame and a codec context configured for global headers with
 extradata space, and opens the decoder.
 \param width decoded frame width in pixels
 \param height decoded frame height in pixels
 \param nCodecID FFmpeg codec to open
 \return true on success (idempotent once initialised).
 NOTE(review): the failure paths below return without releasing the
 already-allocated frame/context/extradata -- acceptable only because
 callers treat a failed decoder as unusable; confirm before reuse.
*/
bool CVDecoder::Init(int width, int height, enum CodecID nCodecID)
{
	if (m_bInit)
		return true;

	avcodec_init();
	avcodec_register_all();

	if (!m_imagemutex.IsInitialized())
	{
		if (m_imagemutex.Init() < 0)
			return false;
	}

	m_width = width;
	m_height = height;

	// find the video decoder
	m_pCodec = avcodec_find_decoder(nCodecID);
	if (!m_pCodec)
	{
		PErrorText("Codec not found\n");
		return false;
	}

	// Replace any previous output frame.
	if (m_pOutFrame)
	{
		free( m_pOutFrame);
		m_pOutFrame = 0;
	}
	m_pOutFrame = avcodec_alloc_frame();

	// Tear down any previous codec context before building a new one.
	if (m_pCodecCtx)
	{
		if (m_pCodecCtx->extradata)
		{
			free(m_pCodecCtx->extradata);
			m_pCodecCtx->extradata = 0;
			m_pCodecCtx->extradata_size = 0;
		}
		avcodec_close( m_pCodecCtx);
		m_pCodecCtx = 0;
	}
	m_pCodecCtx = avcodec_alloc_context();
	m_pCodecCtx->extradata = 0;
	m_pCodecCtx->extradata_size = 0;

	// put sample parameters
	m_pCodecCtx->bit_rate = 512 * 1024;
	// resolution must be a multiple of two
	m_pCodecCtx->width = m_width;
	m_pCodecCtx->height = m_height;
	// frames per second
	m_pCodecCtx->frame_rate = V_CODEC_framerate;
	m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;
	// emit one intra frame every ten frames
	m_pCodecCtx->gop_size = V_CODEC_gop_size;

	// The sender transmits stream headers out-of-band in extradata.
	m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
	m_pCodecCtx->extradata = malloc(V_De_ExtraHeaderSize);
	if (m_pCodecCtx->extradata == 0)
	{
		return false;
	}
	m_pCodecCtx->extradata_size = V_De_ExtraHeaderSize;

	// we dont send complete frames
	if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
		m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;

	// open it
	if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
	{
		PErrorText("could not open codec");
		return false;
	}

	m_bInit = true;
	return true;
}
+
+int CVDecoder::DecodeProcess(uint8_t *encodeddata_v,
+ const int encodeddatasize_v)
+{
+ bool isPaintPic;
+ int realsize, i;
+ int got_picture;
+
+ if (!m_bInit)
+ {
+ return -1;
+ }
+
+ int left = 0;
+ int len;
+
+ m_imagemutex.Lock();
+
+ for (i = 0;; i++)
+ {
+ if (encodeddatasize_v - i * V_De_INBUF_SIZE >= V_De_INBUF_SIZE)
+ realsize = V_De_INBUF_SIZE;
+ else
+ realsize = encodeddatasize_v - i * V_De_INBUF_SIZE;
+
+ if (realsize <= 0)
+ break;
+
+ left = 0;
+ isPaintPic = false;
+ while (realsize > 0)
+ {
+
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+
+ len = avcodec_decode_video(m_pCodecCtx, m_pOutFrame, &got_picture,
+ (encodeddata_v + i * V_De_INBUF_SIZE + left), realsize);
+ if (len < 0)
+ {
+ PErrorText("Error while decoding");
+ m_imagemutex.Unlock();
+ return -2;
+ }
+
+ if (m_image != 0)
+ {
+ GetColorInfo(m_image, &c_info);
+ switch (m_image->bits_per_pixel)
+ {
+ case 8:
+ input_pixfmt = PIX_FMT_PAL8;
+ break;
+ case 16:
+ if (m_image->red_mask == 0xF800 && m_image->green_mask
+ == 0x07E0 && m_image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB565;
+ }
+ else if (m_image->red_mask == 0x7C00 && m_image->green_mask
+ == 0x03E0 && m_image->blue_mask == 0x1F)
+ {
+ input_pixfmt = PIX_FMT_RGB555;
+ }
+ else
+ {
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth %i not supported ... aborting\n",
+ m_image->bits_per_pixel);
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): color masks: r 0x%.6lX g 0x%.6lX b 0x%.6lX\n",
+ m_image->red_mask, m_image->green_mask,
+ m_image->blue_mask);
+ }
+ break;
+ case 24:
+ if (m_image->red_mask == 0xFF0000 && m_image->green_mask
+ == 0xFF00 && m_image->blue_mask == 0xFF)
+ {
+ input_pixfmt = PIX_FMT_BGR24;
+ }
+ else if (m_image->red_mask == 0xFF && m_image->green_mask
+ == 0xFF00 && m_image->blue_mask == 0xFF0000)
+ {
+ input_pixfmt = PIX_FMT_RGB24;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth not supported ... aborting\n");
+ PErrorText("xtoffmpeg.XImageToFFMPEG(): color masks");
+ return false;
+ }
+ break;
+ case 32:
+ if (c_info.alpha_mask == 0xFF000000 && m_image->green_mask
+ == 0xFF00)
+ {
+ input_pixfmt = PIX_FMT_RGBA32;
+ }
+ else
+ {
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+ return false;
+ }
+ break;
+ default:
+ PErrorText(
+ "xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+ return false;
+ }
+
+ }
+
+ if (got_picture)
+ {
+ if (!isPaintPic)
+ {
+ if (m_display && m_image && m_win)
+ {
+
+ avpicture_fill(&pic_rgb, (uint8_t*) m_image->data,
+ input_pixfmt, m_width, m_height);
+
+ if (img_convert(&pic_rgb, input_pixfmt,
+ (AVPicture*) m_pOutFrame, PIX_FMT_YUV420P,
+ m_width, m_height) < 0)
+ {
+ PErrorText("Error pixel format conversion");
+ m_imagemutex.Unlock();
+ return -3;
+ }
+
+ XPutImage(m_display, m_win, m_gc, m_image, 0, 0, 0, 0,
+ m_width, m_height);
+
+ }
+ }
+ isPaintPic = true;
+
+ }
+ realsize -= len;
+ left += len;
+ }
+ }
+
+ m_imagemutex.Unlock();
+ return 0;
+}
+
+void CVDecoder::CloseCodec()
+{
+ m_bInit = false;
+
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+
+}
+
+bool CVDecoder::ResetCodec(const int width, const int height)
+{
+ CodecID nCodecID = m_pCodecCtx->codec_id;
+ CloseCodec();
+ m_bInit = false;
+
+ m_width = width;
+ m_height = height;
+
+ // find the video decoder
+ m_pCodec = avcodec_find_decoder(nCodecID);
+ if (!m_pCodec)
+ {
+ PErrorText("Codec not found");
+ return false;
+ }
+
+ if (m_pOutFrame)
+ {
+ free( m_pOutFrame);
+ m_pOutFrame = 0;
+ }
+ m_pOutFrame = avcodec_alloc_frame();
+
+ if (m_pCodecCtx)
+ {
+ if (m_pCodecCtx->extradata)
+ {
+ free(m_pCodecCtx->extradata);
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+ }
+ avcodec_close( m_pCodecCtx);
+ m_pCodecCtx = 0;
+ }
+ m_pCodecCtx = avcodec_alloc_context();
+ m_pCodecCtx->extradata = 0;
+ m_pCodecCtx->extradata_size = 0;
+
+ // put sample parameters
+ m_pCodecCtx->bit_rate = 512 * 1024;
+ // resolution must be a multiple of two
+ m_pCodecCtx->width = m_width;
+ m_pCodecCtx->height = m_height;
+ // frames per second
+ m_pCodecCtx->frame_rate = V_CODEC_framerate;
+ m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;
+ // emit one intra frame every ten frames
+ m_pCodecCtx->gop_size = V_CODEC_gop_size;
+
+ m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+ m_pCodecCtx->extradata = malloc(V_De_ExtraHeaderSize);
+ if (m_pCodecCtx->extradata == 0)
+ {
+ return false;
+ }
+ m_pCodecCtx->extradata_size = V_De_ExtraHeaderSize;
+
+ // we dont send complete frames
+ if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
+ m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
+
+ // open it
+ if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+ {
+ PErrorText("could not open codec");
+ return false;
+ }
+
+ m_bInit = true;
+ return true;
+
+}
+
+//CVideoReceiver class.
+
+CVideoReceiver::CVideoReceiver()
+{
+ m_bInit = false;
+}
+
CVideoReceiver::~CVideoReceiver()
{
	// Nothing to release here: the embedded decoder cleans up in its own
	// destructor.  NOTE(review): Stop()/Destroy() is not invoked
	// automatically -- callers appear to be expected to stop the RTP
	// session themselves before destruction; confirm against call sites.
}
+
+bool CVideoReceiver::Init()
+{
+ if (m_bInit)
+ return m_bInit;
+
+ if (!m_vdecoder.Init(320, 240, CODEC_ID_MPEG4))
+ {
+ return false;
+ }
+
+ m_bInit = true;
+ return m_bInit;
+
+}
+
+bool CVideoReceiver::CreateXImage(Drawable parent, int x, int y, int width,
+ int height)
+{
+ bool ret;
+ ret = m_vdecoder.CreateXImage(parent, x, y, width, height);
+
+ return ret;
+}
+
void CVideoReceiver::CloseXImage()
{
	// Tear down the decoder's X window/image resources.
	m_vdecoder.CloseXImage();
}
+
+#define MAX_PACKET_SIZE 10*1024
+int CVideoReceiver::Start(int nHostPort)
+{
+ if (!m_bInit)
+ return -1;
+
+ if (IsActive())
+ return 0;
+
+ //init rtpsession.
+ RTPSessionParams sessParams1;
+ sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+ sessParams1.SetUsePollThread(1); //background thread to call virtual callbacks - set by default, but just to be sure
+ sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+ //setup transmission parameters
+ RTPUDPv4TransmissionParams transParams1;
+ transParams1.SetPortbase(nHostPort);
+ //CREATE THE SESSION
+ int status1 = Create(sessParams1, &transParams1);
+ if (status1)
+ {
+ return -2; //unable to create the session
+ }
+
+ return 0;
+}
+
void CVideoReceiver::Stop()
{
	// Destroy the RTP session; packet delivery stops after this.
	Destroy();
}
+
+void CVideoReceiver::ProcessFrame(unsigned char* framedata, int framelen)
+{
+ pVideo = (VideoHeader*) framedata;
+ if (pVideo->width != m_vdecoder.m_width || pVideo->height
+ != m_vdecoder.m_height)
+ {
+ m_vdecoder.ResetCodec(pVideo->width, pVideo->height);
+ }
+ m_vdecoder.DecodeProcess(framedata + sizeof(VideoHeader), framelen
+ - sizeof(VideoHeader));
+}
+
+//CV4LVEncoder class.
+
+CV4LVEncoder::CV4LVEncoder()
+{
+ m_pFrameBuf = 0;
+ m_pFrame = 0;
+ m_pCodec = 0;
+ m_pCodecCtx = 0;
+ m_bInit = false;
+
+}
+
CV4LVEncoder::~CV4LVEncoder()
{
	// Release the raw pixel buffer and the AVFrame (both heap-allocated in
	// Init()), then close the codec context.
	if (m_pFrameBuf)
	{
		free( m_pFrameBuf);
		m_pFrameBuf = 0;
	}
	if (m_pFrame)
	{
		free( m_pFrame);
		m_pFrame = 0;
	}
	if (m_pCodecCtx)
	{
		avcodec_close( m_pCodecCtx);
		m_pCodecCtx = 0;
	}
	m_bInit = false;

}
+
/** Initialize the encoder: register ffmpeg codecs, allocate the AVFrame
 *  and its YUV420P pixel buffer, and open the requested encoder.
 *
 *  @param width     capture frame width (must be a multiple of two)
 *  @param height    capture frame height (must be a multiple of two)
 *  @param nCodecID  ffmpeg codec id, defaults to CODEC_ID_MPEG4
 *  @return true on success; false if the codec is missing, cannot be
 *          opened, or an allocation fails.
 */
bool CV4LVEncoder::Init(int width, int height, enum CodecID nCodecID /*=CODEC_ID_MPEG4*/)
{
	m_bInit = false;
	/*Init for encode*/
	// Global libavcodec registration; repeated calls are made elsewhere in
	// this file with the same pair, presumably harmless -- TODO confirm.
	avcodec_init();
	avcodec_register_all();
	printf("1 good end\n");
	// (Re)allocate the frame that will wrap the YUV buffer below.
	if (m_pFrame)
	{
		free( m_pFrame);
		m_pFrame = 0;
	}
	m_pFrame = avcodec_alloc_frame();
	if (m_pFrame == 0)
	{
		return false;
	}

	/* find the video encoder */
	m_pCodec = avcodec_find_encoder(nCodecID);
	if (!m_pCodec)
	{
		PErrorText("codec not found");

		printf("\ncodec not found\n");
		return false;
	}

	// Drop any previously opened context before building a new one.
	if (m_pCodecCtx)
	{
		avcodec_close( m_pCodecCtx);
		m_pCodecCtx = 0;
	}
	printf("avcodec_find_encoder OK %d\n", nCodecID);

	m_pCodecCtx = avcodec_alloc_context();
	/* resolution must be a multiple of two */
	m_pCodecCtx->width = width;
	m_pCodecCtx->height = height;
	/* frames per second */
	m_pCodecCtx->frame_rate = V_CODEC_framerate;
	m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;

	m_pCodecCtx->bit_rate = 512 * 1024; //1128kbps->512kbps
	m_pCodecCtx->gop_size = V_CODEC_gop_size; /* emit one intra frame every ten frames */
	m_pCodecCtx->codec_type = CODEC_TYPE_VIDEO;
	m_pCodecCtx->pix_fmt = PIX_FMT_YUV420P;

	/* open it */
	printf("avcodec_open start\n");

	if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
	{
		PErrorText("could not open codec");
		return false;
	}
	printf("avcodec_open OK\n");

	//malloc outbuf.
	// Raw YUV420P pixel buffer sized for one frame at the configured
	// resolution; m_pFrame wraps this memory below.
	if (m_pFrameBuf)
	{
		free( m_pFrameBuf);
		m_pFrameBuf = 0;
	}

	int image_size = avpicture_get_size(PIX_FMT_YUV420P, m_pCodecCtx->width,
			m_pCodecCtx->height);

	m_pFrameBuf = (uint8_t*) malloc(image_size);
	if (m_pFrameBuf == 0)
	{
		PErrorText("FrameBuf malloc failed!");
		return false;
	}
	/*Init for encode*/

	// Point the AVFrame's planes into the malloc'ed buffer; SHARED marks
	// the memory as owned by us, not by libavcodec.
	avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
			m_pCodecCtx->width, m_pCodecCtx->height);
	m_pFrame->type = FF_BUFFER_TYPE_SHARED;

	m_bInit = true;
	return true;
}
+
+int CV4LVEncoder::EncodeProcess(char *frameaddress, uint8_t *pOutBuf,
+ int nOutsize)
+{
+
+ if (!m_bInit)
+ return -1;
+
+ if (nOutsize < V_En_OutBufSize)
+ return -2;
+
+ avpicture_fill(&m_pic_rgb, (uint8_t *) frameaddress, PIX_FMT_BGR24,
+ m_pCodecCtx->width, m_pCodecCtx->height);
+ avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
+ m_pCodecCtx->width, m_pCodecCtx->height);
+
+ if (img_convert((AVPicture*) m_pFrame, PIX_FMT_YUV420P, &m_pic_rgb,
+ PIX_FMT_BGR24, m_pCodecCtx->width, m_pCodecCtx->height) < 0)
+ {
+ fprintf(
+ stderr,
+ "xtoffmpeg.XImageToFFMPEG(): pixel format conversion not handled ... aborting\n");
+ return -3;
+ }
+
+ m_VideoHeader.width = m_pCodecCtx->width;
+ m_VideoHeader.height = m_pCodecCtx->height;
+ memcpy((char *) pOutBuf, &m_VideoHeader, sizeof(VideoHeader));
+
+ int iii = avcodec_encode_video(m_pCodecCtx, pOutBuf + sizeof(VideoHeader),
+ nOutsize, m_pFrame);
+
+ if (iii <= 0)
+ return -4;
+
+ return iii + sizeof(VideoHeader);
+}
+
+bool CV4LVEncoder::GetCapSize(int &width, int &height)
+{
+ if (!m_bInit)
+ return false;
+
+ width = m_pCodecCtx->width;
+ height = m_pCodecCtx->height;
+
+ return true;
+}
+
+//CV4LStuVideoSender class.
+
+CV4LStuVideoSender::CV4LStuVideoSender()
+{
+ stop = false;
+ m_bInit = 0;
+
+ m_pOutBuf = 0;
+ m_mode = ModeNone;
+ m_pV4Ldev = 0;
+
+ if (!m_initmutex.IsInitialized())
+ {
+ m_initmutex.Init();
+ }
+
+ m_display = 0;
+ m_win = 0;
+ m_d = 0;
+ m_image = 0;
+}
+
CV4LStuVideoSender::~CV4LStuVideoSender()
{
	//first stop thread, because m_pOutBuf is being used by Thread();
	Stop();

	//free buffer.
	if (m_pOutBuf != 0)
	{
		free( m_pOutBuf);
		m_pOutBuf = 0;
	}

	// for image
	// Release X resources in dependency order: image, windows, display.
	if (m_image)
	{
		XDestroyImage( m_image);
		m_image = 0;
	}

	if (m_display)
	{
		if (m_win)
		{
			XUnmapWindow(m_display, m_win);
			XClearWindow(m_display, m_win);
			m_win = 0;
		}

		if (m_d)
		{
			XClearWindow(m_display, m_d);
			m_d = 0;
		}

		XCloseDisplay( m_display);
		m_display = 0;
	}

	// Finally release the V4L capture device.
	CloseVideo();
}
+
+bool CV4LStuVideoSender::Init(int nHostPort)
+{
+ m_initmutex.Lock();
+ m_bInit = false;
+
+ if (!m_imagemutex.IsInitialized())
+ {
+ if (m_imagemutex.Init() < 0)
+ {
+ m_initmutex.Unlock();
+ return false;
+ }
+ }
+
+ if (!OpenVideo())
+ {
+ m_initmutex.Unlock();
+ return false;
+ }
+
+ //init rtpsession.
+ RTPSessionParams sessParams1;
+ sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+ sessParams1.SetUsePollThread(0); //background thread to call virtual callbacks - set by default, but just to be sure
+ sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+ //setup transmission parameters
+ RTPUDPv4TransmissionParams transParams1;
+ transParams1.SetPortbase(nHostPort);
+ //CREATE THE SESSION
+ int status1 = m_fecrtpsession.Create(sessParams1, &transParams1);
+ if (status1)
+ {
+ return false; //unable to create the session
+ }
+
+ //must set for fec SendFECPacket.
+ m_fecrtpsession.SetDefaultMark(true);
+ m_fecrtpsession.SetDefaultPayloadType(1);
+ m_fecrtpsession.SetDefaultTimestampIncrement(0);
+
+ if (m_pOutBuf == 0)
+ {
+ m_pOutBuf = (uint8_t*) malloc(CV4LVEncoder::V_En_OutBufSize);
+ if (m_pOutBuf == 0)
+ {
+ m_initmutex.Unlock();
+ return false;
+ }
+
+ }
+ //Init sencoder.
+ if (!m_vencoder.Init(VideoWidth, VideoHeight, CODEC_ID_MPEG4))
+ {
+ m_initmutex.Unlock();
+ return false;
+ }
+
+ if (!m_modemutex.IsInitialized())
+ {
+ if (m_modemutex.Init() < 0)
+ {
+ m_initmutex.Unlock();
+ return false;
+ }
+
+ }
+
+ m_bInit = true;
+ m_initmutex.Unlock();
+ return true;
+}
+
+bool CV4LStuVideoSender::OpenVideo()
+{
+ CloseVideo();
+
+ m_pV4Ldev = new V4L();
+
+ if (m_pV4Ldev == 0)
+ return false;
+
+ m_pV4Ldev->setInfo("/dev/video0", "PAL", 0);
+
+ if (!m_pV4Ldev->openDevice())
+ {
+ return false;
+ }
+
+ if (!m_pV4Ldev->initialiseCapture(VIDEO_PALETTE_RGB24))
+ {
+ return false;
+ }
+
+ return true;
+}
+
+void CV4LStuVideoSender::CloseVideo()
+{
+ if (m_pV4Ldev != 0)
+ {
+ delete m_pV4Ldev;
+ m_pV4Ldev = 0;
+ }
+}
+
+bool CV4LStuVideoSender::IsInitialized()
+{
+ bool b;
+
+ m_initmutex.Lock();
+ b = m_bInit;
+ m_initmutex.Unlock();
+
+ return b;
+}
+
+bool CV4LStuVideoSender::CreateXImage(Drawable parent, int x, int y, int width,
+ int height)
+{
+
+ int screen_num;
+ XSetWindowAttributes win_attr;
+ GdkPixbuf *original_pixbuf;
+ //GdkPixbuf *scaled_pixbuf;
+ gint original_width, original_height;
+ GdkColorspace original_color;
+ gboolean original_alpha;
+ gboolean pixbuf_has_alpha;
+
+ XImage *p_image = NULL;
+ if (!m_bInit)
+ return false;
+
+ CloseXImage();
+
+ m_imagemutex.Lock();
+
+ m_display = XOpenDisplay(NULL);
+ screen_num = DefaultScreen(m_display);
+ m_gc = DefaultGC(m_display, screen_num);
+ m_d = RootWindow(m_display, screen_num);
+
+ m_win = XCreateWindow(m_display, parent, x, y, width, height, 1,
+ XDefaultDepth(m_display, screen_num), InputOutput, CopyFromParent,
+ 0, &win_attr);
+
+ if (gdk_pixbuf_new_from_file("pic/teacher2.bmp", NULL) == NULL)
+ {
+ XSetWindowBackgroundPixmap(m_display, m_win, ParentRelative);
+
+ XMapWindow(m_display, m_win);
+ }
+
+ else
+ {
+ original_pixbuf = gdk_pixbuf_new_from_file("pic/teacher2.bmp", NULL);
+ pixbuf_has_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_color = gdk_pixbuf_get_colorspace(original_pixbuf);
+ original_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+ original_width = gdk_pixbuf_get_width(original_pixbuf);
+ original_height = gdk_pixbuf_get_height(original_pixbuf);
+ printf("original_alpha = %d\n", original_alpha);
+ printf("original_color = %d\n", original_color);
+ printf("original_width = %d\n", original_width);
+ printf("original_height = %d\n", original_height);
+ printf("n_channles = %d\n", gdk_pixbuf_get_n_channels(original_pixbuf));
+
+ Pixmap pixmap = XCreatePixmap(m_display, m_win, original_width,
+ original_height, XDefaultDepth(m_display, screen_num));
+ XSetWindowBackgroundPixmap(m_display, m_win, pixmap);
+
+ p_image = XGetImage(m_display, m_d, 0, 0, original_width, original_height, AllPlanes, ZPixmap);
+ if (!p_image)
+ {
+ printf("error\n");
+ exit(10);
+ }
+
+ AVPicture pic_rgb24, pic_rgb32;
+ if (m_display && p_image && pixmap)
+ {
+ avpicture_fill(&pic_rgb32, (uint8_t*) p_image->data,
+ PIX_FMT_RGBA32, original_width, original_height);
+ avpicture_fill(&pic_rgb24, gdk_pixbuf_get_pixels(original_pixbuf),
+ PIX_FMT_RGB24, original_width, original_height);
+
+ if (img_convert(&pic_rgb32, PIX_FMT_RGBA32, &pic_rgb24,
+ PIX_FMT_RGB24, original_width, original_height) < 0)
+ {
+ printf("Error pixel format conversion");
+ return -1;
+ }
+
+ XPutImage(m_display, pixmap, m_gc, p_image, 0, 0, 0, 0,
+ original_width, original_height);
+
+ }
+
+ XMapWindow(m_display, m_win);
+ XFreePixmap(m_display, pixmap);
+ gdk_pixbuf_unref(original_pixbuf);
+ XDestroyImage(p_image);
+ }
+ int imagewidth, imageheight;
+ m_vencoder.GetCapSize(imagewidth, imageheight);
+
+ m_image = XGetImage(m_display, m_d, 0, 0, imagewidth, imageheight,
+ AllPlanes, ZPixmap);
+ if (!m_image)
+ {
+ PErrorText("GetImage error");
+ m_imagemutex.Unlock();
+ return false;
+ }
+
+ m_imagemutex.Unlock();
+ return true;
+
+}
+
+void CV4LStuVideoSender::CloseXImage()
+{
+
+ if (!IsInitialized())
+ return;
+
+ m_imagemutex.Lock();
+
+ if (m_image)
+ {
+ XDestroyImage( m_image);
+ m_image = 0;
+ }
+
+ if (m_display)
+ {
+ if (m_win)
+ {
+ XUnmapWindow(m_display, m_win);
+ XClearWindow(m_display, m_win);
+ m_win = 0;
+ }
+
+ if (m_d)
+ {
+ XClearWindow(m_display, m_d);
+ m_d = 0;
+ }
+
+ XCloseDisplay( m_display);
+ m_display = 0;
+ }
+
+ m_imagemutex.Unlock();
+}
+
+void CV4LStuVideoSender::Playback(uint8_t* videodata)
+{
+ AVPicture pic_rgb24, pic_rgb32;
+ int width, height;
+
+ if (!IsInitialized())
+ return;
+
+ m_vencoder.GetCapSize(width, height);
+
+ m_imagemutex.Lock();
+
+ if (m_display && m_image && m_win)
+ {
+
+ avpicture_fill(&pic_rgb32, (uint8_t*) m_image->data,
+ CVDecoder::input_pixfmt, width, height);
+ avpicture_fill(&pic_rgb24, videodata, PIX_FMT_BGR24, width, height);
+
+ if (img_convert(&pic_rgb32, CVDecoder::input_pixfmt, &pic_rgb24,
+ PIX_FMT_BGR24, width, height) < 0)
+ {
+ PErrorText("Error pixel format conversion");
+ m_imagemutex.Unlock();
+ return;
+ }
+
+ XPutImage(m_display, m_win, m_gc, m_image, 0, 0, 0, 0, width, height);
+ }
+
+ m_imagemutex.Unlock();
+ return;
+}
+
+int CV4LStuVideoSender::Start()
+{
+
+ if (!IsInitialized())
+ return -1;
+
+ if (JThread::IsRunning())
+ return 0;
+
+ if (!stopmutex.IsInitialized())
+ {
+ if (stopmutex.Init() < 0)
+ return -2;
+ }
+
+ stop = false;
+
+ if (JThread::Start() < 0)
+ {
+ return -6;
+ }
+
+ return 0;
+}
+
+void CV4LStuVideoSender::Stop()
+{
+ if (!IsRunning())
+ return;
+
+ stopmutex.Lock();
+ stop = true;
+ stopmutex.Unlock();
+
+ int count = 0;
+ while (1)
+ {
+ if (count >= 100)
+ {
+ if (JThread::IsRunning())
+ {
+ JThread::Kill();
+ }
+ break;
+ }
+
+ if (JThread::IsRunning())
+ {
+ count++;
+ usleep(10000);
+ continue;
+ }
+
+ break;
+ }
+
+ stop = false;
+
+}
+
+void CV4LStuVideoSender::SetMode(V4LSTUVIDEO_SEND_MODE mode)
+{
+
+ if (!IsInitialized())
+ return;
+
+ if (mode == ModeCapture || mode == ModeTransmit || mode == ModeNone)
+ {
+
+ m_modemutex.Lock();
+ m_mode = mode;
+ m_modemutex.Unlock();
+ }
+
+}
+
/** Worker loop: grabs frames from the V4L device at the encoder's frame
 *  rate, paints them locally (ModeCapture and ModeTransmit), and for
 *  ModeTransmit also encodes and sends them over RTP with FEC.  Exits when
 *  Stop() raises the stop flag or a vital capture/encode error occurs. */
void *CV4LStuVideoSender::Thread()
{
	char * data;
	int OutBufSzie;
	int status;

	JThread::ThreadStarted();

	bool stopthread;
	V4LSTUVIDEO_SEND_MODE mode;

	stopmutex.Lock();
	stopthread = stop;
	stopmutex.Unlock();

	// delay1 = duration of one frame in microseconds, derived from the
	// encoder's frame rate (falls back to 62500us if the rate is unset).
	int64_t pre_time, cur_time;
	useconds_t delay, delay1;
	pre_time = av_gettime();
	if (m_vencoder.m_pCodecCtx->frame_rate != 0)
	{
		if (m_vencoder.m_pCodecCtx->frame_rate_base != 0)
			delay1 = 1000000 * m_vencoder.m_pCodecCtx->frame_rate_base
					/ m_vencoder.m_pCodecCtx->frame_rate;
		else
			delay1 = 1000000 / m_vencoder.m_pCodecCtx->frame_rate;
	}
	else
	{
		delay1 = 62500;
	}

	// for utility rate of CPU
	// Pretend one frame period has already elapsed so the first iteration
	// does not sleep.
	cur_time = pre_time = av_gettime();
	pre_time -= delay1;

	while (!stopthread)
	{
		// Sleep off the remainder of the current frame period.
		delay = cur_time - pre_time;
		if (delay < delay1)
		{
			usleep(delay1 - delay);
		}
		pre_time = av_gettime();

		// Snapshot the mode under its mutex.
		m_modemutex.Lock();
		mode = m_mode;
		m_modemutex.Unlock();

		if (mode == ModeNone)
		{
			// Idle: just re-check the stop flag and wait out the next period.
			stopmutex.Lock();
			stopthread = stop;
			stopmutex.Unlock();

			cur_time = av_gettime();
			continue;
		}

		if ((status = Capture((uint8_t**) &data)) < 0)
		{
			// Capture failure is fatal for the loop.
			printf("\nvideo: capture vital error.");
			stopthread = true;
			continue;
		}
		else
		{
			if (mode == ModeCapture || mode == ModeTransmit)
			{
				// Local preview in both active modes.
				Playback((uint8_t*) data);
				if (mode == ModeCapture)
				{
					// Capture-only: no encoding/sending this iteration.
					stopmutex.Lock();
					stopthread = stop;
					stopmutex.Unlock();

					cur_time = av_gettime();
					continue;
				}
			}

			OutBufSzie = CV4LVEncoder::V_En_OutBufSize;
			if ((status = m_vencoder.EncodeProcess(data, m_pOutBuf, OutBufSzie))
					< 0)
			{
				// Encoder failure is fatal for the loop.
				printf("\nvideo: encode vital error.");
				stopthread = true;
				continue;
			}
			else
			{
				// Send the encoded frame (header + bitstream) with FEC.
				if (status > 0 && mode == ModeTransmit)
				{
					m_fecrtpsession.SendFECPacket(m_pOutBuf, status, 10000);
				}
			}
		}

		cur_time = av_gettime();
	}

	stopmutex.Lock();
	stopthread = stop;
	stopmutex.Unlock();

	printf("\nvideo capture thread stoped.\n");
	return 0;
}
+
+bool CV4LStuVideoSender::AddDestination(const RTPIPv4Address &des)
+{
+
+ if (!IsInitialized())
+ return false;
+
+ if (m_fecrtpsession.AddDestination(des) < 0)
+ return false;
+
+ return true;
+}
+
void CV4LStuVideoSender::ClearDestinations()
{

	// Drop every registered receiver from the FEC RTP session.
	if (!IsInitialized())
		return;

	m_fecrtpsession.ClearDestinations();
}
+
+int CV4LStuVideoSender::Capture(uint8_t** data)
+{
+ *data = (uint8_t*) m_pV4Ldev->getNextFrame();
+ return 0;
+}