incubator-bluesky-commits mailing list archives

From p...@apache.org
Subject svn commit: r823467 [3/6] - in /incubator/bluesky/trunk/RealClass/Teacher: ./ src/ src/pic/
Date Fri, 09 Oct 2009 08:58:44 GMT
Added: incubator/bluesky/trunk/RealClass/Teacher/src/en_de_screen.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/en_de_screen.h?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/en_de_screen.h (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/en_de_screen.h Fri Oct  9 08:58:41 2009
@@ -0,0 +1,218 @@
+//en_de_screen.h
+
+#include "fecrtpsession.h"
+#include <iostream>
+#include <stdio.h>  // FILE, used by CScreenSender::m_hFile
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/mman.h>
+#include <linux/types.h>
+#include <linux/videodev.h>
+// FFmpeg
+#include <ffmpeg/avcodec.h>
+#include <ffmpeg/avformat.h>
+//Time
+#include <time.h>
+// X11
+#include <X11/Intrinsic.h>
+#include <X11/XWDFile.h>
+// Jthread and JMutex
+#include <jthread.h>
+#include <jmutex.h>
+
+#include "en_de_sendthread.h"
+
+#define max(x,y) ((x) > (y) ? (x) : (y))
+
+#if !defined(_EN_DE_SCREEN_H__INCLUDED_)
+#define _EN_DE_SCREEN_H__INCLUDED_
+
+typedef void (*ScreenPlayback)(uint8_t* screendata, int width, int height);
+typedef struct ScreenHeader
+{
+	int width;
+	int height;
+} ScreenHeader;
+
+typedef struct
+{
+	unsigned long red_shift;
+	unsigned long green_shift;
+	unsigned long blue_shift;
+	unsigned long alpha_shift;
+	unsigned long max_val;
+	unsigned long bit_depth;
+	unsigned long red_max_val;
+	unsigned long green_max_val;
+	unsigned long blue_max_val;
+	unsigned long alpha_max_val;
+	unsigned long red_bit_depth;
+	unsigned long green_bit_depth;
+	unsigned long blue_bit_depth;
+	unsigned long alpha_bit_depth;
+	u_int32_t alpha_mask;
+} ColorInfo;
+
+//Screen encoder.
+class CSEncoder
+{
+	friend class CSDecoder;
+	friend class CScreenSender;
+	friend class CWScreenRecorder;
+private:
+	enum SENCODERBUFSIZE
+	{
+		S_En_OutBufSize = 400 * 1024
+	};
+	enum SCREEN_CODEC_PARA
+	{
+		S_CODEC_width = 800,
+		S_CODEC_height = 600,
+		S_CODEC_framerate = 3,
+		S_CODEC_frame_rate_base = 1,
+		S_CODEC_gop_size = 4,
+		S_CODEC_max_b_frames
+	};
+public:
+	bool GetScreenSize(int &width, int &height);
+	bool Capture(XImage **image);
+	int EncodeProcess(XImage *image, uint8_t *pOutBuf, int nOutsize);
+	bool Init(enum CodecID nCodecID = CODEC_ID_MPEG4);
+	CSEncoder();
+	virtual ~CSEncoder();
+	void GetColorInfo(XImage *image, ColorInfo *ci /* return struct */);
+
+private:
+	void paintMousePointer(int *x, int *y, XImage *image);
+	void getCurrentPointer(int *x, int *y);
+	bool InitScreen(int width, int height);
+
+	// Screen-capture state.
+private:
+	XImage *m_image;
+	Display *m_display;
+	int m_screen_num;
+	Drawable m_d;
+	unsigned int m_width, m_height;
+	bool m_bInitScreen;
+	ColorInfo c_info;
+	int input_pixfmt;
+	ScreenHeader m_ScreenHeader;
+	// for encode.
+private:
+	bool m_bInit;
+	AVPicture m_pic_rgb;
+	uint8_t *m_pFrameBuf;
+	AVFrame *m_pFrame;
+	AVCodec *m_pCodec;
+	AVCodecContext *m_pCodecCtx;
+};
+
+//Screen decoder.
+class CSDecoder
+{
+	friend class CScreenReceiver;
+private:
+	enum VDECODERBUFSIZE
+	{
+		S_De_ExtraHeaderSize = 10000, S_De_INBUF_SIZE = 1024
+	};
+public:
+	CSDecoder();
+	virtual ~CSDecoder();
+public:
+	int DecodeProcess(uint8_t *encodeddata_v, const int encodeddatasize_v);
+	bool Init(int width, int height, enum CodecID nCodecID);
+
+public:
+	bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+	void CloseXImage();
+
+private:
+	void CloseCodec();
+	bool ResetCodec(const int width, const int height);
+	Drawable m_parent;
+	Display *m_display;
+	GC m_gc;
+	Window m_d;
+
+	Window m_win;
+	XImage *m_image;
+	JMutex m_imagemutex;
+	CSEncoder m_encoder;
+	ColorInfo c_info;
+	int input_pixfmt;
+
+private:
+	bool m_bInit;
+	AVCodec *m_pCodec;
+	AVCodecContext *m_pCodecCtx;
+	AVFrame *m_pOutFrame;
+	AVPicture pic_rgb;
+	int m_width;
+	int m_height;
+};
+
+//screen sender.
+class CScreenSender: private JThread
+{
+private:
+	enum SCREENSENDBUFSIZE
+	{
+		SendBufSize = 2
+	};
+public:
+	CScreenSender();
+	~CScreenSender();
+	bool Init(int nPort);
+	bool AddDestination(const RTPIPv4Address &des);
+	void ClearDestinations();
+	int Start(char* szFile = 0, bool bIsRecord = false);
+	void Stop();
+	void Record(bool bInRecord = true);
+private:
+	void *Thread();
+	bool stop;
+	JMutex stopmutex;
+private:
+	bool m_bIsRecord;
+	JMutex m_recordmutex;
+
+	CSEncoder m_sencoder;
+	uint8_t *m_pOutBuf;
+	FILE* m_hFile;
+	CSendThread m_sendthread;
+	int m_bInit;
+};
+
+//screen receiver
+
+class CScreenReceiver: public CFECRtpSession
+{
+public:
+	CScreenReceiver();
+	virtual ~CScreenReceiver();
+
+	bool Init();
+	int Start(int nPort);
+	void Stop();
+
+public:
+	bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+	void CloseXImage();
+
+private:
+	virtual void ProcessFrame(unsigned char* framedata, int framelen);
+
+private:
+	bool m_ScreenInit;
+	bool m_bInit;
+	CSDecoder m_sdecoder;
+public:
+	ScreenHeader* pScreen;
+};
+
+#endif // !defined(_EN_DE_SCREEN_H__INCLUDED_)
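For orientation, here is a minimal, illustrative wiring of the sender/receiver halves declared above, using only the public methods in this header (in the real application the two halves run on different hosts). The ports, the peer address, and the use of ntohl()/inet_addr() to build a jrtplib-style RTPIPv4Address are assumptions, not something this commit prescribes.

// Illustrative only -- not part of the committed sources.
#include "en_de_screen.h"
#include <arpa/inet.h>   // inet_addr, ntohl (assumed address helpers)

void start_screen_share(Drawable parentWindow)
{
	// Teacher side: capture the X screen, encode it, send it over RTP+FEC.
	CScreenSender sender;
	if (!sender.Init(6000))                                      // assumed local RTP port
		return;
	RTPIPv4Address peer(ntohl(inet_addr("192.168.0.10")), 6002); // assumed peer address/port
	sender.AddDestination(peer);
	sender.Start();                                              // szFile = 0, bIsRecord = false

	// Student side: receive, decode, and paint into an XImage child window.
	CScreenReceiver receiver;
	if (receiver.Init())
	{
		receiver.CreateXImage(parentWindow, 0, 0, 800, 600);     // 800x600 matches S_CODEC_width/height
		receiver.Start(6002);                                    // assumed local RTP port
	}

	// ... later: receiver.Stop(); receiver.CloseXImage(); sender.Stop();
}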

Added: incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.cpp
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.cpp?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.cpp (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.cpp Fri Oct  9 08:58:41 2009
@@ -0,0 +1,230 @@
+#include "en_de_sendthread.h"
+#include <stdio.h>  // printf
+#include <string.h> // memcpy
+
+extern void PErrorText(const char* error);
+//CSendThread class.
+CSendThread::CSendThread(int nSendBufSize)
+{
+	stop = false;
+	m_bInit = 0;
+
+	if (nSendBufSize <= 2)
+	{
+		m_nSendBufSize = 2;
+	}
+	else if (nSendBufSize >= 25)
+	{
+		m_nSendBufSize = 25;
+	}
+	else
+		m_nSendBufSize = nSendBufSize;
+
+}
+
+CSendThread::~CSendThread()
+{
+	Stop();
+
+}
+
+#define MAX_PACKET_SIZE 10*1024
+bool CSendThread::Init(int nPort)
+{
+	if (m_bInit)
+		return true;
+
+	//init rtpsession.
+	RTPSessionParams sessParams1;
+	sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+	sessParams1.SetUsePollThread(0); //no background poll thread; this sender drives the session itself
+	sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+	//setup transmission parameters
+	RTPUDPv4TransmissionParams transParams1;
+	transParams1.SetPortbase(nPort);
+	//CREATE THE SESSION
+	int status1 = m_fecrtpsession.Create(sessParams1, &transParams1);
+	if (status1)
+	{
+		return false; //unable to create the session
+	}
+
+	//defaults must be set before SendFECPacket can be used
+	m_fecrtpsession.SetDefaultMark(true);
+	m_fecrtpsession.SetDefaultPayloadType(1);
+	m_fecrtpsession.SetDefaultTimestampIncrement(0);
+
+	m_bInit = true;
+	return m_bInit;
+}
+
+int CSendThread::Start()
+{
+	if (!m_bInit)
+		return -1;
+
+	if (JThread::IsRunning())
+		return 0;
+
+	if (!stopmutex.IsInitialized())
+	{
+		if (stopmutex.Init() < 0)
+			return -2;
+	}
+
+	stop = false;
+
+	if (!m_senddatamutex.IsInitialized())
+	{
+		if (m_senddatamutex.Init() < 0)
+			return -2;
+	}
+
+	if (JThread::Start() < 0)
+	{
+		return -6;
+	}
+
+	return 0;
+}
+
+void CSendThread::Stop()
+{
+	if (!IsRunning())
+		return;
+
+	stopmutex.Lock();
+	stop = true;
+	stopmutex.Unlock();
+
+	//give the thread one second to exit on its own;
+	sleep(1);
+	if (JThread::IsRunning())
+	{
+		JThread::Kill();
+	}
+	stop = false;
+
+}
+
+int CSendThread::SendData(uint8_t *data, int datalen)
+{
+	SENDBUFDATA *pData;
+	if (!m_bInit)
+		return -1;
+
+	pData = new SENDBUFDATA;
+	if (pData == 0)
+	{
+		PErrorText("Error: CSendThread::SendData new SENDBUFDATA");
+		return -2;
+	}
+
+	pData->data = new uint8_t[datalen];
+	if (pData->data == 0)
+	{
+		delete pData;
+		PErrorText("Error: CSendThread::SendData new uint8_t");
+		return -3;
+	}
+
+	memcpy(pData->data, data, datalen);
+	pData->datalen = datalen;
+
+	m_senddatamutex.Lock();
+	m_senddata.insert(m_senddata.end(), pData);
+	m_senddatamutex.Unlock();
+
+	return 0;
+}
+
+void *CSendThread::Thread()
+{
+	JThread::ThreadStarted();
+
+	SENDBUFDATA *senddata = new SENDBUFDATA[m_nSendBufSize];
+	bool stopthread;
+
+	stopmutex.Lock();
+	stopthread = stop;
+	stopmutex.Unlock();
+
+	int size;
+	SENDBUFDATA* p;
+	SendDatalist::iterator itera;
+	int index;
+	while (!stopthread)
+	{
+
+		m_senddatamutex.Lock();
+		size = m_senddata.size();
+		if (size == 0)
+		{
+			m_senddatamutex.Unlock();
+
+			usleep(50000);
+
+			stopmutex.Lock();
+			stopthread = stop;
+			stopmutex.Unlock();
+			continue;
+		}
+		if (size > m_nSendBufSize)
+		{
+			for (itera = m_senddata.begin(); itera != m_senddata.end(); itera++)
+			{
+				p = *itera;
+				delete[] p->data; // buffer was allocated with new[] in SendData()
+				delete p;
+				p = 0;
+			}
+			m_senddata.clear();
+			size = 0;
+		}
+		else
+		{
+			for (itera = m_senddata.begin(), index = 0; itera
+					!= m_senddata.end(); itera++, index++)
+			{
+				p = *itera;
+				senddata[index].data = p->data;
+				senddata[index].datalen = p->datalen;
+				delete p;
+				p = 0;
+			}
+			m_senddata.clear();
+			size = index;
+		}
+
+		m_senddatamutex.Unlock();
+
+		for (index = 0; index < size; index++)
+		{
+			m_fecrtpsession.SendFECPacket(senddata[index].data,
+					senddata[index].datalen, 5000);
+			delete[] senddata[index].data; // allocated with new[] in SendData()
+		}
+
+		stopmutex.Lock();
+		stopthread = stop;
+		stopmutex.Unlock();
+
+	}
+	delete[] senddata; // staging array allocated at thread start
+	printf("\nSendthread stopped.\n");
+	return 0;
+}
+
+bool CSendThread::AddDestination(const RTPIPv4Address &des)
+{
+	if (!m_bInit)
+		return false;
+	if (m_fecrtpsession.AddDestination(des) < 0)
+		return false;
+
+	return true;
+}
+
+void CSendThread::ClearDestinations()
+{
+	if (!m_bInit)
+		return;
+	m_fecrtpsession.ClearDestinations();
+}
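A minimal usage sketch of the class implemented above, for readers following the data path: Init() binds the FEC RTP session, SendData() copies the caller's encoded frame into an internal queue, and the worker thread drains that queue through SendFECPacket(). The port, the peer address, and the ntohl()/inet_addr() helpers are illustrative assumptions.

// Illustrative only -- not part of the committed sources.
#include "en_de_sendthread.h"
#include <arpa/inet.h>   // inet_addr, ntohl (assumed address helpers)

int send_encoded_frames(uint8_t *frame, int framelen)
{
	CSendThread sender(8);                 // backlog clamped to [2, 25] by the constructor
	if (!sender.Init(7000))                // assumed local RTP port base
		return -1;
	sender.AddDestination(RTPIPv4Address(ntohl(inet_addr("192.168.0.20")), 7002)); // assumed peer
	if (sender.Start() < 0)
		return -2;

	sender.SendData(frame, framelen);      // SendData copies the buffer, so the caller keeps ownership
	// ... keep calling SendData() as new encoded frames become available ...

	sender.Stop();
	return 0;
}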

Added: incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.h?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.h (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/en_de_sendthread.h Fri Oct  9 08:58:41 2009
@@ -0,0 +1,61 @@
+#include "fecrtpsession.h"
+// Linux sys.
+#include <iostream>
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/mman.h>
+#include <linux/types.h>
+#include <linux/videodev.h>
+// FFmpeg
+#include <ffmpeg/avcodec.h>
+#include <ffmpeg/avformat.h>
+
+#include <time.h>
+
+// X11
+#include <X11/Intrinsic.h>
+#include <X11/XWDFile.h>
+// Jthread and JMutex
+#include <jthread.h>
+#include <jmutex.h>
+#include <list>   // std::list, needed by the SendDatalist typedef below
+
+#if !defined(_EN_DE_SENDTHREAD_H__INCLUDED_)
+#define _EN_DE_SENDTHREAD_H__INCLUDED_
+
+struct SENDBUFDATA
+{
+	uint8_t* data;
+	int datalen;
+};
+typedef std::list<SENDBUFDATA*> SendDatalist;
+
+//CSendThread class.
+class CSendThread: private JThread
+{
+public:
+	CSendThread(int nSendBufSize);
+	~CSendThread();
+	bool Init(int nPort);
+	bool AddDestination(const RTPIPv4Address &des);
+	void ClearDestinations();
+	int SendData(uint8_t *data, int datalen);
+	int Start();
+	void Stop();
+private:
+	void *Thread();
+	bool stop;
+	JMutex stopmutex;
+private:
+	CFECRtpSession m_fecrtpsession;
+	int m_bInit;
+
+	int m_nSendBufSize;
+	SendDatalist m_senddata;
+	JMutex m_senddatamutex;
+};
+
+#endif // !defined(_EN_DE_SENDTHREAD_H__INCLUDED_)
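One design point worth noting in this header/implementation pair: the send queue is bounded by the constructor argument (clamped to 2..25), and when the backlog grows past that bound the worker thread discards the entire backlog rather than just the oldest frames. The standalone sketch below restates that policy outside of the RTP code; the Frame type and the printed messages are placeholders.

// Illustrative only -- mirrors the overflow policy of CSendThread::Thread().
#include <list>
#include <cstdio>

struct Frame { int id; };

void drain_queue(std::list<Frame> &queue, int maxBacklog)
{
	if ((int) queue.size() > maxBacklog)
	{
		// Backlog too deep: drop everything and start fresh (favors latency over completeness).
		std::printf("dropping %lu stale frames\n", (unsigned long) queue.size());
		queue.clear();
		return;
	}
	for (std::list<Frame>::iterator it = queue.begin(); it != queue.end(); ++it)
		std::printf("sending frame %d\n", it->id);
	queue.clear();
}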

Added: incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.cpp
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.cpp?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.cpp (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.cpp Fri Oct  9 08:58:41 2009
@@ -0,0 +1,2279 @@
+static char *primary_version = "";
+static char *video_version_ = "1.0";
+
+//V4l classes.
+#include <deque>
+#include <unistd.h>
+#include <string>
+#include <sstream>
+#include <iostream>
+#include <iomanip>
+#include <cstring>  // memset, strcmp, strcpy, strcat
+#include <cstdio>   // printf, sprintf, perror, fopen
+
+#include <pthread.h>
+
+#include <gtk/gtk.h>
+#include <gdk-pixbuf/gdk-pixbuf.h>
+#include "en_de_video.h"
+
+extern int v4l_dev_num;
+extern int delay_time;
+
+extern "C"
+{
+#include <sys/types.h>
+#include <sys/soundcard.h>
+#include <sys/time.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+}
+
+static int ENCODE_YUV = 0;
+
+/** Send a request to the v4l device associated with a V4LStruct object.
+ */
+
+bool V4LDevice::request(int req, V4LStruct *v4l)
+{
+	ENCODE_YUV = getenv("KINO_V4L_PATCHED") != NULL;
+	return request(req, v4l->getStruct());
+}
+
+/** Send a request to the v4l device associated with an arbitrary address.
+ */
+
+bool V4LDevice::request(int req, void *addr)
+{
+	return ioctl(getHandle(), req, addr) != -1;
+}
+
+V4LCapability::V4LCapability(V4LDevice *device)
+{
+	device->request(VIDIOCGCAP, this);
+}
+
+V4LCapability::~V4LCapability()
+{
+}
+
+void *V4LCapability::getStruct()
+{
+	return &capability;
+}
+
+char *V4LCapability::getName()
+{
+	return capability.name;
+}
+
+int V4LCapability::getNumberOfChannels()
+{
+	return capability.channels;
+}
+
+int V4LCapability::getNumberOfAudioDevices()
+{
+	return capability.audios;
+}
+
+int V4LCapability::getMinWidth()
+{
+	return capability.minwidth;
+}
+
+int V4LCapability::getMinHeight()
+{
+	return capability.minheight;
+}
+
+int V4LCapability::getMaxWidth()
+{
+	return capability.maxwidth;
+}
+
+int V4LCapability::getMaxHeight()
+{
+	return capability.maxheight;
+}
+
+bool V4LCapability::canCapture()
+{
+	return capability.type & VID_TYPE_CAPTURE;
+}
+
+bool V4LCapability::hasTuner()
+{
+	return capability.type & VID_TYPE_TUNER;
+}
+
+bool V4LCapability::hasChromakey()
+{
+	return capability.type & VID_TYPE_CHROMAKEY;
+}
+
+bool V4LCapability::hasClipping()
+{
+	return capability.type & VID_TYPE_CLIPPING;
+}
+
+bool V4LCapability::hasOverwrite()
+{
+	return capability.type & VID_TYPE_FRAMERAM;
+}
+
+bool V4LCapability::hasScaling()
+{
+	return capability.type & VID_TYPE_SCALES;
+}
+
+bool V4LCapability::isMonochrome()
+{
+	return capability.type & VID_TYPE_MONOCHROME;
+}
+
+bool V4LCapability::canSubCapture()
+{
+	return capability.type & VID_TYPE_SUBCAPTURE;
+}
+
+void V4LCapability::report()
+{
+
+}
+
+V4LTuner::V4LTuner(V4LDevice *device, int index)
+{
+	this->device = device;
+	this->tuner.tuner = index;
+	this->device->request(VIDIOCGTUNER, this);
+}
+
+void *V4LTuner::getStruct()
+{
+	return &tuner;
+}
+
+void V4LTuner::report()
+{
+}
+
+int V4LTuner::getRangeLow()
+{
+	return tuner.rangelow;
+}
+
+void V4LTuner::setRangeLow(int low)
+{
+	tuner.rangelow = low;
+}
+
+int V4LTuner::getRangeHigh()
+{
+	return tuner.rangehigh;
+}
+
+void V4LTuner::setRangeHigh(int high)
+{
+	tuner.rangehigh = high;
+}
+
+int V4LTuner::getFlags()
+{
+	return tuner.flags;
+}
+
+void V4LTuner::setFlags(int flags)
+{
+	tuner.flags = flags;
+}
+
+int V4LTuner::getMode()
+{
+	return tuner.mode;
+}
+
+void V4LTuner::setMode(int mode)
+{
+	tuner.mode = mode;
+}
+
+int V4LTuner::getSignal()
+{
+	return tuner.signal;
+}
+
+V4LChannel::V4LChannel(V4LDevice *device, int index)
+{
+	memset(&channel, 0, sizeof(struct video_channel));
+	this->device = device;
+	this->channel.channel = index;
+	device->request(VIDIOCGCHAN, this);
+	device->request(VIDIOCSCHAN, this);
+	for (unsigned int i = 0; i < getNumberOfTuners(); i++)
+	{
+		V4LTuner *tuner = new V4LTuner(this->device, i);
+		tuners.insert(tuners.end(), tuner);
+	}
+}
+
+V4LChannel::~V4LChannel()
+{
+}
+
+void *V4LChannel::getStruct()
+{
+	return &channel;
+}
+
+char *V4LChannel::getName()
+{
+	return channel.name;
+}
+
+bool V4LChannel::setTuner(unsigned int index)
+{
+	if (index >= 0 && index < tuners.size())
+	{
+		current = tuners[index];
+		current->setRangeLow(0);
+		current->setRangeHigh(0xffff);
+		return device->request(VIDIOCSTUNER, current);
+	}
+	else
+	{
+		return false;
+	}
+}
+
+unsigned int V4LChannel::getNumberOfTuners()
+{
+	return channel.tuners;
+}
+
+V4LTuner *V4LChannel::getTuner(unsigned int index)
+{
+	if (index >= 0 && index < tuners.size())
+	{
+		return tuners[index];
+	}
+	else
+	{
+		return NULL;
+	}
+}
+
+int V4LChannel::getSignal()
+{
+	device->request(VIDIOCGTUNER, current);
+	return current->getSignal();
+}
+
+void V4LChannel::report()
+{
+}
+
+V4L::V4L()
+{
+}
+
+void V4L::setInfo(char *device, char *input, char *audio, int sample)
+{
+	this->device = device;
+	this->input = input;
+	this->audio = audio;
+	this->sample = sample;
+}
+
+bool V4L::openDevice()
+{
+	bool ret = true;
+
+	if (!strcmp(this->input, "PAL"))
+	{
+		this->width = VideoWidth;
+		this->height = VideoHeight;
+		this->fps = 25;
+		this->frameSample = this->sample / this->fps;
+	}
+	else if (!strcmp(this->input, "NTSC"))
+	{
+		this->width = 720;
+		this->height = 480;
+		this->fps = 30;
+		this->frameSample = this->sample / this->fps;
+	}
+
+	this->current = NULL;
+	this->fd = open(device, O_RDWR);
+	if (fd == -1)
+	{
+		perror("Unable to open video device");
+		return false;
+	}
+
+	{
+		video_capability capability;
+		if (ioctl(fd, VIDIOCGCAP, &capability) != -1)
+		{
+			printf(" query was successful\n");
+		}
+		else
+		{
+			perror("query");
+		}
+
+		if ((capability.type & VID_TYPE_CAPTURE) != 0)
+		{
+			printf("this device can capture video to memory\n");
+		}
+		else
+		{
+			perror("can't capture video to memory");
+		}
+
+		struct video_channel queryChannel;
+		int i = 0;
+		while (i < capability.channels)
+		{
+			queryChannel.channel = i;
+			if (ioctl(fd, VIDIOCGCHAN, &queryChannel) != -1)
+			{ // ioctl success, queryChannel contains information about this channel
+				printf(" information about this channel:%d, %s\n",
+						queryChannel.channel, queryChannel.name);
+			}
+			else
+			{ // PErrorText("querychannel");
+			}
+			++i;
+		}
+
+		struct video_channel selectedChannel;
+		selectedChannel.channel = 0;
+		selectedChannel.norm = VIDEO_MODE_PAL;
+		if (ioctl(fd, VIDIOCSCHAN, &selectedChannel) == -1)
+		{
+			return false;
+		}
+
+		struct video_window captureWindow;
+		memset(&captureWindow, 0, sizeof(captureWindow));
+		captureWindow.width = VideoWidth;
+		captureWindow.height = VideoHeight;
+		if (ioctl(fd, VIDIOCSWIN, &captureWindow) == -1)
+		{
+			//			return false;
+		}
+
+		memset(&captureWindow, 0, sizeof(captureWindow));
+		if (ioctl(fd, VIDIOCGWIN, &captureWindow) == -1)
+		{
+			return false;
+		}
+
+		width = captureWindow.width;
+		height = captureWindow.height;
+		printf("width=%d,height=%d\n", width, height);
+
+	}
+
+	int depth;
+	int palette;
+	struct video_picture imageProperties;
+
+	if (ioctl(fd, VIDIOCGPICT, &imageProperties) == -1)
+	{ // failed to retrieve default image properties
+		return false;
+	}
+	depth = imageProperties.depth;
+	palette = imageProperties.palette;
+	printf("\nVideo:depth=%d,palette=%d\n", depth, palette);
+	if (ioctl(fd, VIDIOCSPICT, &imageProperties) == -1)
+	{ // failed to retrieve default image properties
+		return false;
+	}
+
+	return ret;
+}
+
+/** Destructor for the V4L device.
+ */
+
+V4L::~V4L()
+{
+	if (fd != -1)
+	{
+		close( fd);
+		for (unsigned int index = 0; index < channels.size(); index++)
+			delete channels[index];
+		delete this->capability;
+	}
+}
+
+bool V4L::deviceAvailable()
+{
+	return fd != -1;
+}
+
+int V4L::getHandle()
+{
+	return fd;
+}
+
+bool V4L::setChannel(unsigned int channel)
+{
+	if (channel >= 0 && channel < channels.size())
+	{
+		current = channels[channel];
+		return this->request(VIDIOCSCHAN, current);
+	}
+	else
+	{
+		return false;
+	}
+}
+
+unsigned int V4L::getNumberOfChannels()
+{
+	return channels.size();
+}
+
+V4LChannel *V4L::getChannel(unsigned int channel)
+{
+	if (channel >= 0 && channel < channels.size())
+		return channels[channel];
+	else
+		return NULL;
+}
+
+bool V4L::setTuner(unsigned int tuner)
+{
+	if (current != NULL)
+		return current->setTuner(tuner);
+	else
+		return false;
+}
+
+unsigned int V4L::getNumberOfTuners()
+{
+	if (current != NULL)
+		return current->getNumberOfTuners();
+	else
+		return 0;
+}
+
+V4LTuner *V4L::getTuner(unsigned int tuner)
+{
+	if (current != NULL)
+		return current->getTuner(tuner);
+	else
+		return NULL;
+}
+
+bool V4L::setCaptureResolution(int width, int height)
+{
+	if (width > capability->getMaxWidth() || width < capability->getMinWidth())
+		return false;
+	if (height > capability->getMaxHeight() || height
+			< capability->getMinHeight())
+		return false;
+	if (!capability->hasScaling() && (width != capability->getMaxWidth()
+			|| height != capability->getMaxHeight()))
+		return false;
+	this->width = width;
+	this->height = height;
+	return true;
+}
+int V4L::getWidth()
+{
+	return width;
+}
+
+int V4L::getHeight()
+{
+	return height;
+}
+
+void V4L::startAudio()
+{
+	struct video_audio audio;
+	ioctl(fd, VIDIOCGAUDIO, &audio);
+	if (audio.flags & VIDEO_AUDIO_MUTE)
+		audio.flags ^= VIDEO_AUDIO_MUTE;
+	audio.volume = 65535;
+	ioctl(fd, VIDIOCSAUDIO, &audio);
+}
+
+void V4L::stopAudio()
+{
+	struct video_audio audio;
+	ioctl(fd, VIDIOCGAUDIO, &audio);
+	audio.flags |= VIDEO_AUDIO_MUTE;
+	audio.volume = 0;
+	ioctl(fd, VIDIOCSAUDIO, &audio);
+}
+
+int V4L::mappedMemorySize(bool init)
+{
+	static video_mbuf buf;
+	if (init == true)
+	{
+		init = 1;
+		ioctl(fd, VIDIOCGMBUF, &buf);
+		frame_maps = buf.frames;
+	}
+	return buf.size;
+}
+
+bool V4L::initialiseCapture(int format)
+{
+	size = width * height * 4;
+
+	map = mmap(0, mappedMemorySize(true), PROT_READ | PROT_WRITE, MAP_SHARED,
+			fd, 0);
+
+	if (map != MAP_FAILED) // mmap() reports failure with MAP_FAILED, not NULL
+	{
+		for (int i = 0; i < frame_maps; i++)
+		{
+			frame[i].frame = i;
+			frame[i].width = getWidth();
+			frame[i].height = getHeight();
+			frame[i].format = format;
+		}
+
+		struct timeval tv;
+		gettimeofday(&tv, NULL);
+		starttime = tv.tv_sec * 1000000 + tv.tv_usec;
+		frames = 0;
+		frame_next = 0;
+
+		int retry = 0;
+		while (ioctl(fd, VIDIOCMCAPTURE, &frame[0]) == -1 && retry++
+				< frame_maps + 1)
+			;
+
+		return true;
+	}
+	else
+	{
+		map = NULL; // normalize so stopCapture()'s NULL check stays valid
+		return false;
+	}
+}
+
+void *V4L::getNextFrame()
+{
+	unsigned char * ret = NULL;
+
+	int current = frame_next;
+	frame_next = (frame_next + 1) % frame_maps;
+
+	if (ioctl(fd, VIDIOCMCAPTURE, &frame[frame_next]) == -1)
+		if (ioctl(fd, VIDIOCSYNC, &frame[current].frame) == -1)
+			ret = (unsigned char *) map + current * (mappedMemorySize()
+					/ frame_maps);
+
+	frames++;
+
+	return (void *) ret;
+}
+
+void V4L::stopCapture()
+{
+	if (map != NULL)
+	{
+		struct timeval tv;
+		gettimeofday(&tv, NULL);
+		long long endtime = tv.tv_sec * 1000000 + tv.tv_usec;
+		double fps = (frames) / (((double) (endtime - starttime)) / 1000000);
+		munmap(map, mappedMemorySize());
+		map = NULL;
+		int enable = 0;
+		ioctl(getHandle(), VIDIOCCAPTURE, &enable);
+	}
+}
+
+int V4L::getFrequency()
+{
+	unsigned long current;
+	ioctl(fd, VIDIOCGFREQ, &current);
+	return (int) current;
+}
+
+bool V4L::setFrequency(int frequency)
+{
+	unsigned long val = (unsigned long) frequency & 0xffff;
+	return ioctl(fd, VIDIOCSFREQ, &val) != -1;
+}
+
+int V4L::getSignal()
+{
+	return current->getSignal();
+}
+
+void V4L::report()
+{
+	capability->report();
+
+	for (unsigned int index = 0; index < channels.size(); index++)
+	{
+		channels[index]->report();
+	}
+}
+
+extern void PErrorText(const char* error);
+
+int CVDecoder::input_pixfmt = PIX_FMT_RGBA32;
+
+#define STARTTIME { static int MaxTime=0;                         \
+	                static int MinTime=1000;                      \
+	                static int largecount=0;                      \
+	                int IntervalTime=0;                           \
+	                int64_t start_time=av_gettime();
+
+#define ENDTIME     IntervalTime = av_gettime()-start_time;       \
+	                if(IntervalTime>40000)                         \
+	                    largecount++;                             \
+	                if(MaxTime<IntervalTime)                      \
+	                    MaxTime=IntervalTime;                     \
+	                if(MinTime>IntervalTime)                      \
+	                    MinTime=IntervalTime;                     \
+	                printf("\nMaxTime=%d, MinTime=%d, largecount=%d, IntervalTime=%d\n", MaxTime, MinTime, largecount, IntervalTime);}
+
+//CVDecoder class.
+
+CVDecoder::CVDecoder()
+{
+	m_pCodec = 0;
+	m_pCodecCtx = 0;
+	m_pOutFrame = 0;
+	m_bInit = false;
+
+	m_display = 0;
+	m_win = 0;
+	m_image = 0;
+
+}
+
+CVDecoder::~CVDecoder()
+{
+	m_bInit = false;
+
+	if (m_pOutFrame)
+	{
+		free( m_pOutFrame);
+		m_pOutFrame = 0;
+	}
+	if (m_pCodecCtx)
+	{
+		if (m_pCodecCtx->extradata)
+		{
+			free(m_pCodecCtx->extradata);
+			m_pCodecCtx->extradata = 0;
+			m_pCodecCtx->extradata_size = 0;
+		}
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+
+	if (m_image)
+	{
+		XDestroyImage( m_image);
+		m_image = 0;
+	}
+
+	if (m_display)
+	{
+		if (m_win)
+		{
+			XClearWindow(m_display, m_win);
+			m_win = 0;
+		}
+		if (m_d)
+		{
+			XClearWindow(m_display, m_d);
+			m_d = 0;
+		}
+
+		XCloseDisplay( m_display);
+		m_display = 0;
+	}
+
+}
+void CVDecoder::GetColorInfo(XImage *image, ColorInfo *ci /* return struct */)
+{
+	unsigned long red_mask, green_mask, blue_mask, alpha_mask;
+	// the shifts are unsigned longs as well
+
+	if (!ci)
+		return;
+
+	// setting shifts and bit_depths to zero
+	ci->red_shift = ci->green_shift = ci->blue_shift = ci->alpha_shift = 0;
+	ci->red_bit_depth = ci->green_bit_depth = ci->blue_bit_depth
+			= ci->alpha_bit_depth = 0;
+
+	red_mask = image->red_mask;
+	if (red_mask > 0)
+	{
+		// shift red_mask to the right till all empty bits have been
+		// shifted out and count how many they were
+		while ((red_mask & 0x01) == 0)
+		{
+			red_mask >>= 1;
+			ci->red_shift++;
+		}
+		// count how many bits are set in the mask = depth
+		while ((red_mask & 0x01) == 1)
+		{
+			red_mask >>= 1;
+			ci->red_bit_depth++;
+		}
+	}
+
+	ci->red_max_val = (1 << ci->red_bit_depth) - 1;
+
+	green_mask = image->green_mask;
+	if (green_mask > 0)
+	{
+		while ((green_mask & 0x01) == 0)
+		{
+			green_mask >>= 1;
+			ci->green_shift++;
+		}
+		while ((green_mask & 0x01) == 1)
+		{
+			green_mask >>= 1;
+			ci->green_bit_depth++;
+		}
+	}
+	ci->green_max_val = (1 << ci->green_bit_depth) - 1;
+
+	blue_mask = image->blue_mask;
+	if (blue_mask > 0)
+	{
+		while ((blue_mask & 0x01) == 0)
+		{
+			blue_mask >>= 1;
+			ci->blue_shift++;
+		}
+		while ((blue_mask & 0x01) == 1)
+		{
+			blue_mask >>= 1;
+			ci->blue_bit_depth++;
+		}
+	}
+	ci->blue_max_val = (1 << ci->blue_bit_depth) - 1;
+
+	ci->max_val = max(ci->red_max_val, ci->green_max_val);
+	ci->max_val = max(ci->blue_max_val, ci->max_val);
+	ci->bit_depth = max(ci->red_bit_depth, ci->green_bit_depth);
+	ci->bit_depth = max(ci->blue_bit_depth, ci->bit_depth);
+	if (image->bits_per_pixel > image->depth)
+	{
+		ci->alpha_mask = ~(image->red_mask | image->blue_mask
+				| image->green_mask);
+		alpha_mask = ci->alpha_mask;
+		if (alpha_mask > 0)
+		{
+			while ((alpha_mask & 0x01) == 0)
+			{
+				alpha_mask >>= 1;
+				ci->alpha_shift++;
+			}
+			while ((alpha_mask & 0x01) == 1)
+			{
+				alpha_mask >>= 1;
+				ci->alpha_bit_depth++;
+			}
+		}
+		ci->alpha_max_val = (1 << ci->alpha_bit_depth) - 1;
+	}
+}
+
+bool CVDecoder::CreateXImage(Drawable parent, int x, int y, int width,
+		int height)
+{
+	int screen_num;
+	GdkPixbuf *original_pixbuf;
+	gint original_width, original_height;
+	GdkColorspace original_color;
+	gboolean original_alpha;
+	gboolean pixbuf_has_alpha;
+	Pixmap pixmap;
+	XImage *p_image = NULL;
+	if (!m_bInit)
+		return false;
+
+	CloseXImage();
+
+	m_imagemutex.Lock();
+
+	m_display = XOpenDisplay(NULL);
+	screen_num = DefaultScreen(m_display);
+	m_gc = DefaultGC(m_display, screen_num);
+	m_d = RootWindow(m_display, screen_num);
+
+	m_win = XCreateWindow(m_display, parent, x, y, width, height, 1,
+			XDefaultDepth(m_display, screen_num), InputOutput, CopyFromParent,
+			0, &win_attr);
+
+	if (gdk_pixbuf_new_from_file("pic/student.bmp", NULL) == NULL)
+	{
+		XSetWindowBackgroundPixmap(m_display, m_win, ParentRelative);
+		XMapWindow(m_display, m_win);
+	}
+	else
+	{
+		original_pixbuf = gdk_pixbuf_new_from_file("pic/student.bmp", NULL);
+		pixbuf_has_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+		original_color = gdk_pixbuf_get_colorspace(original_pixbuf);
+		original_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+		original_width = gdk_pixbuf_get_width(original_pixbuf);
+		original_height = gdk_pixbuf_get_height(original_pixbuf);
+		printf("original_alpha = %d\n", original_alpha);
+		printf("original_color = %d\n", original_color);
+		printf("original_width = %d\n", original_width);
+		printf("original_height = %d\n", original_height);
+		printf("n_channels = %d\n", gdk_pixbuf_get_n_channels(original_pixbuf));
+
+		pixmap = XCreatePixmap(m_display, m_win, original_width,
+				original_height, XDefaultDepth(m_display, screen_num));
+		XSetWindowBackgroundPixmap(m_display, m_win, pixmap);
+
+		p_image = XGetImage(m_display, m_d, 0, 0, original_width,
+				original_height, AllPlanes, ZPixmap);
+		if (!p_image)
+		{
+			printf("error\n");
+			exit(10);
+		}
+
+		AVPicture pic_rgb24, pic_rgb32;
+		if (m_display && p_image && pixmap)
+		{
+			avpicture_fill(&pic_rgb32, (uint8_t*) p_image->data,
+					PIX_FMT_RGBA32, original_width, original_height);
+			avpicture_fill(&pic_rgb24, gdk_pixbuf_get_pixels(original_pixbuf),
+					PIX_FMT_RGB24, original_width, original_height);
+
+			if (img_convert(&pic_rgb32, PIX_FMT_RGBA32, &pic_rgb24,
+					PIX_FMT_RGB24, original_width, original_height) < 0)
+			{
+				printf("Error pixel format conversion\n");
+				m_imagemutex.Unlock();
+				return false; // CreateXImage returns bool; -1 would read as success
+			}
+
+			XPutImage(m_display, pixmap, m_gc, p_image, 0, 0, 0, 0,
+					original_width, original_height);
+
+		}
+
+		XMapWindow(m_display, m_win);
+		XFreePixmap(m_display, pixmap);
+		gdk_pixbuf_unref(original_pixbuf);
+		XDestroyImage(p_image);
+	}
+
+	m_image = XGetImage(m_display, m_d, 0, 0, m_width, m_height, AllPlanes,
+			ZPixmap);
+
+	if (!m_image)
+	{
+		printf("XGetImage error\n");
+		m_imagemutex.Unlock();
+		return false;
+	}
+
+	m_imagemutex.Unlock();
+	return true;
+}
+
+void CVDecoder::CloseXImage()
+{
+	if (!m_bInit)
+		return;
+
+	m_imagemutex.Lock();
+
+	if (m_image)
+	{
+		XDestroyImage( m_image);
+		m_image = 0;
+	}
+
+	if (m_display)
+	{
+		if (m_win)
+		{
+			XUnmapWindow(m_display, m_win);
+			XClearWindow(m_display, m_win);
+			m_win = 0;
+		}
+
+		if (m_d)
+		{
+			XClearWindow(m_display, m_d);
+			m_d = 0;
+		}
+
+		XCloseDisplay( m_display);
+		m_display = 0;
+	}
+
+	m_imagemutex.Unlock();
+	return;
+}
+
+bool CVDecoder::Init(int width, int height, enum CodecID nCodecID)
+{
+	if (m_bInit)
+		return true;
+
+	avcodec_init();
+	avcodec_register_all();
+
+	if (!m_imagemutex.IsInitialized())
+	{
+		if (m_imagemutex.Init() < 0)
+			return false;
+	}
+
+	m_width = width;
+	m_height = height;
+
+	m_pCodec = avcodec_find_decoder(CODEC_ID_MPEG4);
+	if (!m_pCodec)
+	{
+		PErrorText("Codec not found\n");
+		return false;
+	}
+
+	if (m_pOutFrame)
+	{
+		free( m_pOutFrame);
+		m_pOutFrame = 0;
+	}
+	m_pOutFrame = avcodec_alloc_frame();
+
+	if (m_pCodecCtx)
+	{
+		if (m_pCodecCtx->extradata)
+		{
+			free(m_pCodecCtx->extradata);
+			m_pCodecCtx->extradata = 0;
+			m_pCodecCtx->extradata_size = 0;
+		}
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+	m_pCodecCtx = avcodec_alloc_context();
+	m_pCodecCtx->extradata = 0;
+	m_pCodecCtx->extradata_size = 0;
+
+	// put sample parameters
+	m_pCodecCtx->bit_rate = 512 * 1024;
+	// resolution must be a multiple of two 
+	m_pCodecCtx->width = m_width;
+	m_pCodecCtx->height = m_height;
+	// frames per second
+	m_pCodecCtx->frame_rate = V_CODEC_framerate;
+	m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;
+	// emit one intra frame every ten frames
+	m_pCodecCtx->gop_size = V_CODEC_gop_size;
+
+	m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+	m_pCodecCtx->extradata = malloc(V_De_ExtraHeaderSize);
+	if (m_pCodecCtx->extradata == 0)
+	{
+		return false;
+	}
+	m_pCodecCtx->extradata_size = V_De_ExtraHeaderSize;
+
+	if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
+		m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
+
+	// open it
+	if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+	{
+		PErrorText("could not open codec");
+		return false;
+	}
+
+	m_bInit = true;
+	return true;
+}
+
+int CVDecoder::DecodeProcess(uint8_t *encodeddata_v,
+		const int encodeddatasize_v)
+{
+	bool isPaintPic;
+	int realsize, i;
+	int got_picture;
+
+	if (!m_bInit)
+	{
+		return -1;
+	}
+
+	int left = 0;
+	int len;
+
+	m_imagemutex.Lock();
+
+	for (i = 0;; i++)
+	{
+		if (encodeddatasize_v - i * V_De_INBUF_SIZE >= V_De_INBUF_SIZE)
+			realsize = V_De_INBUF_SIZE;
+		else
+			realsize = encodeddatasize_v - i * V_De_INBUF_SIZE;
+
+		if (realsize <= 0)
+			break;
+
+		left = 0;
+		isPaintPic = false;
+		while (realsize > 0)
+		{
+
+			m_pCodecCtx->width = m_width;
+			m_pCodecCtx->height = m_height;
+
+			len = avcodec_decode_video(m_pCodecCtx, m_pOutFrame, &got_picture,
+					(encodeddata_v + i * V_De_INBUF_SIZE + left), realsize);
+			if (len < 0)
+			{
+				PErrorText("Error while decoding");
+				m_imagemutex.Unlock();
+				return -2;
+			}
+
+			if (m_image != 0)
+			{
+
+				GetColorInfo(m_image, &c_info);
+				switch (m_image->bits_per_pixel)
+				{
+				case 8:
+					input_pixfmt = PIX_FMT_PAL8;
+					break;
+				case 16:
+					if (m_image->red_mask == 0xF800 && m_image->green_mask
+							== 0x07E0 && m_image->blue_mask == 0x1F)
+					{
+						input_pixfmt = PIX_FMT_RGB565;
+					}
+					else if (m_image->red_mask == 0x7C00 && m_image->green_mask
+							== 0x03E0 && m_image->blue_mask == 0x1F)
+					{
+						input_pixfmt = PIX_FMT_RGB555;
+					}
+					else
+					{
+						fprintf(
+								stderr,
+								"xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth %i not supported ... aborting\n",
+								m_image->bits_per_pixel);
+						fprintf(
+								stderr,
+								"xtoffmpeg.XImageToFFMPEG(): color masks: r 0x%.6lX g 0x%.6lX b 0x%.6lX\n",
+								m_image->red_mask, m_image->green_mask,
+								m_image->blue_mask);
+					}
+					break;
+				case 24:
+					if (m_image->red_mask == 0xFF0000 && m_image->green_mask
+							== 0xFF00 && m_image->blue_mask == 0xFF)
+					{
+						input_pixfmt = PIX_FMT_BGR24;
+					}
+					else if (m_image->red_mask == 0xFF && m_image->green_mask
+							== 0xFF00 && m_image->blue_mask == 0xFF0000)
+					{
+						input_pixfmt = PIX_FMT_RGB24;
+					}
+					else
+					{
+						PErrorText(
+								"xtoffmpeg.XImageToFFMPEG(): rgb ordering at image depth not supported ... aborting\n");
+						PErrorText("xtoffmpeg.XImageToFFMPEG(): color masks");
+						m_imagemutex.Unlock();
+						return -4; // DecodeProcess returns int; false would read as success
+					}
+					break;
+				case 32:
+					if (c_info.alpha_mask == 0xFF000000 && m_image->green_mask
+							== 0xFF00)
+					{
+
+						input_pixfmt = PIX_FMT_RGBA32;
+					}
+					else
+					{
+						PErrorText(
+								"xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+						m_imagemutex.Unlock();
+						return -4;
+					}
+					break;
+				default:
+					PErrorText(
+							"xtoffmpeg.XImageToFFMPEG(): image depth not supported ... aborting");
+					m_imagemutex.Unlock();
+					return -4;
+				}
+			}
+
+			if (got_picture)
+			{
+				if (!isPaintPic)
+				{
+					if (m_display && m_image && m_win)
+					{
+
+						avpicture_fill(&pic_rgb, (uint8_t*) m_image->data,
+								input_pixfmt, m_width, m_height);
+
+						if (img_convert(&pic_rgb, input_pixfmt,
+								(AVPicture*) m_pOutFrame, PIX_FMT_YUV420P,
+								m_width, m_height) < 0)
+						{
+							PErrorText("Error pixel format conversion");
+							m_imagemutex.Unlock();
+							return -3;
+						}
+
+						XPutImage(m_display, m_win, m_gc, m_image, 0, 0, 0, 0,
+								m_width, m_height);
+
+					}
+				}
+				isPaintPic = true;
+
+			}
+			realsize -= len;
+			left += len;
+		}
+	}
+
+	m_imagemutex.Unlock();
+	return 0;
+}
+
+void CVDecoder::CloseCodec()
+{
+	m_bInit = false;
+
+	if (m_pOutFrame)
+	{
+		free( m_pOutFrame);
+		m_pOutFrame = 0;
+	}
+	if (m_pCodecCtx)
+	{
+		if (m_pCodecCtx->extradata)
+		{
+			free(m_pCodecCtx->extradata);
+			m_pCodecCtx->extradata = 0;
+			m_pCodecCtx->extradata_size = 0;
+		}
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+
+}
+
+bool CVDecoder::ResetCodec(const int width, const int height)
+{
+	CodecID nCodecID = m_pCodecCtx->codec_id;
+	CloseCodec();
+	m_bInit = false;
+
+	m_width = width;
+	m_height = height;
+
+	// find the video decoder
+	m_pCodec = avcodec_find_decoder(nCodecID);
+	if (!m_pCodec)
+	{
+		PErrorText("Codec not found");
+		return false;
+	}
+
+	if (m_pOutFrame)
+	{
+		free( m_pOutFrame);
+		m_pOutFrame = 0;
+	}
+	m_pOutFrame = avcodec_alloc_frame();
+
+	if (m_pCodecCtx)
+	{
+		if (m_pCodecCtx->extradata)
+		{
+			free(m_pCodecCtx->extradata);
+			m_pCodecCtx->extradata = 0;
+			m_pCodecCtx->extradata_size = 0;
+		}
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+	m_pCodecCtx = avcodec_alloc_context();
+	m_pCodecCtx->extradata = 0;
+	m_pCodecCtx->extradata_size = 0;
+
+	// put sample parameters
+	m_pCodecCtx->bit_rate = 512 * 1024;
+	// resolution must be a multiple of two 
+	m_pCodecCtx->width = m_width;
+	m_pCodecCtx->height = m_height;
+	// frames per second
+	m_pCodecCtx->frame_rate = V_CODEC_framerate;
+	m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;
+	// emit one intra frame every ten frames
+	m_pCodecCtx->gop_size = V_CODEC_gop_size;
+
+	m_pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+	m_pCodecCtx->extradata = malloc(V_De_ExtraHeaderSize);
+	if (m_pCodecCtx->extradata == 0)
+	{
+		return false;
+	}
+	m_pCodecCtx->extradata_size = V_De_ExtraHeaderSize;
+
+	if (m_pCodec->capabilities & CODEC_CAP_TRUNCATED)
+		m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
+
+	// open it
+	if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+	{
+		PErrorText("could not open codec");
+		return false;
+	}
+
+	m_bInit = true;
+	return true;
+
+}
+
+#define MAX_PACKET_SIZE 10*1024
+//CVideoReceiver class.
+
+CVideoReceiver::CVideoReceiver()
+{
+	m_bInit = false;
+}
+
+CVideoReceiver::~CVideoReceiver()
+{
+
+}
+
+bool CVideoReceiver::Init()
+{
+	if (m_bInit)
+		return m_bInit;
+
+	if (!m_vdecoder.Init(320, 240, CODEC_ID_MPEG4))
+	{
+		return false;
+	}
+
+	m_bInit = true;
+	return m_bInit;
+
+}
+
+bool CVideoReceiver::CreateXImage(Drawable parent, int x, int y, int width,
+		int height)
+{
+	bool ret;
+	ret = m_vdecoder.CreateXImage(parent, x, y, width, height);
+
+	return ret;
+}
+
+void CVideoReceiver::CloseXImage()
+{
+	m_vdecoder.CloseXImage();
+}
+
+int CVideoReceiver::Start(int nHostPort)
+{
+	if (!m_bInit)
+		return -1;
+
+	if (IsActive())
+		return 0;
+
+	//init rtpsession.
+	RTPSessionParams sessParams1;
+	sessParams1.SetOwnTimestampUnit(1.0 / 30.0); //30 video frames per second
+	sessParams1.SetUsePollThread(1); //background thread to call virtual callbacks - set by default, but just to be sure
+	sessParams1.SetMaximumPacketSize(MAX_PACKET_SIZE);
+	//setup transmission parameters
+	RTPUDPv4TransmissionParams transParams1;
+	transParams1.SetPortbase(nHostPort);
+	//CREATE THE SESSION
+	int status1 = Create(sessParams1, &transParams1);
+	if (status1)
+	{
+		//		ReportError(status1);
+		return -2; //unable to create the session
+	}
+
+	return 0;
+}
+
+void CVideoReceiver::Stop()
+{
+	Destroy();
+}
+
+void CVideoReceiver::ProcessFrame(unsigned char* framedata, int framelen)
+{
+	pVideo = (VideoHeader*) framedata;
+	if (pVideo->width != m_vdecoder.m_width || pVideo->height
+			!= m_vdecoder.m_height)
+	{
+		m_vdecoder.ResetCodec(pVideo->width, pVideo->height);
+	}
+	m_vdecoder.DecodeProcess(framedata + sizeof(VideoHeader), framelen
+			- sizeof(VideoHeader));
+}
+
+//CV4LVEncoder class.
+
+CV4LVEncoder::CV4LVEncoder()
+{
+	m_pFrameBuf = 0;
+	m_pFrame = 0;
+	m_pCodec = 0;
+	m_pCodecCtx = 0;
+	m_bInit = false;
+}
+
+CV4LVEncoder::~CV4LVEncoder()
+{
+
+	if (m_pFrameBuf)
+	{
+		free( m_pFrameBuf);
+		m_pFrameBuf = 0;
+	}
+
+	if (m_pFrame)
+	{
+		free( m_pFrame);
+		m_pFrame = 0;
+	}
+	if (m_pCodecCtx)
+	{
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+	m_bInit = false;
+
+}
+
+bool CV4LVEncoder::Init(int width, int height, enum CodecID nCodecID /*=CODEC_ID_MPEG4*/)
+{
+	m_bInit = false;
+	/*Init for encode*/
+	avcodec_init();
+	avcodec_register_all();
+
+	//new a frame object.
+	if (m_pFrame)
+	{
+		free( m_pFrame);
+		m_pFrame = 0;
+	}
+	m_pFrame = avcodec_alloc_frame();
+	if (m_pFrame == 0)
+	{
+		return false;
+	}
+
+	/* find the video encoder */
+	m_pCodec = avcodec_find_encoder(nCodecID);
+	if (!m_pCodec)
+	{
+		PErrorText("codec not found");
+
+		printf("\ncodec not found\n");
+		return false;
+	}
+
+	if (m_pCodecCtx)
+	{
+		avcodec_close( m_pCodecCtx);
+		m_pCodecCtx = 0;
+	}
+
+	m_pCodecCtx = avcodec_alloc_context();
+	/* resolution must be a multiple of two */
+	m_pCodecCtx->width = width;
+	m_pCodecCtx->height = height;
+	/* frames per second */
+	m_pCodecCtx->frame_rate = V_CODEC_framerate;
+	m_pCodecCtx->frame_rate_base = V_CODEC_frame_rate_base;
+
+	m_pCodecCtx->bit_rate = 512 * 1024; //1128kbps->512kbps
+	m_pCodecCtx->gop_size = V_CODEC_gop_size; /* emit one intra frame every ten frames */
+	m_pCodecCtx->codec_type = CODEC_TYPE_VIDEO;
+	m_pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
+
+	/* open it */
+	if (avcodec_open(m_pCodecCtx, m_pCodec) < 0)
+	{
+		PErrorText("could not open codec");
+		return false;
+	}
+
+	//malloc outbuf.
+	if (m_pFrameBuf)
+	{
+		free( m_pFrameBuf);
+		m_pFrameBuf = 0;
+	}
+
+	int image_size = avpicture_get_size(PIX_FMT_YUV420P, m_pCodecCtx->width,
+			m_pCodecCtx->height);
+
+	m_pFrameBuf = (uint8_t*) malloc(image_size);
+	if (m_pFrameBuf == 0)
+	{
+		PErrorText("FrameBuf malloc failed!");
+		return false;
+	}
+	/*Init for encode*/
+
+	avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
+			m_pCodecCtx->width, m_pCodecCtx->height);
+	m_pFrame->type = FF_BUFFER_TYPE_SHARED;
+
+	m_bInit = true;
+	return true;
+}
+
+int CV4LVEncoder::EncodeProcess(char *frameaddress, uint8_t *pOutBuf,
+		int nOutsize)
+{
+
+	if (!m_bInit)
+		return -1;
+
+	if (nOutsize < V_En_OutBufSize)
+	{
+		return -2;
+	}
+
+	avpicture_fill(&m_pic_rgb, (uint8_t *) frameaddress, PIX_FMT_BGR24,
+			m_pCodecCtx->width, m_pCodecCtx->height);
+	avpicture_fill((AVPicture*) m_pFrame, m_pFrameBuf, PIX_FMT_YUV420P,
+			m_pCodecCtx->width, m_pCodecCtx->height);
+
+	if (img_convert((AVPicture*) m_pFrame, PIX_FMT_YUV420P, &m_pic_rgb,
+			PIX_FMT_BGR24, m_pCodecCtx->width, m_pCodecCtx->height) < 0)
+	{
+		fprintf(
+				stderr,
+				"xtoffmpeg.XImageToFFMPEG(): pixel format conversion not handled ... aborting\n");
+		return -3;
+	}
+
+	m_VideoHeader.width = m_pCodecCtx->width;
+	m_VideoHeader.height = m_pCodecCtx->height;
+	memcpy((char *) pOutBuf, &m_VideoHeader, sizeof(VideoHeader));
+
+	int iii = avcodec_encode_video(m_pCodecCtx, pOutBuf + sizeof(VideoHeader),
+			nOutsize, m_pFrame);
+
+	if (iii <= 0)
+		return -4;
+
+	return iii + sizeof(VideoHeader);
+}
+
+bool CV4LVEncoder::GetCapSize(int &width, int &height)
+{
+	if (!m_bInit)
+		return false;
+
+	width = m_pCodecCtx->width;
+	height = m_pCodecCtx->height;
+
+	return true;
+}
+
+void CV4LVEncoder::GetColorInfo(XImage *image, ColorInfo *ci /* return struct */)
+{
+	unsigned long red_mask, green_mask, blue_mask, alpha_mask;
+	// the shifts are unsigned longs as well
+
+	if (!ci)
+		return;
+
+	// setting shifts and bit_depths to zero
+	ci->red_shift = ci->green_shift = ci->blue_shift = ci->alpha_shift = 0;
+	ci->red_bit_depth = ci->green_bit_depth = ci->blue_bit_depth
+			= ci->alpha_bit_depth = 0;
+
+	red_mask = image->red_mask;
+	if (red_mask > 0)
+	{
+		// shift red_mask to the right till all empty bits have been
+		// shifted out and count how many they were
+		while ((red_mask & 0x01) == 0)
+		{
+			red_mask >>= 1;
+			ci->red_shift++;
+		}
+		// count how many bits are set in the mask = depth
+		while ((red_mask & 0x01) == 1)
+		{
+			red_mask >>= 1;
+			ci->red_bit_depth++;
+		}
+	}
+
+	ci->red_max_val = (1 << ci->red_bit_depth) - 1;
+
+	green_mask = image->green_mask;
+	if (green_mask > 0)
+	{
+		while ((green_mask & 0x01) == 0)
+		{
+			green_mask >>= 1;
+			ci->green_shift++;
+		}
+		while ((green_mask & 0x01) == 1)
+		{
+			green_mask >>= 1;
+			ci->green_bit_depth++;
+		}
+	}
+	ci->green_max_val = (1 << ci->green_bit_depth) - 1;
+
+	blue_mask = image->blue_mask;
+	if (blue_mask > 0)
+	{
+		while ((blue_mask & 0x01) == 0)
+		{
+			blue_mask >>= 1;
+			ci->blue_shift++;
+		}
+		while ((blue_mask & 0x01) == 1)
+		{
+			blue_mask >>= 1;
+			ci->blue_bit_depth++;
+		}
+	}
+	ci->blue_max_val = (1 << ci->blue_bit_depth) - 1;
+
+	ci->max_val = max(ci->red_max_val, ci->green_max_val);
+	ci->max_val = max(ci->blue_max_val, ci->max_val);
+	ci->bit_depth = max(ci->red_bit_depth, ci->green_bit_depth);
+	ci->bit_depth = max(ci->blue_bit_depth, ci->bit_depth);
+	if (image->bits_per_pixel > image->depth)
+	{
+
+		ci->alpha_mask = ~(image->red_mask | image->blue_mask
+				| image->green_mask);
+		alpha_mask = ci->alpha_mask;
+		if (alpha_mask > 0)
+		{
+			while ((alpha_mask & 0x01) == 0)
+			{
+				alpha_mask >>= 1;
+				ci->alpha_shift++;
+			}
+			while ((alpha_mask & 0x01) == 1)
+			{
+				alpha_mask >>= 1;
+				ci->alpha_bit_depth++;
+			}
+		}
+		ci->alpha_max_val = (1 << ci->alpha_bit_depth) - 1;
+	}
+}
+
+//CV4LVideoSender class.
+
+CV4LVideoSender::CV4LVideoSender() :
+	m_sendthread(SendBufSize)
+{
+	stop = false;
+	m_bIsRecord = false;
+	m_bInit = 0;
+
+	m_pOutBuf = 0;
+	m_hFile = 0;
+	m_mode = ModeNone;
+	m_pV4Ldev = 0;
+
+	if (!m_initmutex.IsInitialized())
+	{
+		m_initmutex.Init();
+	}
+
+	m_display = 0;
+	m_win = 0;
+	m_d = 0;
+	m_image = 0;
+
+}
+
+CV4LVideoSender::~CV4LVideoSender()
+{
+	//stop the thread first, because m_pOutBuf is still being used by Thread()
+	Stop();
+
+	//close file.
+	if (m_hFile)
+	{
+		fclose( m_hFile);
+		m_hFile = 0;
+	}
+
+	//free buffer.
+	if (m_pOutBuf != 0)
+	{
+		free( m_pOutBuf);
+		m_pOutBuf = 0;
+	}
+
+	// for image
+	if (m_image)
+	{
+		XDestroyImage( m_image);
+		m_image = 0;
+	}
+
+	if (m_display)
+	{
+		if (m_win)
+		{
+			XUnmapWindow(m_display, m_win);
+			XClearWindow(m_display, m_win);
+			m_win = 0;
+		}
+
+		if (m_d)
+		{
+			XClearWindow(m_display, m_d);
+			m_d = 0;
+		}
+
+		XCloseDisplay( m_display);
+		m_display = 0;
+	}
+
+	CloseVideo();
+}
+
+bool CV4LVideoSender::Init(int nHostPort)
+{
+	m_initmutex.Lock();
+	m_bInit = false;
+
+	if (!m_imagemutex.IsInitialized())
+	{
+		if (m_imagemutex.Init() < 0)
+		{
+			m_initmutex.Unlock();
+			return false;
+		}
+	}
+
+	if (!OpenVideo())
+	{
+		m_initmutex.Unlock();
+		return false;
+	}
+
+	if (!m_sendthread.Init(nHostPort))
+	{
+
+		m_initmutex.Unlock();
+		return false;
+	}
+
+	if (m_pOutBuf == 0)
+	{
+		m_pOutBuf = (uint8_t*) malloc(CV4LVEncoder::V_En_OutBufSize);
+		if (m_pOutBuf == 0)
+		{
+			m_initmutex.Unlock();
+			return false;
+		}
+
+	}
+
+	//Init sencoder.
+	if (!m_vencoder.Init(VideoWidth, VideoHeight, CODEC_ID_MPEG4))
+	{
+		m_initmutex.Unlock();
+		return false;
+	}
+
+	if (!m_modemutex.IsInitialized())
+	{
+		if (m_modemutex.Init() < 0)
+		{
+			m_initmutex.Unlock();
+			return false;
+		}
+
+	}
+
+	m_bIsRecord = false;
+
+	m_bInit = true;
+	m_initmutex.Unlock();
+	return true;
+}
+
+bool CV4LVideoSender::OpenVideo()
+{
+	CloseVideo();
+
+	m_pV4Ldev = new V4L();
+
+	if (m_pV4Ldev == 0)
+		return false;
+
+	char dev_name[20] = "/dev/video";
+	char temp_count[3] = "\0";
+
+	strcpy(dev_name, "/dev/video");
+	sprintf(temp_count, "%d", v4l_dev_num);
+	strcat(dev_name, temp_count);
+
+	m_pV4Ldev->setInfo(dev_name, "PAL", "", 0); //dev/video
+
+	printf("CV4LVideoSender::OpenVideo(): dev_name = %s\n", dev_name);
+
+	if (!m_pV4Ldev->openDevice())
+	{
+		return false;
+	}
+
+	if (!m_pV4Ldev->initialiseCapture(VIDEO_PALETTE_RGB24))
+	{
+		return false;
+	}
+
+	return true;
+}
+
+void CV4LVideoSender::CloseVideo()
+{
+	if (m_pV4Ldev != 0)
+	{
+		delete m_pV4Ldev;
+		m_pV4Ldev = 0;
+	}
+}
+
+bool CV4LVideoSender::IsInitialized()
+{
+	bool b;
+
+	m_initmutex.Lock();
+	b = m_bInit;
+	m_initmutex.Unlock();
+
+	return b;
+}
+
+bool CV4LVideoSender::CreateXImage(Drawable parent, int x, int y, int width,
+		int height)
+{
+	int screen_num;
+
+	GdkPixbuf *original_pixbuf;
+
+	gint original_width, original_height;
+	GdkColorspace original_color;
+	gboolean original_alpha;
+	gboolean pixbuf_has_alpha;
+	XSetWindowAttributes win_attr;
+	XImage *p_image = NULL;
+	if (!m_bInit)
+		return false;
+
+	CloseXImage();
+
+	m_imagemutex.Lock();
+
+	m_display = XOpenDisplay(NULL);
+	screen_num = DefaultScreen(m_display);
+	m_gc = DefaultGC(m_display, screen_num);
+	m_d = RootWindow(m_display, screen_num);
+
+	m_win = XCreateWindow(m_display, parent, x, y, width, height, 1,
+			XDefaultDepth(m_display, screen_num), InputOutput, CopyFromParent,
+			0, &win_attr);
+
+	if (gdk_pixbuf_new_from_file("pic/teacher.bmp", NULL) == NULL)
+	{
+		XSetWindowBackgroundPixmap(m_display, m_win, ParentRelative);
+		XMapWindow(m_display, m_win);
+	}
+	else
+	{
+		original_pixbuf = gdk_pixbuf_new_from_file("pic/teacher.bmp", NULL);
+		pixbuf_has_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+		original_color = gdk_pixbuf_get_colorspace(original_pixbuf);
+		original_alpha = gdk_pixbuf_get_has_alpha(original_pixbuf);
+		original_width = gdk_pixbuf_get_width(original_pixbuf);
+		original_height = gdk_pixbuf_get_height(original_pixbuf);
+		printf("original_alpha = %d\n", original_alpha);
+		printf("original_color = %d\n", original_color);
+		printf("original_width = %d\n", original_width);
+		printf("original_height = %d\n", original_height);
+		printf("n_channels = %d\n", gdk_pixbuf_get_n_channels(original_pixbuf));
+
+		Pixmap pixmap = XCreatePixmap(m_display, m_win, original_width,
+				original_height, XDefaultDepth(m_display, screen_num));
+		XSetWindowBackgroundPixmap(m_display, m_win, pixmap);
+
+		p_image = XGetImage(m_display, m_d, 0, 0, original_width,
+				original_height, AllPlanes, ZPixmap);
+		if (!p_image)
+		{
+			printf("error\n");
+			exit(10);
+		}
+
+		AVPicture pic_rgb24, pic_rgb32;
+		if (m_display && p_image && pixmap)
+		{
+			avpicture_fill(&pic_rgb32, (uint8_t*) p_image->data,
+					PIX_FMT_RGBA32, original_width, original_height);
+			avpicture_fill(&pic_rgb24, gdk_pixbuf_get_pixels(original_pixbuf),
+					PIX_FMT_RGB24, original_width, original_height);
+
+			if (img_convert(&pic_rgb32, PIX_FMT_RGBA32, &pic_rgb24,
+					PIX_FMT_RGB24, original_width, original_height) < 0)
+			{
+				printf("Error pixel format conversion\n");
+				m_imagemutex.Unlock();
+				return false; // CreateXImage returns bool; -1 would read as success
+			}
+
+			XPutImage(m_display, pixmap, m_gc, p_image, 0, 0, 0, 0,
+					original_width, original_height);
+
+		}
+
+		XMapWindow(m_display, m_win);
+		XFreePixmap(m_display, pixmap);
+		gdk_pixbuf_unref(original_pixbuf);
+		XDestroyImage(p_image);
+	}
+	int imagewidth, imageheight;
+	m_vencoder.GetCapSize(imagewidth, imageheight);
+
+	m_image = XGetImage(m_display, m_d, 0, 0, imagewidth, imageheight,
+			AllPlanes, ZPixmap);
+	if (!m_image)
+	{
+		PErrorText("GetImage error");
+		m_imagemutex.Unlock();
+		return false;
+	}
+
+	m_imagemutex.Unlock();
+	return true;
+}
+
+void CV4LVideoSender::CloseXImage()
+{
+
+	if (!IsInitialized())
+		return;
+
+	m_imagemutex.Lock();
+
+	if (m_image)
+	{
+		XDestroyImage( m_image);
+		m_image = 0;
+	}
+
+	if (m_display)
+	{
+		if (m_win)
+		{
+			XUnmapWindow(m_display, m_win);
+			XClearWindow(m_display, m_win);
+			m_win = 0;
+		}
+
+		if (m_d)
+		{
+			XClearWindow(m_display, m_d);
+			m_d = 0;
+		}
+
+		XCloseDisplay( m_display);
+		m_display = 0;
+	}
+
+	m_imagemutex.Unlock();
+}
+
+void CV4LVideoSender::Playback(uint8_t* videodata)
+{
+	AVPicture pic_rgb24, pic_rgb32;
+	int width, height;
+
+	if (!IsInitialized())
+		return;
+
+	m_vencoder.GetCapSize(width, height);
+
+	m_imagemutex.Lock();
+	if (m_display && m_image && m_win)
+	{
+
+		avpicture_fill(&pic_rgb32, (uint8_t*) m_image->data,
+				CVDecoder::input_pixfmt, width, height);
+		avpicture_fill(&pic_rgb24, videodata, PIX_FMT_BGR24, width, height);
+
+		if (img_convert(&pic_rgb32, CVDecoder::input_pixfmt, &pic_rgb24,
+				PIX_FMT_BGR24, width, height) < 0)
+		{
+			PErrorText("Error pixel format conversion");
+			m_imagemutex.Unlock();
+			return;
+		}
+
+		XPutImage(m_display, m_win, m_gc, m_image, 0, 0, 0, 0, width, height);
+	}
+
+	m_imagemutex.Unlock();
+	return;
+}
+
+int CV4LVideoSender::Start(char* szFile /* =0 */, bool bIsRecord /* =false */)
+{
+
+	if (!IsInitialized())
+		return -1;
+
+	if (JThread::IsRunning())
+		return 0;
+
+	if (!stopmutex.IsInitialized())
+	{
+		if (stopmutex.Init() < 0)
+			return -2;
+	}
+
+	stop = false;
+
+	if (!m_recordmutex.IsInitialized())
+	{
+		if (m_recordmutex.Init() < 0)
+			return -2;
+	}
+
+	m_bIsRecord = bIsRecord;
+
+	if (szFile != 0)
+	{
+		if (m_hFile)
+		{
+			fclose( m_hFile);
+			m_hFile = 0;
+		}
+
+		m_hFile = fopen(szFile, "wb");
+		if (m_hFile == 0)
+		{
+			m_bIsRecord = false;
+			return -3;
+		}
+
+	}
+
+	if (m_sendthread.Start() < 0)
+	{
+		return -6;
+	}
+
+	if (JThread::Start() < 0)
+	{
+		m_bIsRecord = false;
+		return -6;
+	}
+
+	return 0;
+}
+
+void CV4LVideoSender::Stop()
+{
+	if (!IsRunning())
+		return;
+
+	stopmutex.Lock();
+	stop = true;
+	stopmutex.Unlock();
+
+	int count = 0;
+	while (1)
+	{
+		if (count >= 100)
+		{
+			if (JThread::IsRunning())
+			{
+				JThread::Kill();
+			}
+			break;
+		}
+
+		if (JThread::IsRunning())
+		{
+			count++;
+			usleep(10000);
+			continue;
+		}
+
+		break;
+	}
+
+	stop = false;
+
+	//close file.
+	if (m_hFile)
+	{
+		fclose( m_hFile);
+		m_hFile = 0;
+	}
+
+	m_sendthread.Stop();
+
+	m_bIsRecord = false;
+}
+
+void CV4LVideoSender::SetMode(V4LVIDEO_SEND_MODE mode)
+{
+
+	if (!IsInitialized())
+		return;
+
+	if (mode == ModeCapture || mode == ModeTransmit || mode == ModeNone)
+	{
+
+		m_modemutex.Lock();
+		m_mode = mode;
+		m_modemutex.Unlock();
+	}
+
+}
+
+void CV4LVideoSender::Record(bool bInRecord /* =true */)
+{
+
+	if (!IsInitialized())
+		return;
+
+	m_recordmutex.Lock();
+	m_bIsRecord = bInRecord;
+	m_recordmutex.Unlock();
+
+}
+
+void *CV4LVideoSender::Thread()
+{
+	char * data;
+	int OutBufSzie;
+	int status;
+
+	JThread::ThreadStarted();
+
+	bool stopthread;
+	V4LVIDEO_SEND_MODE mode;
+
+	stopmutex.Lock();
+	stopthread = stop;
+	stopmutex.Unlock();
+
+	bool isrecord;
+	m_recordmutex.Lock();
+	isrecord = m_bIsRecord;
+	m_recordmutex.Unlock();
+
+	int64_t pre_time, cur_time;
+	useconds_t delay, delay1;
+	pre_time = av_gettime();
+	if (m_vencoder.m_pCodecCtx->frame_rate != 0)
+	{
+		if (m_vencoder.m_pCodecCtx->frame_rate_base != 0)
+			delay1 = 1000000 * m_vencoder.m_pCodecCtx->frame_rate_base
+					/ m_vencoder.m_pCodecCtx->frame_rate;
+		else
+			delay1 = 1000000 / m_vencoder.m_pCodecCtx->frame_rate;
+	}
+	else
+	{
+		delay1 = 62500;
+	}
+
+	if (delay_time > 0)
+	{
+		sleep( delay_time);
+	}
+
+	// for utility rate of CPU
+	cur_time = pre_time = av_gettime();
+	pre_time -= delay1;
+
+	// for compensate.
+	int64_t nFrame = 0;
+	int64_t rec_time = 0;
+	int64_t rec_start;
+	rec_start = av_gettime();
+	while (!stopthread)
+	{
+		delay = cur_time - pre_time;
+		if (delay < delay1)
+		{
+			usleep(delay1 - delay);
+		}
+		pre_time = av_gettime();
+
+		m_modemutex.Lock();
+		mode = m_mode;
+		m_modemutex.Unlock();
+
+		if (mode == ModeNone)
+		{
+			stopmutex.Lock();
+			stopthread = stop;
+			stopmutex.Unlock();
+
+			rec_start = cur_time = av_gettime();
+			nFrame = 0;
+			cur_time = av_gettime();
+			continue;
+		}
+
+		if ((status = Capture((uint8_t**) &data)) < 0)
+		{
+			printf("\nvideo: capture vital error.");
+			stopthread = true;
+			continue;
+		}
+		else
+		{
+			if (mode == ModeCapture || mode == ModeTransmit)
+			{
+				Playback((uint8_t*) data);
+				if (mode == ModeCapture)
+				{
+					stopmutex.Lock();
+					stopthread = stop;
+					stopmutex.Unlock();
+
+					rec_start = cur_time = av_gettime();
+					nFrame = 0;
+					cur_time = av_gettime();
+					continue;
+				}
+			}
+
+			OutBufSzie = CV4LVEncoder::V_En_OutBufSize;
+			if ((status = m_vencoder.EncodeProcess(data, m_pOutBuf, OutBufSzie))
+					< 0)
+			{
+				printf("\nvideo: encode vital error.");
+				stopthread = true;
+				continue;
+			}
+			else
+			{
+				if (status > 0 && mode == ModeTransmit)
+				{
+					m_sendthread.SendData(m_pOutBuf, status);
+
+					if (m_hFile != 0 && isrecord)
+					{
+						nFrame++;
+						fwrite(m_pOutBuf + sizeof(VideoHeader), status
+								- sizeof(VideoHeader), 1, m_hFile);
+
+						rec_time = av_gettime() - rec_start;
+						int i;
+						for (i = 0; rec_time > nFrame * delay1; nFrame++, i++)
+						{
+							if ((status = m_vencoder.EncodeProcess(data,
+									m_pOutBuf, OutBufSize)) < 0)
+							{
+								printf("\nvideo: encode fatal error.");
+								stopthread = true;
+								printf("\nvideo capture thread stopped by EncodeProcess error!\n");
+								return 0;
+							}
+							if (status > 0)
+							{
+								fwrite(m_pOutBuf + sizeof(VideoHeader), status
+										- sizeof(VideoHeader), 1, m_hFile);
+							}
+						}
+
+					}
+					else
+					{
+						rec_start = av_gettime();
+						nFrame = 0;
+					}
+
+				}
+
+			}
+		}
+
+		m_recordmutex.Lock();
+		isrecord = m_bIsRecord;
+		m_recordmutex.Unlock();
+
+		stopmutex.Lock();
+		stopthread = stop;
+		stopmutex.Unlock();
+
+		cur_time = av_gettime();
+	}
+	printf("\nvideo capture thread stopped.\n");
+	return 0;
+}
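+
+// The pacing above converts the codec's frame_rate / frame_rate_base pair into
+// a per-frame sleep interval in microseconds.  A minimal sketch of that
+// arithmetic as a standalone helper (illustrative only; this helper is not
+// part of the original class):
+static useconds_t FrameIntervalUsec(int frame_rate, int frame_rate_base)
+{
+	if (frame_rate <= 0)
+		return 62500; // fallback used above: 1/16 s per frame
+	if (frame_rate_base > 0)
+		return (useconds_t) (1000000LL * frame_rate_base / frame_rate);
+	return (useconds_t) (1000000 / frame_rate);
+}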
+
+bool CV4LVideoSender::AddDestination(const RTPIPv4Address &des)
+{
+
+	if (!IsInitialized())
+		return false;
+
+	if (m_sendthread.AddDestination(des) < 0)
+		return false;
+
+	return true;
+}
+
+void CV4LVideoSender::ClearDestinations()
+{
+
+	if (!IsInitialized())
+		return;
+
+	m_sendthread.ClearDestinations();
+}
+
+int CV4LVideoSender::Capture(uint8_t** data)
+{
+	*data = (uint8_t*) m_pV4Ldev->getNextFrame();
+	return 0;
+}
+

Added: incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.h?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.h (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/en_de_video.h Fri Oct  9 08:58:41 2009
@@ -0,0 +1,415 @@
+//en_de_video.h
+
+#include "fecrtpsession.h"
+
+#ifndef _KINO_V4L_H
+#define _KINO_V4L_H
+
+#include <vector>
+#include <deque>
+using std::vector;
+using std::deque;
+
+#include <string>
+
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/mman.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+
+#include <time.h>
+#include <sys/time.h>
+
+// Pre-define these kernel header guards so <linux/videodev.h> does not drag in
+// kernel time/device definitions that conflict with the libc headers above.
+#define _DEVICE_H_
+#define _LINUX_TIME_H
+#include <linux/videodev.h>
+
+/** Abstract V4L structure class. Provides a wrapper around a video4linux struct.
+ */
+
+class V4LStruct
+{
+public:
+	virtual ~V4LStruct()
+	{
+	}
+	virtual void *getStruct() = 0;
+};
+
+/** Abstract V4L Device class.
+ */
+
+class V4LDevice
+{
+public:
+	virtual ~V4LDevice()
+	{
+	}
+	virtual int getHandle() = 0;
+	bool request(int req, V4LStruct *v4l);
+	bool request(int req, void *addr);
+};
+
+/** Wrapper for the V4L capabilities structure.
+ */
+
+class V4LCapability: public V4LStruct
+{
+private:
+	struct video_capability capability;
+
+public:
+	V4LCapability(V4LDevice *device);
+	virtual ~V4LCapability();
+	void *getStruct();
+	char *getName();
+	int getNumberOfChannels();
+	int getNumberOfAudioDevices();
+	int getMinWidth();
+	int getMinHeight();
+	int getMaxWidth();
+	int getMaxHeight();
+	bool canCapture();
+	bool hasTuner();
+	bool hasChromakey();
+	bool hasClipping();
+	bool hasOverwrite();
+	bool hasScaling();
+	bool isMonochrome();
+	bool canSubCapture();
+	void report();
+};
+
+class V4LTuner: public V4LStruct
+{
+private:
+	V4LDevice *device;
+	struct video_tuner tuner;
+
+public:
+	V4LTuner(V4LDevice *device, int index);
+	void *getStruct();
+	void report();
+	int getRangeLow();
+	void setRangeLow(int low);
+	int getRangeHigh();
+	void setRangeHigh(int high);
+	int getFlags();
+	void setFlags(int flags);
+	int getMode();
+	void setMode(int mode);
+	int getSignal();
+};
+
+class V4LChannel: public V4LStruct
+{
+private:
+	V4LDevice *device;
+	struct video_channel channel;
+	vector<V4LTuner *> tuners;
+	V4LTuner *current;
+
+public:
+	V4LChannel(V4LDevice *device, int index);
+	virtual ~V4LChannel();
+	void *getStruct();
+	char *getName();
+	bool setTuner(unsigned int index);
+	unsigned int getNumberOfTuners();
+	V4LTuner *getTuner(unsigned int index);
+	int getSignal();
+	void report();
+};
+
+class V4LFrame: public V4LStruct
+{
+};
+
+class V4L: public V4LDevice
+{
+
+private:
+	enum V4LPARA
+	{
+		VideoWidth = 320, VideoHeight = 240
+	};
+
+private:
+	int fd;
+	vector<V4LChannel *> channels;
+	V4LChannel *current;
+	int width;
+	int height;
+	void *map;
+	struct video_mmap frame[32];
+	int frame_maps;
+	int frame_next;
+	int size;
+	int frames;
+	long long starttime;
+	char *device;
+	char *input;
+	int sample;
+	int fps;
+public:
+	char *audio;
+	V4LCapability *capability;
+	V4L();
+	virtual ~V4L();
+	void setInfo(char *device, char *input, char *audio, int sample);
+	bool openDevice();
+	bool deviceAvailable();
+	int getHandle();
+	unsigned int getNumberOfChannels();
+	V4LChannel *getChannel(unsigned int);
+	bool setChannel(unsigned int channel);
+	unsigned int getNumberOfTuners();
+	V4LTuner *getTuner(unsigned int);
+	bool setTuner(unsigned int tuner);
+	bool setCaptureResolution(int width, int height);
+	int getWidth();
+	int getHeight();
+	void startAudio();
+	void stopAudio();
+	bool initialiseCapture(int format);
+	void *getNextFrame();
+	void stopCapture();
+	int getFrequency();
+	bool setFrequency(int frequency);
+	int getSignal();
+	void report();
+	int mappedMemorySize(bool init = false);
+	int frameSample;
+};
+
+#endif
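+
+// A minimal capture sketch for the V4L wrapper above.  The device path, the
+// unset input/audio arguments, the 320x240 resolution and the
+// VIDEO_PALETTE_RGB24 palette are assumptions chosen for illustration, not
+// values mandated by this class.
+inline bool GrabOneFrame(V4L &cam, void *&frame)
+{
+	cam.setInfo((char *) "/dev/video0", 0, 0, 0);
+	if (!cam.openDevice())
+		return false;
+	cam.setCaptureResolution(320, 240);
+	if (!cam.initialiseCapture(VIDEO_PALETTE_RGB24))
+		return false;
+	frame = cam.getNextFrame(); // presumably a pointer into the mmap'ed buffer
+	cam.stopCapture();
+	return frame != 0;
+}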
+
+// Linux sys.
+#include <iostream>
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/mman.h>
+#include <linux/types.h>
+#include <linux/videodev.h>
+// FFmpeg
+#include <ffmpeg/avcodec.h>
+#include <ffmpeg/avformat.h>
+// Time
+#include <time.h>
+// X11
+#include <X11/Intrinsic.h>
+#include <X11/XWDFile.h>
+// Jthread and JMutex
+#include <jthread.h>
+#include <jmutex.h>
+
+#include "en_de_sendthread.h"
+#include "en_de_screen.h"
+
+#if !defined(_EN_DE_VIDEO_H__INCLUDED_)
+#define _EN_DE_VIDEO_H__INCLUDED_
+
+typedef void (*VideoPlayback)(uint8_t* videodata, int width, int height);
+
+typedef struct VideoHeader
+{
+	int width;
+	int height;
+} VideoHeader;
+
+//Video decoder.
+class CVDecoder
+{
+	friend class CVideoReceiver;
+private:
+	enum VDECODERBUFSIZE
+	{
+		V_De_ExtraHeaderSize = 10000,
+		V_De_SDLBufSize = 512 * 1024,
+		V_De_INBUF_SIZE = 1024
+	};
+	enum VIDEO_CODEC_PARA
+	{
+		V_CODEC_width = 320,
+		V_CODEC_height = 240,
+		V_CODEC_framerate = 16,
+		V_CODEC_frame_rate_base = 1,
+		V_CODEC_gop_size = 12,
+		V_CODEC_max_b_frames = 1
+	};
+public:
+	CVDecoder();
+	virtual ~CVDecoder();
+public:
+	int DecodeProcess(uint8_t *encodeddata_v, const int encodeddatasize_v);
+	bool Init(int width, int height, enum CodecID nCodecID);
+
+public:
+	bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+	void CloseXImage();
+	void GetColorInfo(XImage *image, ColorInfo *ci /* return struct */);
+
+private:
+
+	void CloseCodec();
+	bool ResetCodec(const int width, const int height);
+	bool m_bInit;
+	AVCodec *m_pCodec;
+	AVCodecContext *m_pCodecCtx;
+	AVFrame *m_pOutFrame;
+	AVPicture pic_rgb;
+	int m_width;
+	int m_height;
+	Display *m_display;
+	GC m_gc;
+	Window m_d;
+	XSetWindowAttributes win_attr;
+	Window m_win;
+	XImage *m_image;
+	JMutex m_imagemutex;
+	ColorInfo c_info;
+public:
+	static int input_pixfmt;
+
+};
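+
+// CVDecoder is normally fed complete encoded frames by CVideoReceiver.  A
+// minimal stand-alone sketch, assuming the caller already holds one encoded
+// frame in memory; the helper name, the 320x240 dimensions and the return
+// convention of DecodeProcess() are assumptions.
+inline bool DecodeOneFrame(CVDecoder &dec, uint8_t *buf, int len)
+{
+	if (!dec.Init(320, 240, CODEC_ID_MPEG4))
+		return false;
+	// DecodeProcess() return convention assumed: negative on error.
+	return dec.DecodeProcess(buf, len) >= 0;
+}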
+
+// Video receiver.
+
+class CVideoReceiver: public CFECRtpSession
+{
+public:
+	VideoHeader* pVideo;
+	CVideoReceiver();
+	virtual ~CVideoReceiver();
+
+	bool Init();
+	int Start(int nHostPort);
+	void Stop();
+
+public:
+	bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+	void CloseXImage();
+
+private:
+	virtual void ProcessFrame(unsigned char* framedata, int framelen);
+
+private:
+	bool m_bInit;
+	CVDecoder m_vdecoder;
+};
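+
+// A minimal usage sketch for CVideoReceiver, assuming the parent drawable
+// comes from the X11 widget that should display the remote video and that the
+// port number is chosen by the caller; the helper name is illustrative only.
+inline bool StartVideoReceive(CVideoReceiver &recv, Drawable parent,
+		int nHostPort)
+{
+	if (!recv.Init())
+		return false;
+	// Attach the decoder's XImage to the caller's drawable at (0,0), 320x240.
+	if (!recv.CreateXImage(parent, 0, 0, 320, 240))
+		return false;
+	// Start() return convention assumed: negative on failure.
+	return recv.Start(nHostPort) >= 0;
+}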
+
+// V4L Video encoder.
+class CV4LVEncoder
+{
+
+	friend class CV4LVideoSender;
+	friend class CV4LStuVideoSender;
+
+private:
+	enum V4L_VENCODE_BUFSIZE
+	{
+		V_En_OutBufSize = 400 * 1024
+	};
+	enum V4L_VIDEO_CODEC_PARA
+	{
+		V_CODEC_framerate = 16,
+		V_CODEC_frame_rate_base = 1,
+		V_CODEC_gop_size = 12,
+		V_CODEC_max_b_frames = 1
+	};
+public:
+	bool GetCapSize(int &width, int &height);
+	int EncodeProcess(char * frameaddress, uint8_t * pOutBuf, int nOutsize);
+	bool Init(int width, int height, enum CodecID nCodecID = CODEC_ID_MPEG4);
+	CV4LVEncoder();
+	virtual ~CV4LVEncoder();
+	void GetColorInfo(XImage *image, ColorInfo *ci);
+
+private:
+	bool m_bInit;
+
+	//for avcodec
+	AVPicture m_pic_rgb;
+	uint8_t *m_pFrameBuf;
+	AVFrame *m_pFrame;
+	AVCodec *m_pCodec;
+	AVCodecContext *m_pCodecCtx;
+	VideoHeader m_VideoHeader;
+};
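+
+// As the sender's Thread() suggests, EncodeProcess() writes a VideoHeader
+// followed by the encoded bitstream into the output buffer and returns the
+// total size.  A sketch of splitting that buffer back apart (the buffer layout
+// is inferred from the recording code, not stated by this header):
+inline int SplitEncodedFrame(const uint8_t *pOutBuf, int nTotal,
+		VideoHeader &header, const uint8_t *&payload)
+{
+	if (nTotal < (int) sizeof(VideoHeader))
+		return -1;
+	header = *(const VideoHeader *) pOutBuf;
+	payload = pOutBuf + sizeof(VideoHeader);
+	return nTotal - (int) sizeof(VideoHeader); // payload size in bytes
+}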
+
+// V4L video sender.
+class CV4LVideoSender: private JThread
+{
+public:
+	enum V4LVIDEO_SEND_MODE
+	{
+		ModeNone = 0, ModeCapture = 1, ModeTransmit = 2
+	};
+private:
+	enum V4LVIDEO_SEND_BUFSIZE
+	{
+		SendBufSize = 16
+	};
+private:
+	enum V4LVIDEO_SEND_PARA
+	{
+		VideoWidth = 320, VideoHeight = 240
+	};
+public:
+	CV4LVideoSender();
+	~CV4LVideoSender();
+	bool Init(int nHostPort);
+	bool AddDestination(const RTPIPv4Address &des);
+	void ClearDestinations();
+	int Start(char* szFile = 0, bool bIsRecord = false);
+	void Stop();
+	void SetMode(V4LVIDEO_SEND_MODE mode);
+	bool CreateXImage(Drawable parent, int x, int y, int width, int height);
+	void CloseXImage();
+	void Record(bool bInRecord = true);
+	void GetColorInfo(XImage *image, ColorInfo *ci);
+	bool IsInitialized();
+
+private:
+	bool OpenVideo();
+	void CloseVideo();
+	int Capture(uint8_t** data);
+	bool IsHavedV4LVideo();
+
+	void Playback(uint8_t* videodata);
+	void *Thread();
+	bool stop;
+	JMutex stopmutex;
+private:
+	bool m_bIsRecord;
+	JMutex m_recordmutex;
+
+	CV4LVEncoder m_vencoder;
+	uint8_t *m_pOutBuf;
+	FILE* m_hFile;
+	CSendThread m_sendthread;
+	bool m_bInit;
+	JMutex m_initmutex;
+	V4LVIDEO_SEND_MODE m_mode;
+	JMutex m_modemutex;
+
+	//for ximage
+	Window m_d;
+	Display *m_display;
+	GC m_gc;
+	Window m_win;
+	XImage *m_image;
+
+	JMutex m_imagemutex;
+	ColorInfo c_info;
+	V4L *m_pV4Ldev;
+
+};
+
+#endif // !defined(_EN_DE_VIDEO_H__INCLUDED_)

Added: incubator/bluesky/trunk/RealClass/Teacher/src/errorinfo.h
URL: http://svn.apache.org/viewvc/incubator/bluesky/trunk/RealClass/Teacher/src/errorinfo.h?rev=823467&view=auto
==============================================================================
--- incubator/bluesky/trunk/RealClass/Teacher/src/errorinfo.h (added)
+++ incubator/bluesky/trunk/RealClass/Teacher/src/errorinfo.h Fri Oct  9 08:58:41 2009
@@ -0,0 +1,76 @@
+#ifndef _ERRORINFO_H
+#define _ERRORINFO_H
+
+#define ERROR001 "Error 001: Failed to establish the socket connection between the server and the classroom!\n\n"
+#define ERROR_S001 "Information 001: Please check that the server's IP address is correct and that the server has been started!"
+
+#define ERROR002 "Error 002: Failed to establish the socket connection between the service center and the classroom!\n\n"
+#define ERROR_S002 "Information 002: Please check the server's status."
+
+#define ERROR003 "Error 003: Media login failed!\n\n"
+#define ERROR_S003 "Information 003: Please check the server's status and verify the user name and password."
+
+#define ERROR004 "Error 004: Failed to send the create-class message!\n\n"
+#define ERROR_S004 "Information 004: Please check the server's status."
+
+#define ERROR005 "Error 005: Failed to create the classroom!\n\n"
+#define ERROR_S005 "Information 005: Please check the server's status and make sure that the DTU has been started."
+
+#define ERROR006 "Error 006: Failed to send the cancel-class message!\n\n"
+#define ERROR_S006 "Information 006: Please check the server's status."
+
+#define ERROR007 "Error 007: Failed to send the request for the student list!\n\n"
+#define ERROR_S007 "Information 007: Please check the server's status."
+
+#define ERROR008 "Error 008: Failed to send the request for the student list!\n\n"
+#define ERROR_S008 "Information 008: Please check the server's status."
+
+#define ERROR009 "Error 009: Failed to send the message to the student!\n\n"
+#define ERROR_S009 "Information 009: Please check the server's status."
+
+#define ERROR010 "Error 010: Failed to designate the focus student!\n\n"
+#define ERROR_S010 "Information 010: Please check the server's status and make sure that the user is still online."
+
+#define ERROR011 "Error 011: Failed to send the message canceling the student's focus!\n\n"
+#define ERROR_S011 "Information 011: Please check the server's status."
+
+#define ERROR012 "Error 012: Failed to cancel the student's focus!\n\n"
+#define ERROR_S012 "Information 012: Please check the server's status and make sure that the user has not logged out."
+
+#define ERROR013 "Error 013: Failed to send the chat message!\n\n"
+#define ERROR_S013 "Information 013: Please check the server's status."
+
+#define ERROR014 "Error 014: Invalid data-reception port!\n\n"
+#define ERROR_S014 "Information 014: Please enter an even-numbered port for receiving the focus student's audio data."
+
+#define ERROR015 "Error 015: Invalid data-reception port!\n\n"
+#define ERROR_S015 "Information 015: Please enter an even-numbered port for receiving the focus student's audio data."
+
+#define ERROR016 "Error 016: Invalid data-reception port!\n\n"
+#define ERROR_S016 "Information 016: Please enter an even-numbered port for receiving the focus student's audio data."
+
+#define ERROR017 "Error 017: The class name entered is invalid!\n\n"
+#define ERROR_S017 "Information 017: Please check that the class name has been filled in and is not too long."
+
+#define ERROR018 "Error 018: The user name entered is invalid!\n\n"
+#define ERROR_S018 "Information 018: Please check that the user name has been filled in and is not too long."
+
+#define ERROR019 "Error 019: Failed to disable chatting!\n\n"
+#define ERROR_S019 "Information 019: Please check the server's status."
+
+#define ERROR020 "Error 020: Failed to enable chatting!\n\n"
+#define ERROR_S020 "Information 020: Please check the server's status."
+
+#define ERROR021 "Error 021: No audio device found!\n\n"
+#define ERROR_S021 "Information 021: Please check the sound card."
+
+#define ERROR022 "Error 022: No video device found!\n\n"
+#define ERROR_S022 "Information 022: Please check that the video device is ready."
+
+#define ERROR023 "Error 023: The display resolution is too large to show the interface correctly!\n\n"
+#define ERROR_S023 "Information 023: Please set the resolution to 800 x 600."
+
+#define ERROR024 "Error 024: The display resolution is too small to show the interface correctly!\n\n"
+#define ERROR_S024 "Information 024: Please set the resolution to 800 x 600."
+
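+// Each ERRORxxx string is paired with an ERROR_Sxxx hint, and the GUI
+// typically shows both in one dialog.  A minimal sketch of composing such a
+// pair; the <string> include and the helper name exist only for this
+// illustration and are not part of the original header:
+#include <string>
+inline std::string ComposeErrorText(const char *error, const char *hint)
+{
+	// e.g. ComposeErrorText(ERROR021, ERROR_S021)
+	return std::string(error) + hint;
+}
+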
+#endif


