--- server/mp4live/mp4live.cpp.bak Mon May 1 06:49:58 2006
+++ server/mp4live/mp4live.cpp Mon May 1 06:50:11 2006
@@ -223,7 +223,7 @@
if(fork() !=0) {
exit(0);
}
- setpgrp();
+ setpgrp(0,0);
}

InitAudioEncoders();
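Note (not part of the patch): setpgrp() takes no arguments in the POSIX/Linux form, while the historic BSD prototype takes two, which is why the call above becomes setpgrp(0,0) on FreeBSD. A minimal standalone sketch of the same detach step written against the portable setpgid(2) call instead, shown only for comparison:

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <unistd.h>

int main(void)
{
    pid_t pid = fork();

    if (pid < 0) {
        perror("fork");
        return 1;
    }
    if (pid != 0)
        return 0;             /* parent exits, child keeps running */
    if (setpgid(0, 0) < 0)    /* portable spelling of setpgrp(0,0) */
        perror("setpgid");
    /* ... child would continue as the daemon here ... */
    return 0;
}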
--- server/mp4live/audio_oss_source.h.bak Mon May 1 06:30:34 2006
+++ server/mp4live/audio_oss_source.h Mon May 1 06:30:46 2006
@@ -25,7 +25,7 @@

#include <sys/types.h>
#include <sys/ioctl.h>
-#include <linux/soundcard.h>
+#include <sys/soundcard.h>

#include "media_source.h"
//#include "audio_encoder.h"
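Note (not part of the patch): FreeBSD ships the OSS ioctl definitions in <sys/soundcard.h> rather than <linux/soundcard.h>. A sketch of how the include could be guarded instead of hard-coding one path; the list of BSD guards is an assumption about which platforms matter here:

/* Sketch only: pick the OSS header per platform instead of patching it. */
#if defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__)
#include <sys/soundcard.h>      /* BSD location of the OSS API */
#else
#include <linux/soundcard.h>    /* Linux location */
#endif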
--- server/mp4live/video_v4l_source.h.bak Mon May 1 07:08:42 2006
+++ server/mp4live/video_v4l_source.h Mon May 1 07:06:04 2006
@@ -32,7 +32,7 @@

const char *get_linux_video_type(void);

-#if 1
+#if 0
#include "video_v4l2_source.h"
#else
// note - we no longer support v4l interfaces
--- server/mp4live/video_v4l_source.cpp.orig Tue Aug 23 00:18:53 2005
+++ server/mp4live/video_v4l_source.cpp Fri May 5 04:04:00 2006
@@ -36,7 +36,6 @@
//#define DEBUG_TIMESTAMPS 1
int CV4LVideoSource::ThreadMain(void)
{
- m_v4l_mutex = SDL_CreateMutex();
while (true) {
int rc;

@@ -108,7 +107,6 @@
ReleaseDevice();

m_source = false;
- SDL_DestroyMutex(m_v4l_mutex);
}

bool CV4LVideoSource::Init(void)
@@ -135,7 +133,7 @@
{
int rc;
const char* deviceName = m_pConfig->GetStringValue(CONFIG_VIDEO_SOURCE_NAME);
- u_int16_t format;
+ struct video_window vw = { 0 };

// open the video device
m_videoDevice = open(deviceName, O_RDWR);
@@ -234,109 +232,14 @@
}
}

- // get info on video capture buffers
- rc = ioctl(m_videoDevice, VIDIOCGMBUF, &m_videoMbuf);
- if (rc < 0) {
- error_message("Failed to get video capture info for %s",
- deviceName);
- goto failure;
- }
-
- // map the video capture buffers
- m_videoMap = mmap(0, m_videoMbuf.size,
- PROT_READ | PROT_WRITE, MAP_SHARED, m_videoDevice, 0);
- if (m_videoMap == MAP_FAILED) {
- error_message("Failed to map video capture memory for %s",
- deviceName);
- goto failure;
- }
-
- // allocate enough frame maps
- m_videoFrameMap = (struct video_mmap*)
- malloc(m_videoMbuf.frames * sizeof(struct video_mmap));
- if (m_videoFrameMap == NULL) {
- error_message("Failed to allocate enough memory");
+ vw.width = m_pConfig->GetIntegerValue(CONFIG_VIDEO_RAW_WIDTH);
+ vw.height = m_pConfig->GetIntegerValue(CONFIG_VIDEO_RAW_HEIGHT);
+
+ if(ioctl(m_videoDevice,VIDIOCSWIN,&vw) < 0) {
+ error_message("Failed to set videowindow size to %dx%d",vw.width,vw.height);
goto failure;
- }
-
- m_videoFrameMapFrame = (uint64_t *)malloc(m_videoMbuf.frames * sizeof(uint64_t));
- m_videoFrameMapTimestamp = (uint64_t *)malloc(m_videoMbuf.frames * sizeof(Timestamp));
- m_captureHead = 0;
- m_encodeHead = -1;
- format = VIDEO_PALETTE_YUV420P;
-
- m_videoFrames = 0;
- m_videoSrcFrameDuration =
- (Duration)(((float)TimestampTicks / m_videoSrcFrameRate) + 0.5);
- m_cacheTimestamp = true;
- if (m_pConfig->GetBoolValue(CONFIG_V4L_CACHE_TIMESTAMP) == false) {
- m_videoMbuf.frames = 2;
- m_cacheTimestamp = false;
}
- uint32_t width, height;
- width = m_pConfig->GetIntegerValue(CONFIG_VIDEO_RAW_WIDTH);
- height = m_pConfig->GetIntegerValue(CONFIG_VIDEO_RAW_HEIGHT);

- if (strncasecmp(m_pConfig->GetStringValue(CONFIG_VIDEO_FILTER),
- VIDEO_FILTER_DECIMATE,
- strlen(VIDEO_FILTER_DECIMATE)) == 0) {
- uint32_t max_width, max_height;
- switch (m_pConfig->GetIntegerValue(CONFIG_VIDEO_SIGNAL)) {
- case VIDEO_SIGNAL_NTSC:
- max_width = 720;
- max_height = 480;
- break;
- case VIDEO_SIGNAL_PAL:
- case VIDEO_SIGNAL_SECAM:
- default:
- max_width = 768;
- max_height = 576;
- break;
- }
- if (max_width < width * 2 || max_height < height * 2) {
- error_message("Decimate filter choosen with too large video size - max %ux%u",
- max_width / 2, max_height / 2);
- } else {
- m_decimate_filter = true;
- width *= 2;
- height *= 2;
- }
- }
-
- for (int i = 0; i < m_videoMbuf.frames; i++) {
- // initialize frame map
- m_videoFrameMap[i].frame = i;
- m_videoFrameMap[i].width = width;
- m_videoFrameMap[i].height = height;
- m_videoFrameMap[i].format = format;
-
- // give frame to the video capture device
- rc = ioctl(m_videoDevice, VIDIOCMCAPTURE, &m_videoFrameMap[i]);
- if (rc < 0) {
- // try RGB24 palette instead
- if (i == 0 && format == VIDEO_PALETTE_YUV420P) {
- format = VIDEO_PALETTE_RGB24;
- i--;
- continue;
- }
-
- error_message("Failed to allocate video capture buffer for %s - frame %d max %d rc %d errno %d format %d",
- deviceName,
- i, m_videoMbuf.frames,
- rc, errno, format);
- goto failure;
- }
- if (i == 0) {
- m_videoCaptureStartTimestamp = GetTimestamp();
- }
- m_lastVideoFrameMapFrameLoaded = m_videoFrameMapFrame[i] = i;
- m_lastVideoFrameMapTimestampLoaded =
- m_videoFrameMapTimestamp[i] =
- CalculateVideoTimestampFromFrames(i);
- }
-
- m_videoNeedRgbToYuv = (format == VIDEO_PALETTE_RGB24);
-
SetPictureControls();

if (videoCapability.audios) {
@@ -346,11 +249,6 @@
return true;

failure:
- free(m_videoFrameMap);
- if (m_videoMap) {
- munmap(m_videoMap, m_videoMbuf.size);
- m_videoMap = NULL;
- }
close(m_videoDevice);
m_videoDevice = -1;
return false;
@@ -361,9 +259,6 @@
SetVideoAudioMute(true);

// release device resources
- munmap(m_videoMap, m_videoMbuf.size);
- m_videoMap = NULL;
-
close(m_videoDevice);
m_videoDevice = -1;
}
@@ -432,22 +327,7 @@

int8_t CV4LVideoSource::AcquireFrame(Timestamp &frameTimestamp)
{
- int rc;
- ReleaseFrames();
- rc = ioctl(m_videoDevice, VIDIOCSYNC, &m_videoFrameMap[m_captureHead]);
- if (rc != 0) {
- return -1;
- }
-
- if (m_cacheTimestamp)
- frameTimestamp = m_videoFrameMapTimestamp[m_captureHead];
- else
- frameTimestamp = GetTimestamp();
-
- int8_t capturedFrame = m_captureHead;
- m_captureHead = (m_captureHead + 1) % m_videoMbuf.frames;
-
- return capturedFrame;
+ return 0;
}

void c_ReleaseFrame (void *f)
@@ -455,61 +335,12 @@
yuv_media_frame_t *yuv = (yuv_media_frame_t *)f;
if (yuv->free_y) {
CHECK_AND_FREE(yuv->y);
- } else {
- CV4LVideoSource *s = (CV4LVideoSource *)yuv->hardware;
- s->IndicateReleaseFrame(yuv->hardware_index);
}
free(yuv);
}

void CV4LVideoSource::ReleaseFrames (void)
{
- uint32_t index_mask = 1;
- uint32_t released_mask;
- uint32_t back_on_queue_mask = 0;
-
- SDL_LockMutex(m_v4l_mutex);
- released_mask = m_release_index_mask;
- SDL_UnlockMutex(m_v4l_mutex);
-
- m_release_index_mask = 0;
-
- while (released_mask != 0) {
- index_mask = 1 << m_encodeHead;
- if ((index_mask & released_mask) != 0) {
- back_on_queue_mask |= index_mask;
- Timestamp calc = GetTimestamp();
-
- if (calc > m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded) {
-#ifdef DEBUG_TIMESTAMPS
- debug_message("video frame delay past end of buffer - time is "U64" should be "U64,
- calc,
- m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded);
-#endif
- m_videoCaptureStartTimestamp = calc;
- m_videoFrameMapFrame[m_encodeHead] = 0;
- m_videoFrameMapTimestamp[m_encodeHead] = calc;
- } else {
- m_videoFrameMapFrame[m_encodeHead] = m_lastVideoFrameMapFrameLoaded + 1;
- m_videoFrameMapTimestamp[m_encodeHead] =
- CalculateVideoTimestampFromFrames(m_videoFrameMapFrame[m_encodeHead]);
- }
-
- m_lastVideoFrameMapFrameLoaded = m_videoFrameMapFrame[m_encodeHead];
- m_lastVideoFrameMapTimestampLoaded = m_videoFrameMapTimestamp[m_encodeHead];
- ioctl(m_videoDevice, VIDIOCMCAPTURE,
- &m_videoFrameMap[m_encodeHead]);
- m_encodeHead = (m_encodeHead + 1) % m_videoMbuf.frames;
- } else {
- released_mask = 0;
- }
- }
- if (back_on_queue_mask != 0) {
- SDL_LockMutex(m_v4l_mutex);
- m_release_index_mask &= ~back_on_queue_mask;
- SDL_UnlockMutex(m_v4l_mutex);
- }
-
}

void CV4LVideoSource::ProcessVideo(void)
@@ -518,39 +349,16 @@
Timestamp frameTimestamp;
for (int pass = 0; pass < m_maxPasses; pass++) {

- // get next frame from video capture device
- int8_t index;
- index = AcquireFrame(frameTimestamp);
- if (index == -1) {
- continue;
- }
-
-
- u_int8_t* mallocedYuvImage = NULL;
- u_int8_t* pY;
- u_int8_t* pU;
- u_int8_t* pV;
+ frameTimestamp = GetTimestamp();
+ u_int8_t* pY = (u_int8_t*)Malloc(m_videoSrcYUVSize);
+ u_int8_t* pU = pY + m_videoSrcYSize;
+ u_int8_t* pV = pU + m_videoSrcUVSize;

- // perform colorspace conversion if necessary
- if (m_videoNeedRgbToYuv) {
- mallocedYuvImage = (u_int8_t*)Malloc(m_videoSrcYUVSize);
-
- pY = mallocedYuvImage;
- pU = pY + m_videoSrcYSize;
- pV = pU + m_videoSrcUVSize,
-
- RGB2YUV(
- m_videoSrcWidth,
- m_videoSrcHeight,
- (u_int8_t*)m_videoMap + m_videoMbuf.offsets[index],
- pY,
- pU,
- pV,
- 1, false);
- } else {
- pY = (u_int8_t*)m_videoMap + m_videoMbuf.offsets[index];
- pU = pY + m_videoSrcYSize;
- pV = pU + m_videoSrcUVSize;
+ if(read(m_videoDevice,pY,m_videoSrcYUVSize) == -1) {
+ error_message("Failed to read from device: %s",strerror(errno));
+ CHECK_AND_FREE(pY);
+ DoStopCapture();
+ return;
}

if (m_decimate_filter) {
@@ -567,14 +375,14 @@
yuv->w = m_videoSrcWidth;
yuv->h = m_videoSrcHeight;
yuv->hardware = this;
- yuv->hardware_index = index;
+ yuv->hardware_index = 0;
if (m_videoWantKeyFrame && frameTimestamp >= m_audioStartTimestamp) {
yuv->force_iframe = true;
m_videoWantKeyFrame = false;
debug_message("Frame "U64" request key frame", frameTimestamp);
} else
yuv->force_iframe = false;
- yuv->free_y = (mallocedYuvImage != NULL);
+ yuv->free_y = true;

CMediaFrame *frame = new CMediaFrame(YUVVIDEOFRAME,
yuv,
@@ -582,11 +390,6 @@
frameTimestamp);
frame->SetMediaFreeFunction(c_ReleaseFrame);
ForwardFrame(frame);
- //debug_message("video source forward");
- // enqueue the frame to video capture buffer
- if (mallocedYuvImage != NULL) {
- IndicateReleaseFrame(index);
- }
}
}

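Note (not part of the patch): the hunks above drop the VIDIOCGMBUF/VIDIOCMCAPTURE/VIDIOCSYNC mmap capture path and instead set the geometry with VIDIOCSWIN and read whole YUV420P frames with read(2). A minimal standalone sketch of that read-based path, assuming a V4L1-compatible <linux/videodev.h> (for example from the v4l_compat port); the device path and CIF frame size are examples only:

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev.h>

int main(void)
{
    const int width = 352, height = 288;             /* CIF, for illustration */
    const size_t yuv_size = width * height * 3 / 2;  /* one YUV420P frame */
    unsigned char *frame = malloc(yuv_size);
    struct video_window vw;
    int fd;

    if (frame == NULL)
        return 1;
    fd = open("/dev/video0", O_RDWR);
    if (fd < 0) {
        perror("open /dev/video0");
        return 1;
    }

    memset(&vw, 0, sizeof(vw));
    vw.width = width;                                /* capture geometry */
    vw.height = height;
    if (ioctl(fd, VIDIOCSWIN, &vw) < 0)
        fprintf(stderr, "VIDIOCSWIN: %s\n", strerror(errno));

    if (read(fd, frame, yuv_size) != (ssize_t)yuv_size)
        fprintf(stderr, "read: %s\n", strerror(errno));

    close(fd);
    free(frame);
    return 0;
}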
--- configure.bak Fri May 5 09:11:29 2006
+++ configure Fri May 5 09:12:26 2006
@@ -26811,7 +26811,7 @@
esac
fi

-
+have_mp4live=true

if test x$have_mp4live = xtrue; then
MP4LIVE_TRUE=
--- server/mp4live/Makefile.in.bak Fri May 5 09:30:57 2006
+++ server/mp4live/Makefile.in Fri May 5 09:32:18 2006
@@ -70,7 +70,7 @@
video_util_filter.cpp video_util_filter.h video_util_mpeg4.cpp \
video_util_resize.h video_util_resize.cpp video_util_rgb.h \
video_util_rgb.cpp video_util_tv.h video_util_tv.cpp \
- video_v4l_source.h video_v4l2_source.cpp video_v4l2_source.h \
+ video_v4l_source.h video_v4l_source.cpp \
video_encoder.h video_encoder_base.h video_encoder_base.cpp \
video_encoder_class.cpp audio_ffmpeg.cpp audio_ffmpeg.h \
video_ffmpeg.cpp video_ffmpeg.h video_x264.cpp video_x264.h \
@@ -87,7 +87,7 @@
rtp_transmitter.lo sdp_file.lo text_encoder.lo text_source.lo \
util.lo video_util_convert.lo video_util_filter.lo \
video_util_mpeg4.lo video_util_resize.lo video_util_rgb.lo \
- video_util_tv.lo video_v4l2_source.lo video_encoder_base.lo \
+ video_util_tv.lo video_v4l_source.lo video_encoder_base.lo \
video_encoder_class.lo audio_ffmpeg.lo video_ffmpeg.lo \
video_x264.lo $(am__objects_1)
libmp4live_la_OBJECTS = $(am_libmp4live_la_OBJECTS)
@@ -418,8 +418,7 @@
video_util_tv.h \
video_util_tv.cpp \
video_v4l_source.h \
- video_v4l2_source.cpp \
- video_v4l2_source.h \
+ video_v4l_source.cpp \
video_encoder.h \
video_encoder_base.h \
video_encoder_base.cpp \
@@ -608,7 +607,7 @@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_util_resize.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_util_rgb.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_util_tv.Plo@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_v4l2_source.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_v4l_source.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_x264.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_xvid.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/video_xvid10.Plo@am__quote@
--- server/mp4live/mp4live_common.cpp.orig Thu Sep 15 00:16:13 2005
+++ server/mp4live/mp4live_common.cpp Mon May 8 03:14:29 2006
@@ -65,7 +65,7 @@
// attempt to exploit any real time features of the OS
// will probably only succeed if user has root privileges
if (pConfig->GetBoolValue(CONFIG_APP_REAL_TIME_SCHEDULER)) {
-#ifdef _POSIX_PRIORITY_SCHEDULING
+#if 0
// put us into the lowest real-time scheduling queue
struct sched_param sp;
sp.sched_priority = 1;
@@ -81,7 +81,7 @@
#endif

#endif /* _POSIX_PRIORITY_SCHEDULING */
-#ifdef _POSIX_MEMLOCK
+#if 0
// recommendation is to reserve some stack pages
u_int8_t huge[1024 * 1024];
memset(huge, 1, sizeof(huge));
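Note (not part of the patch): the #if 0 blocks above simply disable the POSIX real-time scheduling and memory-locking code rather than porting it. A sketch of the FreeBSD-native rtprio(2) call that could stand in for the scheduling half if someone wants that behaviour back; it needs root, and the chosen priority is only an example:

#include <stdio.h>
#include <sys/types.h>
#include <sys/rtprio.h>

int main(void)
{
    struct rtprio rtp;

    rtp.type = RTP_PRIO_REALTIME;
    rtp.prio = RTP_PRIO_MIN;            /* 0, the most urgent real-time level */
    if (rtprio(RTP_SET, 0, &rtp) != 0)  /* pid 0 = the calling process */
        perror("rtprio");
    return 0;
}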
--- server/mp4live/mp4live_config.h.bak Tue Nov 1 20:44:31 2005
+++ server/mp4live/mp4live_config.h Mon May 8 03:19:23 2006
@@ -238,7 +238,7 @@
CONFIG_BOOL_HELP(CONFIG_VIDEO_ENABLE, "videoEnable", true, "Enable Video"),
CONFIG_STRING(CONFIG_VIDEO_SOURCE_TYPE, "videoSourceType", VIDEO_SOURCE_V4L),
CONFIG_STRING(CONFIG_VIDEO_SOURCE_NAME, "videoDevice", "/dev/video0"),
- CONFIG_INT(CONFIG_VIDEO_INPUT, "videoInput", 1),
+ CONFIG_INT(CONFIG_VIDEO_INPUT, "videoInput", 0),
CONFIG_INT(CONFIG_VIDEO_SIGNAL, "videoSignal", VIDEO_SIGNAL_NTSC),

CONFIG_INT(CONFIG_VIDEO_TUNER, "videoTuner", -1),