Improves cl_capturevideo_printfps: its value now sets the interval in seconds between progress reports, and each report also shows capture speed as a multiple of the target framerate.
Renames the capturevideo function pointers for clarity (endvideo/videoframes/soundframe -> writeEndVideo/writeVideoFrame/writeSoundFrame).
Signed-off-by: bones_was_here <bones_was_here@xonotic.au>
#include "qdefs.h"
#include "fs.h"
#include "snd_main.h"
+#include "glquake.h"
typedef enum capturevideoformat_e
{
double lastfpstime;
int lastfpsframe;
int soundsampleframe;
- unsigned char *screenbuffer;
unsigned char *outbuffer;
char basename[MAX_QPATH];
int width, height;
qfile_t *videofile;
// always use this:
// cls.capturevideo.videofile = FS_OpenRealFile(va(vabuf, sizeof(vabuf), "%s.%s", cls.capturevideo.basename, cls.capturevideo.formatextension), "wb", false);
- void (*endvideo) (void);
- void (*videoframes) (int num);
- void (*soundframe) (const portable_sampleframe_t *paintbuffer, size_t length);
+ void (*writeEndVideo) (void);
+ void (*writeVideoFrame) (int num, u8 *in);
+ void (*writeSoundFrame) (const portable_sampleframe_t *paintbuffer, size_t length);
// format specific data
void *formatspecific;
+
+ // GL backend
+#define PBO_COUNT 3 // bones_was_here: slightly faster than double buffering
+ GLuint PBOs[PBO_COUNT];
+ GLuint PBOindex;
+ GLuint FBO;
+ GLuint FBOtex;
}
capturevideostate_t;
#endif
}
}
-static void SCR_CaptureVideo_Avi_VideoFrames(int num)
+static void SCR_CaptureVideo_Avi_VideoFrames(int num, u8 *in)
{
LOAD_FORMATSPECIFIC_AVI();
int x = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
- unsigned char *in, *out;
- // FIXME: width/height must be multiple of 2, enforce this?
- in = cls.capturevideo.outbuffer;
- out = cls.capturevideo.outbuffer + width*height*4;
+ unsigned char *out = cls.capturevideo.outbuffer;
+
SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(width, height, in, out);
x = width*height+(width/2)*(height/2)*2;
while(num-- > 0)
cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
cls.capturevideo.formatextension = "avi";
cls.capturevideo.videofile = FS_OpenRealFile(va(vabuf, sizeof(vabuf), "%s.%s", cls.capturevideo.basename, cls.capturevideo.formatextension), "wb", false);
- cls.capturevideo.endvideo = SCR_CaptureVideo_Avi_EndVideo;
- cls.capturevideo.videoframes = SCR_CaptureVideo_Avi_VideoFrames;
- cls.capturevideo.soundframe = SCR_CaptureVideo_Avi_SoundFrame;
+ cls.capturevideo.writeEndVideo = SCR_CaptureVideo_Avi_EndVideo;
+ cls.capturevideo.writeVideoFrame = SCR_CaptureVideo_Avi_VideoFrames;
+ cls.capturevideo.writeSoundFrame = SCR_CaptureVideo_Avi_SoundFrame;
cls.capturevideo.formatspecific = Mem_Alloc(tempmempool, sizeof(capturevideostate_avi_formatspecific_t));
{
LOAD_FORMATSPECIFIC_AVI();
cls.capturevideo.videofile = NULL;
}
-static void SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV(void)
+static void SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV(u8 *in)
{
LOAD_FORMATSPECIFIC_OGG();
yuv_buffer *yuv;
for(y = 0; y < h; ++y)
{
- for(b = cls.capturevideo.outbuffer + (h-1-y)*w*4, x = 0; x < w; ++x)
+ for(b = in + (h-1-y)*w*4, x = 0; x < w; ++x)
{
blockr = b[2];
blockg = b[1];
if ((y & 1) == 0 && y/2 < h/2) // if h is odd, this skips the last row
{
- for(b = cls.capturevideo.outbuffer + (h-2-y)*w*4, x = 0; x < w/2; ++x)
+ for(b = in + (h-2-y)*w*4, x = 0; x < w/2; ++x)
{
blockr = (b[2] + b[6] + b[inpitch+2] + b[inpitch+6]) >> 2;
blockg = (b[1] + b[5] + b[inpitch+1] + b[inpitch+5]) >> 2;
}
}
-static void SCR_CaptureVideo_Ogg_VideoFrames(int num)
+static void SCR_CaptureVideo_Ogg_VideoFrames(int num, u8 *in)
{
LOAD_FORMATSPECIFIC_OGG();
ogg_packet pt;
}
format->yuvi = (format->yuvi + 1) % 2;
- SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV();
+ SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV(in);
format->lastnum = num;
// TODO maybe send num-1 frames from here already
cls.capturevideo.format = CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA;
cls.capturevideo.formatextension = "ogv";
cls.capturevideo.videofile = FS_OpenRealFile(va(vabuf, sizeof(vabuf), "%s.%s", cls.capturevideo.basename, cls.capturevideo.formatextension), "wb", false);
- cls.capturevideo.endvideo = SCR_CaptureVideo_Ogg_EndVideo;
- cls.capturevideo.videoframes = SCR_CaptureVideo_Ogg_VideoFrames;
- cls.capturevideo.soundframe = SCR_CaptureVideo_Ogg_SoundFrame;
+ cls.capturevideo.writeEndVideo = SCR_CaptureVideo_Ogg_EndVideo;
+ cls.capturevideo.writeVideoFrame = SCR_CaptureVideo_Ogg_VideoFrames;
+ cls.capturevideo.writeSoundFrame = SCR_CaptureVideo_Ogg_SoundFrame;
cls.capturevideo.formatspecific = Mem_Alloc(tempmempool, sizeof(capturevideostate_ogg_formatspecific_t));
{
LOAD_FORMATSPECIFIC_OGG();
#ifdef CONFIG_VIDEO_CAPTURE
cvar_t cl_capturevideo = {CF_CLIENT, "cl_capturevideo", "0", "enables saving of video to a .avi file using uncompressed I420 colorspace and PCM audio, note that scr_screenshot_gammaboost affects the brightness of the output)"};
cvar_t cl_capturevideo_demo_stop = {CF_CLIENT | CF_ARCHIVE, "cl_capturevideo_demo_stop", "1", "automatically stops video recording when demo ends"};
-cvar_t cl_capturevideo_printfps = {CF_CLIENT | CF_ARCHIVE, "cl_capturevideo_printfps", "1", "prints the frames per second captured in capturevideo (is only written to the log file, not to the console, as that would be visible on the video)"};
+cvar_t cl_capturevideo_printfps = {CF_CLIENT | CF_ARCHIVE, "cl_capturevideo_printfps", "1", "prints the frames per second captured in capturevideo (is only written to stdout and any log file, not to the console as that would be visible on the video), value is seconds of wall time between prints"};
cvar_t cl_capturevideo_width = {CF_CLIENT | CF_ARCHIVE, "cl_capturevideo_width", "0", "scales all frames to this resolution before saving the video"};
cvar_t cl_capturevideo_height = {CF_CLIENT | CF_ARCHIVE, "cl_capturevideo_height", "0", "scales all frames to this resolution before saving the video"};
cvar_t cl_capturevideo_realtime = {CF_CLIENT, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over a minute"};
cls.capturevideo.starttime = cls.capturevideo.lastfpstime = host.realtime;
cls.capturevideo.soundsampleframe = 0;
cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
- cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.mode.width * vid.mode.height * 4);
- cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * (4+4) + 18);
+ cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * 4 + 18); // +18 ?
Sys_TimeString(timestring, sizeof(timestring), cl_capturevideo_nameformat.string);
dpsnprintf(cls.capturevideo.basename, sizeof(cls.capturevideo.basename), "video/%s%03i", timestring, cl_capturevideo_number.integer);
Cvar_SetValueQuick(&cl_capturevideo_number, cl_capturevideo_number.integer + 1);
cls.capturevideo.yuvnormalizetable[2][i] = 16 + i * (240-16) / 256;
}
+ GL_CaptureVideo_BeginVideo();
+
if (cl_capturevideo_ogg.integer)
{
if(SCR_CaptureVideo_Ogg_Available())
Con_Printf("Finishing capture of %s.%s (%d frames, %d audio frames)\n", cls.capturevideo.basename, cls.capturevideo.formatextension, cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
- if (cls.capturevideo.videofile)
- {
- cls.capturevideo.endvideo();
- }
+ GL_CaptureVideo_EndVideo(); // must be called before writeEndVideo !
- if (cls.capturevideo.screenbuffer)
- {
- Mem_Free (cls.capturevideo.screenbuffer);
- cls.capturevideo.screenbuffer = NULL;
- }
+ if (cls.capturevideo.videofile)
+ cls.capturevideo.writeEndVideo();
if (cls.capturevideo.outbuffer)
{
memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
}
-static void SCR_ScaleDownBGRA(unsigned char *in, int inw, int inh, unsigned char *out, int outw, int outh)
-{
- // TODO optimize this function
-
- int x, y;
- float area;
-
- // memcpy is faster than me
- if(inw == outw && inh == outh)
- {
- memcpy(out, in, 4 * inw * inh);
- return;
- }
-
- // otherwise: a box filter
- area = (float)outw * (float)outh / (float)inw / (float)inh;
- for(y = 0; y < outh; ++y)
- {
- float iny0 = y / (float)outh * inh; int iny0_i = (int) floor(iny0);
- float iny1 = (y+1) / (float)outh * inh; int iny1_i = (int) ceil(iny1);
- for(x = 0; x < outw; ++x)
- {
- float inx0 = x / (float)outw * inw; int inx0_i = (int) floor(inx0);
- float inx1 = (x+1) / (float)outw * inw; int inx1_i = (int) ceil(inx1);
- float r = 0, g = 0, b = 0, alpha = 0;
- int xx, yy;
-
- for(yy = iny0_i; yy < iny1_i; ++yy)
- {
- float ya = min(yy+1, iny1) - max(iny0, yy);
- for(xx = inx0_i; xx < inx1_i; ++xx)
- {
- float a = ya * (min(xx+1, inx1) - max(inx0, xx));
- r += a * in[4*(xx + inw * yy)+0];
- g += a * in[4*(xx + inw * yy)+1];
- b += a * in[4*(xx + inw * yy)+2];
- alpha += a * in[4*(xx + inw * yy)+3];
- }
- }
-
- out[4*(x + outw * y)+0] = (unsigned char) (r * area);
- out[4*(x + outw * y)+1] = (unsigned char) (g * area);
- out[4*(x + outw * y)+2] = (unsigned char) (b * area);
- out[4*(x + outw * y)+3] = (unsigned char) (alpha * area);
- }
- }
-}
-
-static void SCR_CaptureVideo_VideoFrame(int newframestepframenum)
-{
- int x = 0, y = 0;
- int width = cls.capturevideo.width, height = cls.capturevideo.height;
-
- if(newframestepframenum == cls.capturevideo.framestepframe)
- return;
-
- CHECKGLERROR
- // speed is critical here, so do saving as directly as possible
-
- GL_ReadPixelsBGRA(x, y, vid.mode.width, vid.mode.height, cls.capturevideo.screenbuffer);
-
- SCR_ScaleDownBGRA (cls.capturevideo.screenbuffer, vid.mode.width, vid.mode.height, cls.capturevideo.outbuffer, width, height);
-
- cls.capturevideo.videoframes(newframestepframenum - cls.capturevideo.framestepframe);
- cls.capturevideo.framestepframe = newframestepframenum;
-
- if(cl_capturevideo_printfps.integer && host.realtime > cls.capturevideo.lastfpstime + 1)
- {
- double fps1 = (cls.capturevideo.frame - cls.capturevideo.lastfpsframe) / (host.realtime - cls.capturevideo.lastfpstime + 0.0000001);
- double fps = (cls.capturevideo.frame ) / (host.realtime - cls.capturevideo.starttime + 0.0000001);
- Sys_Printf("capturevideo: (%.1fs) last second %.3ffps, total %.3ffps\n", cls.capturevideo.frame / cls.capturevideo.framerate, fps1, fps);
- cls.capturevideo.lastfpstime = host.realtime;
- cls.capturevideo.lastfpsframe = cls.capturevideo.frame;
- }
-}
-
void SCR_CaptureVideo_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
{
+	// Tally captured audio sample frames (used by the progress report),
+	// then hand the buffer to the active format's writer (AVI or Ogg).
	cls.capturevideo.soundsampleframe += (int)length;
-	cls.capturevideo.soundframe(paintbuffer, length);
+	cls.capturevideo.writeSoundFrame(paintbuffer, length);
}
static void SCR_CaptureVideo(void)
{
int newframenum;
+ int newframestepframenum;
+
if (cl_capturevideo.integer)
{
if (!cls.capturevideo.active)
SCR_CaptureVideo_BeginVideo();
if (cls.capturevideo.framerate != cl_capturevideo_fps.value * cl_capturevideo_framestep.integer)
{
- Con_Printf("You can not change the video framerate while recording a video.\n");
+ Con_Printf(CON_WARN "You can not change the video framerate while recording a video.\n");
Cvar_SetValueQuick(&cl_capturevideo_fps, cls.capturevideo.framerate / (double) cl_capturevideo_framestep.integer);
}
// for AVI saving we have to make sure that sound is saved before video
if (newframenum - cls.capturevideo.frame > 60 * (int)ceil(cls.capturevideo.framerate))
{
Cvar_SetValueQuick(&cl_capturevideo, 0);
- Con_Printf("video saving failed on frame %i, your machine is too slow for this capture speed.\n", cls.capturevideo.frame);
+ Con_Printf(CON_ERROR "video saving failed on frame %i, your machine is too slow for this capture speed.\n", cls.capturevideo.frame);
SCR_CaptureVideo_EndVideo();
return;
}
// write frames
- SCR_CaptureVideo_VideoFrame(newframenum / cls.capturevideo.framestep);
+ newframestepframenum = newframenum / cls.capturevideo.framestep;
+ if (newframestepframenum != cls.capturevideo.framestepframe)
+ GL_CaptureVideo_VideoFrame(newframestepframenum);
+ cls.capturevideo.framestepframe = newframestepframenum;
+ // report progress
+ if(cl_capturevideo_printfps.value && host.realtime > cls.capturevideo.lastfpstime + cl_capturevideo_printfps.value)
+ {
+ double fps1 = (cls.capturevideo.frame - cls.capturevideo.lastfpsframe) / (host.realtime - cls.capturevideo.lastfpstime + 0.0000001);
+ double fps = (cls.capturevideo.frame ) / (host.realtime - cls.capturevideo.starttime + 0.0000001);
+ Sys_Printf("captured %.1fs of video, last second %.3ffps (%.1fx), total %.3ffps (%.1fx)\n",
+ cls.capturevideo.frame / cls.capturevideo.framerate,
+ fps1, fps1 / cls.capturevideo.framerate,
+ fps, fps / cls.capturevideo.framerate);
+ cls.capturevideo.lastfpstime = host.realtime;
+ cls.capturevideo.lastfpsframe = cls.capturevideo.frame;
+ }
cls.capturevideo.frame = newframenum;
if (cls.capturevideo.error)
{
Cvar_SetValueQuick(&cl_capturevideo, 0);
- Con_Printf("video saving failed on frame %i, out of disk space? stopping video capture.\n", cls.capturevideo.frame);
+ Con_Printf(CON_ERROR "video saving failed on frame %i, out of disk space? stopping video capture.\n", cls.capturevideo.frame);
SCR_CaptureVideo_EndVideo();
}
}
status = qglCheckFramebufferStatus(GL_FRAMEBUFFER);CHECKGLERROR
if (status != GL_FRAMEBUFFER_COMPLETE)
{
- Con_Printf("R_Mesh_CreateFramebufferObject: glCheckFramebufferStatus returned %i\n", status);
+ Con_Printf(CON_ERROR "R_Mesh_CreateFramebufferObject: glCheckFramebufferStatus returned %i\n", status);
gl_state.framebufferobject = 0; // GL unbinds it for us
qglDeleteFramebuffers(1, (GLuint*)&temp);CHECKGLERROR
temp = 0;
}
}
+
+#ifdef CONFIG_VIDEO_CAPTURE
+/*
+ * GL_CaptureVideo*
+ * GPU scaling and async DMA transfer of completed frames
+ * Minimum GL version: 3.0, for glBlitFramebuffer
+ * Minimum GLES version: 3.0, for glBlitFramebuffer and GL_PIXEL_PACK_BUFFER (PBOs)
+ */
+
+// Allocates the GL-side capture resources: PBO_COUNT pixel pack buffers for
+// asynchronous readback of frames, plus (only when the capture resolution
+// differs from the window resolution) an FBO with an attached texture used as
+// the destination for GPU scaling; FBO stays 0 when no scaling is needed.
+void GL_CaptureVideo_BeginVideo(void)
+{
+	int width = cls.capturevideo.width, height = cls.capturevideo.height;
+	// format is GL_BGRA type is GL_UNSIGNED_BYTE
+	GLsizeiptr data_size = width * height * 4;
+
+// create PBOs
+	qglGenBuffers(PBO_COUNT, cls.capturevideo.PBOs);
+	for (int i = 0; i < PBO_COUNT; ++i)
+	{
+		qglBindBuffer(GL_PIXEL_PACK_BUFFER, cls.capturevideo.PBOs[i]);
+		// Allocate memory and leave it uninitialised.
+		// GL_DYNAMIC_READ: written by GL (glReadPixels), read back by the app.
+		qglBufferData(GL_PIXEL_PACK_BUFFER, data_size, NULL, GL_DYNAMIC_READ);CHECKGLERROR
+	}
+	qglBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
+
+// If scaling is necessary create an FBO with attached texture
+	if (width == vid.mode.width && height == vid.mode.height)
+	{
+		cls.capturevideo.FBO = 0; // sentinel: no scaling pass in GL_CaptureVideo_VideoFrame
+		return;
+	}
+	qglGenFramebuffers(1, &cls.capturevideo.FBO);
+	qglBindFramebuffer(GL_FRAMEBUFFER, cls.capturevideo.FBO);
+	qglGenTextures(1, &cls.capturevideo.FBOtex);
+	qglBindTexture(GL_TEXTURE_2D, cls.capturevideo.FBOtex);
+	// Allocate memory and leave it uninitialised (format and type don't matter: data is NULL).
+	// Same internalformat as TEXTYPE_COLORBUFFER.
+	qglTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);CHECKGLERROR
+	qglFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, cls.capturevideo.FBOtex, 0);CHECKGLERROR
+	qglBindTexture(GL_TEXTURE_2D, 0);
+	qglBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+// Per-captured-frame work: queues an async glReadPixels of the NEWEST frame
+// into the next PBO (scaling it into the capture FBO first if needed), and
+// writes out the OLDEST frame whose transfer was queued PBO_COUNT-1 calls ago.
+// This pipelining keeps the CPU from stalling on the GPU->CPU transfer.
+// newframestepframenum: monotonically increasing frame counter (starts at 1).
+void GL_CaptureVideo_VideoFrame(int newframestepframenum)
+{
+	int width = cls.capturevideo.width, height = cls.capturevideo.height;
+	GLubyte *pixbuf;
+	GLuint oldestPBOindex;
+
+	// Advance to the next PBO ring slot; the slot after it holds the oldest frame.
+	if (++cls.capturevideo.PBOindex >= PBO_COUNT)
+		cls.capturevideo.PBOindex = 0;
+	if ((oldestPBOindex = cls.capturevideo.PBOindex + 1) >= PBO_COUNT)
+		oldestPBOindex = 0;
+
+	// Ensure we'll read from the default FB
+	R_Mesh_SetRenderTargets(0, NULL, NULL, NULL, NULL, NULL);
+
+	// If necessary, scale the newest frame with linear filtering
+	if (cls.capturevideo.FBO)
+	{
+		qglBindFramebuffer(GL_DRAW_FRAMEBUFFER, cls.capturevideo.FBO);
+		qglBlitFramebuffer(0, 0, vid.mode.width, vid.mode.height, 0, 0, width, height, GL_COLOR_BUFFER_BIT, GL_LINEAR);CHECKGLERROR
+		// subsequent glReadPixels must read from the scaled image, not the window
+		qglBindFramebuffer(GL_READ_FRAMEBUFFER, cls.capturevideo.FBO);
+	}
+
+	// Copy the newest frame to a PBO for later CPU access.
+	// With a PBO bound to GL_PIXEL_PACK_BUFFER the last argument is an offset
+	// into that buffer (0), so this call returns without waiting for the data.
+	qglBindBuffer(GL_PIXEL_PACK_BUFFER, cls.capturevideo.PBOs[cls.capturevideo.PBOindex]);
+	qglReadPixels(0, 0, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0);CHECKGLERROR
+
+	if (cls.capturevideo.FBO)
+		qglBindFramebuffer(GL_FRAMEBUFFER, 0);
+
+	// Save the oldest frame from its PBO (blocks until sync if still not ready)
+	// speed is critical here, so do saving as directly as possible
+	if (newframestepframenum >= PBO_COUNT) // Don't read uninitialised memory, newframestepframenum starts at 1
+	{
+		qglBindBuffer(GL_PIXEL_PACK_BUFFER, cls.capturevideo.PBOs[oldestPBOindex]);
+		pixbuf = (GLubyte *)qglMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);CHECKGLERROR
+		if(pixbuf)
+		{
+			// num may be > 1 when the writer must duplicate frames to keep sync
+			cls.capturevideo.writeVideoFrame(newframestepframenum - cls.capturevideo.framestepframe, pixbuf);
+			qglUnmapBuffer(GL_PIXEL_PACK_BUFFER);
+		}
+		// NOTE(review): if qglMapBuffer fails the frame is silently dropped — confirm intended
+	}
+
+	qglBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
+}
+
+// Drains the frames still in flight in the PBO ring, then releases all GL
+// capture objects. Must run before writeEndVideo (see SCR_CaptureVideo_EndVideo)
+// so the final frames reach the file writer first.
+void GL_CaptureVideo_EndVideo(void)
+{
+	// SCR_CaptureVideo won't call GL_CaptureVideo_VideoFrame again
+	// but the last frame(s) are waiting in PBO(s) due to async transfer.
+	// On the last normal frame we queued to 1 PBO and saved from 1, so we have PBO_COUNT-1 left
+	for (int i = 1; i < PBO_COUNT; ++i)
+		GL_CaptureVideo_VideoFrame(cls.capturevideo.framestepframe + i);
+
+	// When no scaling FBO was created these names are 0 and GL silently ignores them.
+	qglDeleteTextures(1, &cls.capturevideo.FBOtex);
+	qglDeleteFramebuffers(1, &cls.capturevideo.FBO);
+	qglDeleteBuffers(PBO_COUNT, cls.capturevideo.PBOs);
+}
+#endif
+
+
// called at beginning of frame
void R_Mesh_Start(void)
{
void GL_ScissorTest(int state);
void GL_Clear(int mask, const float *colorvalue, float depthvalue, int stencilvalue);
void GL_ReadPixelsBGRA(int x, int y, int width, int height, unsigned char *outpixels);
+void GL_CaptureVideo_BeginVideo(void);
+void GL_CaptureVideo_VideoFrame(int newframestepframenum);
+void GL_CaptureVideo_EndVideo(void);
int R_Mesh_CreateFramebufferObject(rtexture_t *depthtexture, rtexture_t *colortexture, rtexture_t *colortexture2, rtexture_t *colortexture3, rtexture_t *colortexture4);
void R_Mesh_DestroyFramebufferObject(int fbo);
void R_Mesh_SetRenderTargets(int fbo, rtexture_t *depthtexture, rtexture_t *colortexture, rtexture_t *colortexture2, rtexture_t *colortexture3, rtexture_t *colortexture4);
#define GL_BUFFER_ACCESS 0x88BB
#define GL_BUFFER_MAPPED 0x88BC
#define GL_BUFFER_MAP_POINTER 0x88BD
+#define GL_PIXEL_PACK_BUFFER 0x88EB
#define GL_FRAMEBUFFER 0x8D40
#define GL_READ_FRAMEBUFFER 0x8CA8