mirror of
https://repo.dec05eba.com/gpu-screen-recorder
synced 2026-05-05 22:40:42 +09:00
Add support for software video encoding with the '-encoder cpu' option (currently H.264 only)
This commit is contained in:
@@ -281,6 +281,73 @@ bool gsr_capture_base_setup_cuda_textures(gsr_capture_base *self, AVFrame *frame
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Sets up the resources needed for software (CPU) video encoding:
 * allocates the CPU-side frame buffers on |frame|, creates the input and
 * cursor OpenGL textures, creates the two target textures (one single-channel
 * plane and one two-channel plane, 8-bit for NV12 / 16-bit for P010) and
 * initializes color conversion rendering into them.
 * Returns true on success; on failure partially-created state is left for
 * gsr_capture_base_stop to clean up.
 */
bool gsr_capture_base_setup_textures(gsr_capture_base *self, AVFrame *frame, gsr_color_range color_range, gsr_source_color source_color, bool hdr, bool cursor_texture_is_external) {
    int res = av_frame_get_buffer(frame, 1); // TODO: Align?
    if(res < 0) {
        // NOTE: error messages previously said "gsr_capture_kms_setup_cuda_textures" (copy-paste); corrected to this function's name.
        fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: av_frame_get_buffer failed: %d\n", res);
        return false;
    }

    res = av_frame_make_writable(frame);
    if(res < 0) {
        fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: av_frame_make_writable failed: %d\n", res);
        return false;
    }

    self->egl->glGenTextures(1, &self->input_texture);
    self->egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
    self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    self->egl->glBindTexture(GL_TEXTURE_2D, 0);

    // The cursor texture may be backed by an external image, in which case it
    // must be bound as GL_TEXTURE_EXTERNAL_OES instead of GL_TEXTURE_2D.
    const int target = cursor_texture_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
    self->egl->glGenTextures(1, &self->cursor_texture);
    self->egl->glBindTexture(target, self->cursor_texture);
    self->egl->glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    self->egl->glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    self->egl->glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    self->egl->glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    self->egl->glBindTexture(target, 0);

    // 8-bit planes for NV12 (sdr), 16-bit planes for P010 (hdr).
    const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
    const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
    const unsigned int formats[2] = { GL_RED, GL_RG };
    const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size

    for(int i = 0; i < 2; ++i) {
        self->target_textures[i] = gl_create_texture(self->egl, self->video_codec_context->width / div[i], self->video_codec_context->height / div[i], !hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
        if(self->target_textures[i] == 0) {
            fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: failed to create opengl texture\n");
            return false;
        }
    }

    gsr_color_conversion_params color_conversion_params = {0};
    color_conversion_params.color_range = color_range;
    color_conversion_params.egl = self->egl;
    color_conversion_params.source_color = source_color;
    if(!hdr)
        color_conversion_params.destination_color = GSR_DESTINATION_COLOR_NV12;
    else
        color_conversion_params.destination_color = GSR_DESTINATION_COLOR_P010;

    color_conversion_params.destination_textures[0] = self->target_textures[0];
    color_conversion_params.destination_textures[1] = self->target_textures[1];
    color_conversion_params.num_destination_textures = 2;
    color_conversion_params.load_external_image_shader = true;

    if(gsr_color_conversion_init(&self->color_conversion, &color_conversion_params) != 0) {
        fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: failed to create color conversion\n");
        return false;
    }

    gsr_color_conversion_clear(&self->color_conversion);

    return true;
}
|
||||
|
||||
void gsr_capture_base_stop(gsr_capture_base *self) {
|
||||
gsr_color_conversion_deinit(&self->color_conversion);
|
||||
|
||||
@@ -306,7 +373,7 @@ void gsr_capture_base_stop(gsr_capture_base *self) {
|
||||
av_buffer_unref(&self->video_codec_context->hw_frames_ctx);
|
||||
}
|
||||
|
||||
bool drm_create_codec_context(const char *card_path, AVCodecContext *video_codec_context, int width, int height, bool hdr, VADisplay *va_dpy) {
|
||||
bool vaapi_create_codec_context(const char *card_path, AVCodecContext *video_codec_context, int width, int height, bool hdr, VADisplay *va_dpy) {
|
||||
char render_path[128];
|
||||
if(!gsr_card_path_get_render_path(card_path, render_path)) {
|
||||
fprintf(stderr, "gsr error: failed to get /dev/dri/renderDXXX file from %s\n", card_path);
|
||||
|
||||
@@ -212,7 +212,6 @@ static vec2i swap_vec2i(vec2i value) {
|
||||
|
||||
bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bool screen_plane_use_modifiers, bool cursor_texture_is_external, bool record_cursor) {
|
||||
//egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
self->base.egl->glClear(0);
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
|
||||
@@ -380,7 +379,6 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
self->base.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
self->base.egl->eglSwapBuffers(self->base.egl->egl_display, self->base.egl->egl_surface);
|
||||
//self->base.egl->glFlush();
|
||||
//self->base.egl->glFinish();
|
||||
|
||||
|
||||
@@ -85,7 +85,9 @@ static void gsr_capture_kms_unload_cuda_graphics(gsr_capture_kms_cuda *cap_kms)
|
||||
static int gsr_capture_kms_cuda_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
|
||||
cap_kms->kms.base.egl->glClear(0);
|
||||
gsr_capture_kms_capture(&cap_kms->kms, frame, cap_kms->params.hdr, true, true, cap_kms->params.record_cursor);
|
||||
cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
|
||||
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
|
||||
129
src/capture/kms_software.c
Normal file
129
src/capture/kms_software.c
Normal file
@@ -0,0 +1,129 @@
|
||||
#include "../../include/capture/kms_software.h"
#include "../../include/capture/kms.h"

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h> /* strdup */
#include <unistd.h>

#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms kms;
|
||||
gsr_capture_kms_software_params params;
|
||||
} gsr_capture_kms_software;
|
||||
|
||||
static void gsr_capture_kms_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
#define GL_DYNAMIC_READ 0x88E9
|
||||
|
||||
/* Starts KMS capture and creates the textures used for software video
 * encoding. On any failure the capture is stopped again before returning. */
static int gsr_capture_kms_software_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_capture_kms_software *self = cap->priv;

    int result = gsr_capture_kms_start(&self->kms, self->params.display_to_capture, self->params.egl, video_codec_context, frame);
    if(result != 0) {
        gsr_capture_kms_software_stop(cap, video_codec_context);
        return result;
    }

    // On nvidia the cursor texture is an external image (GL_TEXTURE_EXTERNAL_OES).
    const bool cursor_texture_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
    if(!gsr_capture_base_setup_textures(&self->kms.base, frame, self->params.color_range, GSR_SOURCE_COLOR_RGB, self->params.hdr, cursor_texture_is_external)) {
        gsr_capture_kms_software_stop(cap, video_codec_context);
        result = -1;
    }

    return result;
}
|
||||
|
||||
/* Reports whether capture should stop. When |err| is non-NULL it is set to
 * whether the stop was caused by an error (false when not stopping). */
static bool gsr_capture_kms_software_should_stop(gsr_capture *cap, bool *err) {
    gsr_capture_kms_software *self = cap->priv;
    const bool stopping = self->kms.should_stop;
    if(err)
        *err = stopping ? self->kms.stop_is_error : false;
    return stopping;
}
|
||||
|
||||
/* Captures one frame: renders into the target textures, then reads the
 * single-channel (GL_RED) and two-channel (GL_RG) planes back into the
 * AVFrame's CPU buffers for software encoding. Always returns 0. */
static int gsr_capture_kms_software_capture(gsr_capture *cap, AVFrame *frame) {
    gsr_capture_kms_software *self = cap->priv;

    self->kms.base.egl->glClear(0);
    const bool use_modifiers = self->params.egl->gpu_info.vendor != GSR_GPU_VENDOR_AMD;
    const bool cursor_texture_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
    gsr_capture_kms_capture(&self->kms, frame, self->params.hdr, use_modifiers, cursor_texture_is_external, self->params.record_cursor);

    // TODO: hdr support
    const unsigned int plane_formats[2] = { GL_RED, GL_RG };
    for(int plane = 0; plane < 2; ++plane) {
        self->params.egl->glBindTexture(GL_TEXTURE_2D, self->kms.base.target_textures[plane]);
        self->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, plane_formats[plane], GL_UNSIGNED_BYTE, frame->data[plane]);
    }
    self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);

    self->kms.base.egl->eglSwapBuffers(self->kms.base.egl->egl_display, self->kms.base.egl->egl_surface);

    return 0;
}
|
||||
|
||||
/* Per-frame cleanup: releases the KMS fds acquired for this frame. */
static void gsr_capture_kms_software_capture_end(gsr_capture *cap, AVFrame *frame) {
    (void)frame;
    gsr_capture_kms_software *self = cap->priv;
    gsr_capture_kms_cleanup_kms_fds(&self->kms);
}
|
||||
|
||||
/* Stops the underlying KMS capture. Does not free the capture object itself. */
static void gsr_capture_kms_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    gsr_capture_kms_software *self = cap->priv;
    gsr_capture_kms_stop(&self->kms);
}
|
||||
|
||||
/* Stops the capture and frees all owned resources, including the
 * display_to_capture string duplicated in create(), then the capture itself. */
static void gsr_capture_kms_software_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    gsr_capture_kms_software *self = cap->priv;
    if(self) {
        gsr_capture_kms_software_stop(cap, video_codec_context);
        free((void*)self->params.display_to_capture);
        self->params.display_to_capture = NULL;
        free(self);
        cap->priv = NULL;
    }
    free(cap);
}
|
||||
|
||||
gsr_capture* gsr_capture_kms_software_create(const gsr_capture_kms_software_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_software_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_kms_software *cap_kms = calloc(1, sizeof(gsr_capture_kms_software));
|
||||
if(!cap_kms) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const char *display_to_capture = strdup(params->display_to_capture);
|
||||
if(!display_to_capture) {
|
||||
free(cap);
|
||||
free(cap_kms);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_kms->params = *params;
|
||||
cap_kms->params.display_to_capture = display_to_capture;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_software_start,
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_kms_software_should_stop,
|
||||
.capture = gsr_capture_kms_software_capture,
|
||||
.capture_end = gsr_capture_kms_software_capture_end,
|
||||
.destroy = gsr_capture_kms_software_destroy,
|
||||
.priv = cap_kms
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
@@ -29,7 +29,7 @@ static int gsr_capture_kms_vaapi_start(gsr_capture *cap, AVCodecContext *video_c
|
||||
return res;
|
||||
}
|
||||
|
||||
if(!drm_create_codec_context(cap_kms->params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, cap_kms->params.hdr, &cap_kms->va_dpy)) {
|
||||
if(!vaapi_create_codec_context(cap_kms->params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, cap_kms->params.hdr, &cap_kms->va_dpy)) {
|
||||
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
@@ -57,7 +57,9 @@ static bool gsr_capture_kms_vaapi_should_stop(gsr_capture *cap, bool *err) {
|
||||
|
||||
static int gsr_capture_kms_vaapi_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
cap_kms->kms.base.egl->glClear(0);
|
||||
gsr_capture_kms_capture(&cap_kms->kms, frame, cap_kms->params.hdr, cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_INTEL, false, cap_kms->params.record_cursor);
|
||||
cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
@@ -43,13 +43,6 @@ typedef struct {
|
||||
double nvfbc_dead_start;
|
||||
} gsr_capture_nvfbc;
|
||||
|
||||
#if defined(_WIN64) || defined(__LP64__)
|
||||
typedef unsigned long long CUdeviceptr_v2;
|
||||
#else
|
||||
typedef unsigned int CUdeviceptr_v2;
|
||||
#endif
|
||||
typedef CUdeviceptr_v2 CUdeviceptr;
|
||||
|
||||
static int max_int(int a, int b) {
|
||||
return a > b ? a : b;
|
||||
}
|
||||
@@ -301,14 +294,30 @@ static int gsr_capture_nvfbc_setup_session(gsr_capture_nvfbc *cap_nvfbc) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_nvfbc_stop(gsr_capture_nvfbc *cap_nvfbc) {
|
||||
gsr_capture_nvfbc_destroy_session_and_handle(cap_nvfbc);
|
||||
gsr_capture_base_stop(&cap_nvfbc->base);
|
||||
gsr_cuda_unload(&cap_nvfbc->cuda);
|
||||
if(cap_nvfbc->library) {
|
||||
dlclose(cap_nvfbc->library);
|
||||
cap_nvfbc->library = NULL;
|
||||
}
|
||||
if(cap_nvfbc->params.display_to_capture) {
|
||||
free((void*)cap_nvfbc->params.display_to_capture);
|
||||
cap_nvfbc->params.display_to_capture = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_nvfbc *cap_nvfbc = cap->priv;
|
||||
|
||||
cap_nvfbc->base.video_codec_context = video_codec_context;
|
||||
cap_nvfbc->base.egl = cap_nvfbc->params.egl;
|
||||
|
||||
if(!gsr_cuda_load(&cap_nvfbc->cuda, cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.overclock))
|
||||
return -1;
|
||||
if(!cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!gsr_cuda_load(&cap_nvfbc->cuda, cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.overclock))
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!gsr_capture_nvfbc_load_library(cap)) {
|
||||
gsr_cuda_unload(&cap_nvfbc->cuda);
|
||||
@@ -366,18 +375,26 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
|
||||
frame->width = video_codec_context->width;
|
||||
frame->height = video_codec_context->height;
|
||||
|
||||
if(!cuda_create_codec_context(cap_nvfbc->cuda.cu_ctx, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_nvfbc->cuda_stream))
|
||||
goto error_cleanup;
|
||||
if(cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!gsr_capture_base_setup_textures(&cap_nvfbc->base, frame, cap_nvfbc->params.color_range, GSR_SOURCE_COLOR_BGR, cap_nvfbc->params.hdr, true)) {
|
||||
goto error_cleanup;
|
||||
}
|
||||
} else {
|
||||
if(!cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!cuda_create_codec_context(cap_nvfbc->cuda.cu_ctx, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_nvfbc->cuda_stream))
|
||||
goto error_cleanup;
|
||||
}
|
||||
|
||||
gsr_cuda_context cuda_context = {
|
||||
.cuda = &cap_nvfbc->cuda,
|
||||
.cuda_graphics_resources = cap_nvfbc->cuda_graphics_resources,
|
||||
.mapped_arrays = cap_nvfbc->mapped_arrays
|
||||
};
|
||||
gsr_cuda_context cuda_context = {
|
||||
.cuda = &cap_nvfbc->cuda,
|
||||
.cuda_graphics_resources = cap_nvfbc->cuda_graphics_resources,
|
||||
.mapped_arrays = cap_nvfbc->mapped_arrays
|
||||
};
|
||||
|
||||
// TODO: Remove this, it creates shit we dont need
|
||||
if(!gsr_capture_base_setup_cuda_textures(&cap_nvfbc->base, frame, &cuda_context, cap_nvfbc->params.color_range, GSR_SOURCE_COLOR_BGR, cap_nvfbc->params.hdr)) {
|
||||
goto error_cleanup;
|
||||
// TODO: Remove this, it creates shit we dont need
|
||||
if(!gsr_capture_base_setup_cuda_textures(&cap_nvfbc->base, frame, &cuda_context, cap_nvfbc->params.color_range, GSR_SOURCE_COLOR_BGR, cap_nvfbc->params.hdr)) {
|
||||
goto error_cleanup;
|
||||
}
|
||||
}
|
||||
/* Disable vsync */
|
||||
set_vertical_sync_enabled(cap_nvfbc->params.egl, 0);
|
||||
@@ -385,9 +402,7 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
|
||||
return 0;
|
||||
|
||||
error_cleanup:
|
||||
gsr_capture_nvfbc_destroy_session_and_handle(cap_nvfbc);
|
||||
gsr_capture_base_stop(&cap_nvfbc->base);
|
||||
gsr_cuda_unload(&cap_nvfbc->cuda);
|
||||
gsr_capture_nvfbc_stop(cap_nvfbc);
|
||||
return -1;
|
||||
}
|
||||
|
||||
@@ -443,49 +458,52 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
(vec2i){0, 0}, (vec2i){frame->width, frame->height},
|
||||
0.0f, false);
|
||||
|
||||
cap_nvfbc->params.egl->glXSwapBuffers(cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.egl->x11.window);
|
||||
if(cap_nvfbc->params.use_software_video_encoder) {
|
||||
// TODO: Hdr?
|
||||
const unsigned int formats[2] = { GL_RED, GL_RG };
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
cap_nvfbc->params.egl->glBindTexture(GL_TEXTURE_2D, cap_nvfbc->base.target_textures[i]);
|
||||
cap_nvfbc->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
|
||||
}
|
||||
cap_nvfbc->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
// TODO: HDR is broken
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
memcpy_struct.srcXInBytes = 0;
|
||||
memcpy_struct.srcY = 0;
|
||||
memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
|
||||
cap_nvfbc->params.egl->glXSwapBuffers(cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.egl->x11.window);
|
||||
} else {
|
||||
cap_nvfbc->params.egl->glXSwapBuffers(cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.egl->x11.window);
|
||||
|
||||
memcpy_struct.dstXInBytes = 0;
|
||||
memcpy_struct.dstY = 0;
|
||||
memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
|
||||
// TODO: HDR is broken
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
memcpy_struct.srcXInBytes = 0;
|
||||
memcpy_struct.srcY = 0;
|
||||
memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
|
||||
|
||||
memcpy_struct.srcArray = cap_nvfbc->mapped_arrays[i];
|
||||
memcpy_struct.srcPitch = frame->width / div[i];
|
||||
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
|
||||
memcpy_struct.dstPitch = frame->linesize[i];
|
||||
memcpy_struct.WidthInBytes = frame->width * (cap_nvfbc->params.hdr ? 2 : 1);
|
||||
memcpy_struct.Height = frame->height / div[i];
|
||||
// TODO: Remove this copy if possible
|
||||
cap_nvfbc->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, cap_nvfbc->cuda_stream);
|
||||
memcpy_struct.dstXInBytes = 0;
|
||||
memcpy_struct.dstY = 0;
|
||||
memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
|
||||
|
||||
memcpy_struct.srcArray = cap_nvfbc->mapped_arrays[i];
|
||||
memcpy_struct.srcPitch = frame->width / div[i];
|
||||
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
|
||||
memcpy_struct.dstPitch = frame->linesize[i];
|
||||
memcpy_struct.WidthInBytes = frame->width * (cap_nvfbc->params.hdr ? 2 : 1);
|
||||
memcpy_struct.Height = frame->height / div[i];
|
||||
// TODO: Remove this copy if possible
|
||||
cap_nvfbc->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, cap_nvfbc->cuda_stream);
|
||||
}
|
||||
|
||||
// TODO: needed?
|
||||
cap_nvfbc->cuda.cuStreamSynchronize(cap_nvfbc->cuda_stream);
|
||||
}
|
||||
|
||||
// TODO: needed?
|
||||
cap_nvfbc->cuda.cuStreamSynchronize(cap_nvfbc->cuda_stream);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_nvfbc_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_nvfbc *cap_nvfbc = cap->priv;
|
||||
gsr_capture_nvfbc_destroy_session_and_handle(cap_nvfbc);
|
||||
if(cap_nvfbc) {
|
||||
gsr_capture_base_stop(&cap_nvfbc->base);
|
||||
gsr_cuda_unload(&cap_nvfbc->cuda);
|
||||
dlclose(cap_nvfbc->library);
|
||||
free((void*)cap_nvfbc->params.display_to_capture);
|
||||
cap_nvfbc->params.display_to_capture = NULL;
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
gsr_capture_nvfbc_stop(cap_nvfbc);
|
||||
free(cap);
|
||||
}
|
||||
|
||||
|
||||
@@ -343,7 +343,6 @@ int gsr_capture_xcomposite_capture(gsr_capture_xcomposite *self, AVFrame *frame)
|
||||
}
|
||||
}
|
||||
|
||||
self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
|
||||
@@ -96,6 +96,8 @@ static int gsr_capture_xcomposite_cuda_capture(gsr_capture *cap, AVFrame *frame)
|
||||
|
||||
gsr_capture_xcomposite_capture(&cap_xcomp->xcomposite, frame);
|
||||
|
||||
cap_xcomp->xcomposite.params.egl->eglSwapBuffers(cap_xcomp->xcomposite.params.egl->egl_display, cap_xcomp->xcomposite.params.egl->egl_surface);
|
||||
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
|
||||
113
src/capture/xcomposite_software.c
Normal file
113
src/capture/xcomposite_software.c
Normal file
@@ -0,0 +1,113 @@
|
||||
#include "../../include/capture/xcomposite_software.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite xcomposite;
|
||||
} gsr_capture_xcomposite_software;
|
||||
|
||||
static void gsr_capture_xcomposite_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
/* Starts window capture via xcomposite and creates the textures used for
 * software video encoding. Stops the capture again on any failure. */
static int gsr_capture_xcomposite_software_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_capture_xcomposite_software *self = cap->priv;

    int result = gsr_capture_xcomposite_start(&self->xcomposite, video_codec_context, frame);
    if(result == 0 && !gsr_capture_base_setup_textures(&self->xcomposite.base, frame, self->xcomposite.params.color_range, GSR_SOURCE_COLOR_RGB, false, false))
        result = -1;

    if(result != 0)
        gsr_capture_xcomposite_software_stop(cap, video_codec_context);

    return result;
}
|
||||
|
||||
/* Stops the underlying xcomposite capture. Does not free the capture object. */
static void gsr_capture_xcomposite_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_stop(&self->xcomposite);
}
|
||||
|
||||
/* Forwards the per-tick update to the underlying xcomposite capture. */
static void gsr_capture_xcomposite_software_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_tick(&self->xcomposite, video_codec_context);
}
|
||||
|
||||
/* Returns whether the captured window has damage (needs a new frame). */
static bool gsr_capture_xcomposite_software_is_damaged(gsr_capture *cap) {
    gsr_capture_xcomposite_software *self = cap->priv;
    return gsr_capture_xcomposite_is_damaged(&self->xcomposite);
}
|
||||
|
||||
/* Clears the recorded window damage after a frame has been captured. */
static void gsr_capture_xcomposite_software_clear_damage(gsr_capture *cap) {
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_clear_damage(&self->xcomposite);
}
|
||||
|
||||
/* Forwards the should-stop query (and error flag) to the xcomposite capture. */
static bool gsr_capture_xcomposite_software_should_stop(gsr_capture *cap, bool *err) {
    gsr_capture_xcomposite_software *self = cap->priv;
    return gsr_capture_xcomposite_should_stop(&self->xcomposite, err);
}
|
||||
|
||||
/* Captures one frame of the window and reads the single-channel (GL_RED) and
 * two-channel (GL_RG) planes back into the AVFrame's CPU buffers for software
 * encoding. Always returns 0. */
static int gsr_capture_xcomposite_software_capture(gsr_capture *cap, AVFrame *frame) {
    gsr_capture_xcomposite_software *self = cap->priv;

    gsr_capture_xcomposite_capture(&self->xcomposite, frame);

    const unsigned int plane_formats[2] = { GL_RED, GL_RG };
    for(int plane = 0; plane < 2; ++plane) {
        self->xcomposite.params.egl->glBindTexture(GL_TEXTURE_2D, self->xcomposite.base.target_textures[plane]);
        self->xcomposite.params.egl->glGetTexImage(GL_TEXTURE_2D, 0, plane_formats[plane], GL_UNSIGNED_BYTE, frame->data[plane]);
    }
    self->xcomposite.params.egl->glBindTexture(GL_TEXTURE_2D, 0);

    self->xcomposite.params.egl->eglSwapBuffers(self->xcomposite.params.egl->egl_display, self->xcomposite.params.egl->egl_surface);

    return 0;
}
|
||||
|
||||
/* Stops the capture, frees the private state, then the capture itself. */
static void gsr_capture_xcomposite_software_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
    gsr_capture_xcomposite_software *self = cap->priv;
    if(self) {
        gsr_capture_xcomposite_software_stop(cap, video_codec_context);
        free(self);
        cap->priv = NULL;
    }
    free(cap);
}
|
||||
|
||||
gsr_capture* gsr_capture_xcomposite_software_create(const gsr_capture_xcomposite_software_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_xcomposite_software_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_xcomposite_software *cap_xcomp = calloc(1, sizeof(gsr_capture_xcomposite_software));
|
||||
if(!cap_xcomp) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture_xcomposite_init(&cap_xcomp->xcomposite, ¶ms->base);
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_xcomposite_software_start,
|
||||
.tick = gsr_capture_xcomposite_software_tick,
|
||||
.is_damaged = gsr_capture_xcomposite_software_is_damaged,
|
||||
.clear_damage = gsr_capture_xcomposite_software_clear_damage,
|
||||
.should_stop = gsr_capture_xcomposite_software_should_stop,
|
||||
.capture = gsr_capture_xcomposite_software_capture,
|
||||
.capture_end = NULL,
|
||||
.destroy = gsr_capture_xcomposite_software_destroy,
|
||||
.priv = cap_xcomp
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
@@ -25,7 +25,7 @@ static int gsr_capture_xcomposite_vaapi_start(gsr_capture *cap, AVCodecContext *
|
||||
return res;
|
||||
}
|
||||
|
||||
if(!drm_create_codec_context(cap_xcomp->xcomposite.params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_xcomp->va_dpy)) {
|
||||
if(!vaapi_create_codec_context(cap_xcomp->xcomposite.params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_xcomp->va_dpy)) {
|
||||
gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
@@ -60,7 +60,9 @@ static bool gsr_capture_xcomposite_vaapi_should_stop(gsr_capture *cap, bool *err
|
||||
|
||||
static int gsr_capture_xcomposite_vaapi_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_xcomposite_vaapi *cap_xcomp = cap->priv;
|
||||
return gsr_capture_xcomposite_capture(&cap_xcomp->xcomposite, frame);
|
||||
gsr_capture_xcomposite_capture(&cap_xcomp->xcomposite, frame);
|
||||
cap_xcomp->xcomposite.params.egl->eglSwapBuffers(cap_xcomp->xcomposite.params.egl->egl_display, cap_xcomp->xcomposite.params.egl->egl_surface);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_vaapi_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
|
||||
Reference in New Issue
Block a user