mirror of
https://repo.dec05eba.com/gpu-screen-recorder
synced 2026-05-06 06:50:43 +09:00
Add support for camera (yuyv, mjpeg) and multiple capture sources
This commit is contained in:
@@ -3,7 +3,6 @@
|
||||
#include "../../include/color_conversion.h"
|
||||
#include "../../include/cursor.h"
|
||||
#include "../../include/window/window.h"
|
||||
#include "../../kms/client/kms_client.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
@@ -30,9 +29,6 @@ typedef struct {
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms_params params;
|
||||
|
||||
gsr_kms_client kms_client;
|
||||
gsr_kms_response kms_response;
|
||||
|
||||
vec2i capture_pos;
|
||||
vec2i capture_size;
|
||||
@@ -51,7 +47,6 @@ typedef struct {
|
||||
bool hdr_metadata_set;
|
||||
|
||||
bool is_x11;
|
||||
gsr_cursor x11_cursor;
|
||||
|
||||
//int drm_fd;
|
||||
//uint64_t prev_sequence;
|
||||
@@ -61,21 +56,12 @@ typedef struct {
|
||||
vec2i prev_plane_size;
|
||||
|
||||
double last_time_monitor_check;
|
||||
} gsr_capture_kms;
|
||||
|
||||
static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
|
||||
for(int i = 0; i < self->kms_response.num_items; ++i) {
|
||||
for(int j = 0; j < self->kms_response.items[i].num_dma_bufs; ++j) {
|
||||
gsr_kms_response_dma_buf *dma_buf = &self->kms_response.items[i].dma_buf[j];
|
||||
if(dma_buf->fd > 0) {
|
||||
close(dma_buf->fd);
|
||||
dma_buf->fd = -1;
|
||||
}
|
||||
}
|
||||
self->kms_response.items[i].num_dma_bufs = 0;
|
||||
}
|
||||
self->kms_response.num_items = 0;
|
||||
}
|
||||
bool capture_is_combined_plane;
|
||||
gsr_kms_response_item *drm_fd;
|
||||
vec2i output_size;
|
||||
vec2i target_pos;
|
||||
} gsr_capture_kms;
|
||||
|
||||
static void gsr_capture_kms_stop(gsr_capture_kms *self) {
|
||||
if(self->input_texture_id) {
|
||||
@@ -97,10 +83,6 @@ static void gsr_capture_kms_stop(gsr_capture_kms *self) {
|
||||
// close(self->drm_fd);
|
||||
// self->drm_fd = -1;
|
||||
// }
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
gsr_kms_client_deinit(&self->kms_client);
|
||||
gsr_cursor_deinit(&self->x11_cursor);
|
||||
}
|
||||
|
||||
static int max_int(int a, int b) {
|
||||
@@ -172,16 +154,8 @@ static int gsr_capture_kms_start(gsr_capture *cap, gsr_capture_metadata *capture
|
||||
gsr_monitor monitor;
|
||||
self->monitor_id.num_connector_ids = 0;
|
||||
|
||||
int kms_init_res = gsr_kms_client_init(&self->kms_client, self->params.egl->card_path);
|
||||
if(kms_init_res != 0)
|
||||
return kms_init_res;
|
||||
|
||||
self->is_x11 = gsr_window_get_display_server(self->params.egl->window) == GSR_DISPLAY_SERVER_X11;
|
||||
const gsr_connection_type connection_type = self->is_x11 ? GSR_CONNECTION_X11 : GSR_CONNECTION_DRM;
|
||||
if(self->is_x11) {
|
||||
Display *display = gsr_window_get_display(self->params.egl->window);
|
||||
gsr_cursor_init(&self->x11_cursor, self->params.egl, display);
|
||||
}
|
||||
|
||||
MonitorCallbackUserdata monitor_callback_userdata = {
|
||||
&self->monitor_id,
|
||||
@@ -209,29 +183,17 @@ static int gsr_capture_kms_start(gsr_capture *cap, gsr_capture_metadata *capture
|
||||
|
||||
if(self->params.output_resolution.x > 0 && self->params.output_resolution.y > 0) {
|
||||
self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
|
||||
capture_metadata->video_width = self->params.output_resolution.x;
|
||||
capture_metadata->video_height = self->params.output_resolution.y;
|
||||
capture_metadata->video_size = self->params.output_resolution;
|
||||
} else if(self->params.region_size.x > 0 && self->params.region_size.y > 0) {
|
||||
capture_metadata->video_width = self->params.region_size.x;
|
||||
capture_metadata->video_height = self->params.region_size.y;
|
||||
capture_metadata->video_size = self->params.region_size;
|
||||
} else {
|
||||
capture_metadata->video_width = self->capture_size.x;
|
||||
capture_metadata->video_height = self->capture_size.y;
|
||||
capture_metadata->video_size = self->capture_size;
|
||||
}
|
||||
|
||||
self->last_time_monitor_check = clock_get_monotonic_seconds();
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_on_event(gsr_capture *cap, gsr_egl *egl) {
|
||||
gsr_capture_kms *self = cap->priv;
|
||||
if(!self->is_x11)
|
||||
return;
|
||||
|
||||
XEvent *xev = gsr_window_get_event_data(egl->window);
|
||||
gsr_cursor_on_event(&self->x11_cursor, xev);
|
||||
}
|
||||
|
||||
// TODO: This is disabled for now because we want to be able to record at a framerate higher than the monitor framerate
|
||||
// static void gsr_capture_kms_tick(gsr_capture *cap) {
|
||||
// gsr_capture_kms *self = cap->priv;
|
||||
@@ -397,14 +359,14 @@ static gsr_kms_response_item* find_monitor_drm(gsr_capture_kms *self, bool *capt
|
||||
gsr_kms_response_item *drm_fd = NULL;
|
||||
|
||||
for(int i = 0; i < self->monitor_id.num_connector_ids; ++i) {
|
||||
drm_fd = find_drm_by_connector_id(&self->kms_response, self->monitor_id.connector_ids[i]);
|
||||
drm_fd = find_drm_by_connector_id(self->params.kms_response, self->monitor_id.connector_ids[i]);
|
||||
if(drm_fd)
|
||||
break;
|
||||
}
|
||||
|
||||
// Will never happen on wayland unless the target monitor has been disconnected
|
||||
if(!drm_fd && self->is_x11) {
|
||||
drm_fd = find_largest_drm(&self->kms_response);
|
||||
drm_fd = find_largest_drm(self->params.kms_response);
|
||||
*capture_is_combined_plane = true;
|
||||
}
|
||||
|
||||
@@ -412,7 +374,7 @@ static gsr_kms_response_item* find_monitor_drm(gsr_capture_kms *self, bool *capt
|
||||
}
|
||||
|
||||
static gsr_kms_response_item* find_cursor_drm_if_on_monitor(gsr_capture_kms *self, uint32_t monitor_connector_id, bool capture_is_combined_plane) {
|
||||
gsr_kms_response_item *cursor_drm_fd = find_cursor_drm(&self->kms_response, monitor_connector_id);
|
||||
gsr_kms_response_item *cursor_drm_fd = find_cursor_drm(self->params.kms_response, monitor_connector_id);
|
||||
if(!capture_is_combined_plane && cursor_drm_fd && cursor_drm_fd->connector_id != monitor_connector_id)
|
||||
cursor_drm_fd = NULL;
|
||||
return cursor_drm_fd;
|
||||
@@ -431,7 +393,7 @@ static gsr_monitor_rotation sub_rotations(gsr_monitor_rotation rot1, gsr_monitor
|
||||
return remainder_int(rot1 - rot2, 4);
|
||||
}
|
||||
|
||||
static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, const gsr_kms_response_item *cursor_drm_fd, vec2i target_pos, vec2i output_size, vec2i framebuffer_size) {
|
||||
static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, gsr_capture_metadata *capture_metadata, const gsr_kms_response_item *cursor_drm_fd, vec2i target_pos, vec2i output_size, vec2i framebuffer_size) {
|
||||
const vec2d scale = {
|
||||
self->capture_size.x == 0 ? 0 : (double)output_size.x / (double)self->capture_size.x,
|
||||
self->capture_size.y == 0 ? 0 : (double)output_size.y / (double)self->capture_size.y
|
||||
@@ -508,13 +470,13 @@ static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color
|
||||
gsr_color_conversion_draw(color_conversion, self->cursor_texture_id,
|
||||
cursor_pos, (vec2i){cursor_size.x * scale.x, cursor_size.y * scale.y},
|
||||
(vec2i){0, 0}, cursor_size, cursor_size,
|
||||
gsr_monitor_rotation_to_rotation(rotation), GSR_SOURCE_COLOR_RGB, cursor_texture_id_is_external);
|
||||
gsr_monitor_rotation_to_rotation(rotation), capture_metadata->flip, GSR_SOURCE_COLOR_RGB, cursor_texture_id_is_external);
|
||||
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, vec2i capture_pos, vec2i target_pos, vec2i output_size) {
|
||||
if(!self->x11_cursor.visible)
|
||||
static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, gsr_capture_metadata *capture_metadata, vec2i capture_pos, vec2i target_pos, vec2i output_size) {
|
||||
if(!self->params.x11_cursor->visible)
|
||||
return;
|
||||
|
||||
const vec2d scale = {
|
||||
@@ -522,21 +484,18 @@ static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color
|
||||
self->capture_size.y == 0 ? 0 : (double)output_size.y / (double)self->capture_size.y
|
||||
};
|
||||
|
||||
Display *display = gsr_window_get_display(self->params.egl->window);
|
||||
gsr_cursor_tick(&self->x11_cursor, DefaultRootWindow(display));
|
||||
|
||||
const vec2i cursor_pos = {
|
||||
target_pos.x + (self->x11_cursor.position.x - self->x11_cursor.hotspot.x - capture_pos.x) * scale.x,
|
||||
target_pos.y + (self->x11_cursor.position.y - self->x11_cursor.hotspot.y - capture_pos.y) * scale.y
|
||||
target_pos.x + (self->params.x11_cursor->position.x - self->params.x11_cursor->hotspot.x - capture_pos.x) * scale.x,
|
||||
target_pos.y + (self->params.x11_cursor->position.y - self->params.x11_cursor->hotspot.y - capture_pos.y) * scale.y
|
||||
};
|
||||
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_pos.x, target_pos.y, output_size.x, output_size.y);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->x11_cursor.texture_id,
|
||||
cursor_pos, (vec2i){self->x11_cursor.size.x * scale.x, self->x11_cursor.size.y * scale.y},
|
||||
(vec2i){0, 0}, self->x11_cursor.size, self->x11_cursor.size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false);
|
||||
gsr_color_conversion_draw(color_conversion, self->params.x11_cursor->texture_id,
|
||||
cursor_pos, (vec2i){self->params.x11_cursor->size.x * scale.x, self->params.x11_cursor->size.y * scale.y},
|
||||
(vec2i){0, 0}, self->params.x11_cursor->size, self->params.x11_cursor->size,
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
@@ -545,7 +504,7 @@ static void gsr_capture_kms_update_capture_size_change(gsr_capture_kms *self, gs
|
||||
if(target_pos.x != self->prev_target_pos.x || target_pos.y != self->prev_target_pos.y || drm_fd->src_w != self->prev_plane_size.x || drm_fd->src_h != self->prev_plane_size.y) {
|
||||
self->prev_target_pos = target_pos;
|
||||
self->prev_plane_size = self->capture_size;
|
||||
gsr_color_conversion_clear(color_conversion);
|
||||
color_conversion->schedule_clear = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -590,48 +549,47 @@ static void gsr_capture_kms_update_connector_ids(gsr_capture_kms *self) {
|
||||
self->capture_size = rotate_capture_size_if_rotated(self, monitor.size);
|
||||
}
|
||||
|
||||
static int gsr_capture_kms_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
static void gsr_capture_kms_pre_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_kms *self = cap->priv;
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
|
||||
if(gsr_kms_client_get_kms(&self->kms_client, &self->kms_response) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_capture: failed to get kms, error: %d (%s)\n", self->kms_response.result, self->kms_response.err_msg);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(self->kms_response.num_items == 0) {
|
||||
if(self->params.kms_response->num_items == 0) {
|
||||
static bool error_shown = false;
|
||||
if(!error_shown) {
|
||||
error_shown = true;
|
||||
fprintf(stderr, "gsr error: no drm found, capture will fail\n");
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_pre_capture: no drm found, capture will fail\n");
|
||||
}
|
||||
return -1;
|
||||
return;
|
||||
}
|
||||
|
||||
gsr_capture_kms_update_connector_ids(self);
|
||||
|
||||
bool capture_is_combined_plane = false;
|
||||
const gsr_kms_response_item *drm_fd = find_monitor_drm(self, &capture_is_combined_plane);
|
||||
if(!drm_fd) {
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
return -1;
|
||||
}
|
||||
self->capture_is_combined_plane = false;
|
||||
self->drm_fd = find_monitor_drm(self, &self->capture_is_combined_plane);
|
||||
if(!self->drm_fd)
|
||||
return;
|
||||
|
||||
if(drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
|
||||
gsr_kms_set_hdr_metadata(self, drm_fd);
|
||||
if(self->drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&self->drm_fd->hdr_metadata))
|
||||
gsr_kms_set_hdr_metadata(self, self->drm_fd);
|
||||
|
||||
self->capture_size = rotate_capture_size_if_rotated(self, (vec2i){ drm_fd->src_w, drm_fd->src_h });
|
||||
self->capture_size = rotate_capture_size_if_rotated(self, (vec2i){ self->drm_fd->src_w, self->drm_fd->src_h });
|
||||
if(self->params.region_size.x > 0 && self->params.region_size.y > 0)
|
||||
self->capture_size = self->params.region_size;
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, (vec2i){capture_metadata->recording_width, capture_metadata->recording_height});
|
||||
const vec2i target_pos = { max_int(0, capture_metadata->video_width / 2 - output_size.x / 2), max_int(0, capture_metadata->video_height / 2 - output_size.y / 2) };
|
||||
gsr_capture_kms_update_capture_size_change(self, color_conversion, target_pos, drm_fd);
|
||||
self->output_size = scale_keep_aspect_ratio(self->capture_size, capture_metadata->recording_size);
|
||||
self->target_pos = gsr_capture_get_target_position(self->output_size, capture_metadata);
|
||||
gsr_capture_kms_update_capture_size_change(self, color_conversion, self->target_pos, self->drm_fd);
|
||||
}
|
||||
|
||||
static int gsr_capture_kms_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
(void)capture_metadata;
|
||||
gsr_capture_kms *self = cap->priv;
|
||||
|
||||
if(self->params.kms_response->num_items == 0)
|
||||
return -1;
|
||||
|
||||
vec2i capture_pos = self->capture_pos;
|
||||
if(!capture_is_combined_plane)
|
||||
capture_pos = (vec2i){drm_fd->x, drm_fd->y};
|
||||
if(!self->capture_is_combined_plane)
|
||||
capture_pos = (vec2i){self->drm_fd->x, self->drm_fd->y};
|
||||
|
||||
capture_pos.x += self->params.region_position.x;
|
||||
capture_pos.y += self->params.region_position.y;
|
||||
@@ -639,22 +597,22 @@ static int gsr_capture_kms_capture(gsr_capture *cap, gsr_capture_metadata *captu
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
EGLImage image = gsr_capture_kms_create_egl_image_with_fallback(self, drm_fd);
|
||||
EGLImage image = gsr_capture_kms_create_egl_image_with_fallback(self, self->drm_fd);
|
||||
if(image) {
|
||||
gsr_capture_kms_bind_image_to_input_texture_with_fallback(self, image);
|
||||
self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
|
||||
}
|
||||
|
||||
const gsr_monitor_rotation plane_rotation = kms_rotation_to_gsr_monitor_rotation(drm_fd->rotation);
|
||||
const gsr_monitor_rotation rotation = capture_is_combined_plane ? GSR_MONITOR_ROT_0 : sub_rotations(self->monitor_rotation, plane_rotation);
|
||||
const gsr_monitor_rotation plane_rotation = kms_rotation_to_gsr_monitor_rotation(self->drm_fd->rotation);
|
||||
const gsr_monitor_rotation rotation = self->capture_is_combined_plane ? GSR_MONITOR_ROT_0 : sub_rotations(self->monitor_rotation, plane_rotation);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->external_texture_fallback ? self->external_input_texture_id : self->input_texture_id,
|
||||
target_pos, output_size,
|
||||
capture_pos, self->capture_size, (vec2i){ drm_fd->width, drm_fd->height },
|
||||
gsr_monitor_rotation_to_rotation(rotation), GSR_SOURCE_COLOR_RGB, self->external_texture_fallback);
|
||||
self->target_pos, self->output_size,
|
||||
capture_pos, self->capture_size, (vec2i){ self->drm_fd->width, self->drm_fd->height },
|
||||
gsr_monitor_rotation_to_rotation(rotation), capture_metadata->flip, GSR_SOURCE_COLOR_RGB, self->external_texture_fallback);
|
||||
|
||||
if(self->params.record_cursor) {
|
||||
gsr_kms_response_item *cursor_drm_fd = find_cursor_drm_if_on_monitor(self, drm_fd->connector_id, capture_is_combined_plane);
|
||||
gsr_kms_response_item *cursor_drm_fd = find_cursor_drm_if_on_monitor(self, self->drm_fd->connector_id, self->capture_is_combined_plane);
|
||||
// The cursor is handled by x11 on x11 instead of using the cursor drm plane because on prime systems with a dedicated nvidia gpu
|
||||
// the cursor plane is not available when the cursor is on the monitor controlled by the nvidia device.
|
||||
// TODO: This doesn't work properly with software cursor on x11 since it will draw the x11 cursor on top of the cursor already in the framebuffer.
|
||||
@@ -663,18 +621,16 @@ static int gsr_capture_kms_capture(gsr_capture *cap, gsr_capture_metadata *captu
|
||||
vec2i cursor_monitor_offset = self->capture_pos;
|
||||
cursor_monitor_offset.x += self->params.region_position.x;
|
||||
cursor_monitor_offset.y += self->params.region_position.y;
|
||||
render_x11_cursor(self, color_conversion, cursor_monitor_offset, target_pos, output_size);
|
||||
render_x11_cursor(self, color_conversion, capture_metadata, cursor_monitor_offset, self->target_pos, self->output_size);
|
||||
} else if(cursor_drm_fd) {
|
||||
const vec2i framebuffer_size = rotate_capture_size_if_rotated(self, (vec2i){ drm_fd->src_w, drm_fd->src_h });
|
||||
render_drm_cursor(self, color_conversion, cursor_drm_fd, target_pos, output_size, framebuffer_size);
|
||||
const vec2i framebuffer_size = rotate_capture_size_if_rotated(self, (vec2i){ self->drm_fd->src_w, self->drm_fd->src_h });
|
||||
render_drm_cursor(self, color_conversion, capture_metadata, cursor_drm_fd, self->target_pos, self->output_size, framebuffer_size);
|
||||
}
|
||||
}
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -766,9 +722,9 @@ gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params) {
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_start,
|
||||
.on_event = gsr_capture_kms_on_event,
|
||||
//.tick = gsr_capture_kms_tick,
|
||||
.should_stop = gsr_capture_kms_should_stop,
|
||||
.pre_capture = gsr_capture_kms_pre_capture,
|
||||
.capture = gsr_capture_kms_capture,
|
||||
.uses_external_image = gsr_capture_kms_uses_external_image,
|
||||
.set_hdr_metadata = gsr_capture_kms_set_hdr_metadata,
|
||||
|
||||
@@ -284,16 +284,14 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, gsr_capture_metadata *captu
|
||||
goto error_cleanup;
|
||||
}
|
||||
|
||||
capture_metadata->video_width = self->tracking_width;
|
||||
capture_metadata->video_height = self->tracking_height;
|
||||
capture_metadata->video_size.x = self->tracking_width;
|
||||
capture_metadata->video_size.y = self->tracking_height;
|
||||
|
||||
if(self->params.output_resolution.x > 0 && self->params.output_resolution.y > 0) {
|
||||
self->params.output_resolution = scale_keep_aspect_ratio((vec2i){capture_metadata->video_width, capture_metadata->video_height}, self->params.output_resolution);
|
||||
capture_metadata->video_width = self->params.output_resolution.x;
|
||||
capture_metadata->video_height = self->params.output_resolution.y;
|
||||
self->params.output_resolution = scale_keep_aspect_ratio(capture_metadata->video_size, self->params.output_resolution);
|
||||
capture_metadata->video_size = self->params.output_resolution;
|
||||
} else if(self->params.region_size.x > 0 && self->params.region_size.y > 0) {
|
||||
capture_metadata->video_width = self->params.region_size.x;
|
||||
capture_metadata->video_height = self->params.region_size.y;
|
||||
capture_metadata->video_size = self->params.region_size;
|
||||
}
|
||||
|
||||
return 0;
|
||||
@@ -371,8 +369,8 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, gsr_capture_metadata *cap
|
||||
if(self->params.region_size.x > 0 && self->params.region_size.y > 0)
|
||||
frame_size = self->params.region_size;
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(frame_size, (vec2i){capture_metadata->recording_width, capture_metadata->recording_height});
|
||||
const vec2i target_pos = { max_int(0, capture_metadata->video_width / 2 - output_size.x / 2), max_int(0, capture_metadata->video_height / 2 - output_size.y / 2) };
|
||||
const vec2i output_size = scale_keep_aspect_ratio(frame_size, capture_metadata->recording_size);
|
||||
const vec2i target_pos = gsr_capture_get_target_position(output_size, capture_metadata);
|
||||
|
||||
NVFBC_FRAME_GRAB_INFO frame_info;
|
||||
memset(&frame_info, 0, sizeof(frame_info));
|
||||
@@ -398,7 +396,7 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, gsr_capture_metadata *cap
|
||||
gsr_color_conversion_draw(color_conversion, self->setup_params.dwTextures[grab_params.dwTextureIndex],
|
||||
target_pos, (vec2i){output_size.x, output_size.y},
|
||||
self->params.region_position, frame_size, original_frame_size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_BGR, false);
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_BGR, false);
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
@@ -35,6 +35,7 @@ typedef struct {
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
bool do_capture;
|
||||
} gsr_capture_portal;
|
||||
|
||||
static void gsr_capture_portal_cleanup_plane_fds(gsr_capture_portal *self) {
|
||||
@@ -293,12 +294,10 @@ static int gsr_capture_portal_start(gsr_capture *cap, gsr_capture_metadata *capt
|
||||
}
|
||||
|
||||
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
|
||||
capture_metadata->video_width = self->capture_size.x;
|
||||
capture_metadata->video_height = self->capture_size.y;
|
||||
capture_metadata->video_size = self->capture_size;
|
||||
} else {
|
||||
self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
|
||||
capture_metadata->video_width = self->params.output_resolution.x;
|
||||
capture_metadata->video_height = self->params.output_resolution.y;
|
||||
capture_metadata->video_size = self->params.output_resolution;
|
||||
}
|
||||
|
||||
return 0;
|
||||
@@ -322,22 +321,22 @@ static bool fourcc_has_alpha(uint32_t fourcc) {
|
||||
return false;
|
||||
}
|
||||
|
||||
static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
(void)color_conversion;
|
||||
static void gsr_capture_portal_pre_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_portal *self = cap->priv;
|
||||
self->do_capture = false;
|
||||
|
||||
if(self->should_stop)
|
||||
return -1;
|
||||
return;
|
||||
|
||||
if(gsr_pipewire_video_should_restart(&self->pipewire)) {
|
||||
fprintf(stderr, "gsr info: gsr_capture_portal_capture: pipewire capture was paused, trying to start capture again\n");
|
||||
fprintf(stderr, "gsr info: gsr_capture_portal_pre_capture: pipewire capture was paused, trying to start capture again\n");
|
||||
gsr_capture_portal_stop(self);
|
||||
const int result = gsr_capture_portal_setup(self, capture_metadata->fps);
|
||||
if(result != 0) {
|
||||
self->stop_is_error = result != PORTAL_CAPTURE_CANCELED_BY_USER_EXIT_CODE;
|
||||
self->should_stop = true;
|
||||
}
|
||||
return -1;
|
||||
return;
|
||||
}
|
||||
|
||||
/* TODO: Handle formats other than RGB(A) */
|
||||
@@ -346,15 +345,29 @@ static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *ca
|
||||
if(self->pipewire_data.region.width != self->capture_size.x || self->pipewire_data.region.height != self->capture_size.y) {
|
||||
self->capture_size.x = self->pipewire_data.region.width;
|
||||
self->capture_size.y = self->pipewire_data.region.height;
|
||||
gsr_color_conversion_clear(color_conversion);
|
||||
color_conversion->schedule_clear = true;
|
||||
}
|
||||
} else {
|
||||
return -1;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, (vec2i){capture_metadata->recording_width, capture_metadata->recording_height});
|
||||
const vec2i target_pos = { max_int(0, capture_metadata->video_width / 2 - output_size.x / 2), max_int(0, capture_metadata->video_height / 2 - output_size.y / 2) };
|
||||
const bool fourcc_alpha = fourcc_has_alpha(self->pipewire_data.fourcc);
|
||||
if(fourcc_alpha)
|
||||
color_conversion->schedule_clear = true;
|
||||
|
||||
self->do_capture = true;
|
||||
}
|
||||
|
||||
static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
(void)color_conversion;
|
||||
gsr_capture_portal *self = cap->priv;
|
||||
|
||||
if(self->should_stop || !self->do_capture)
|
||||
return -1;
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, capture_metadata->recording_size);
|
||||
const vec2i target_pos = gsr_capture_get_target_position(output_size, capture_metadata);
|
||||
|
||||
const vec2i actual_texture_size = {self->pipewire_data.texture_width, self->pipewire_data.texture_height};
|
||||
|
||||
@@ -363,14 +376,10 @@ static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *ca
|
||||
|
||||
// TODO: Handle region crop
|
||||
|
||||
const bool fourcc_alpha = fourcc_has_alpha(self->pipewire_data.fourcc);
|
||||
if(fourcc_alpha)
|
||||
gsr_color_conversion_clear(color_conversion);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->pipewire_data.using_external_image ? self->texture_map.external_texture_id : self->texture_map.texture_id,
|
||||
target_pos, output_size,
|
||||
(vec2i){self->pipewire_data.region.x, self->pipewire_data.region.y}, (vec2i){self->pipewire_data.region.width, self->pipewire_data.region.height}, actual_texture_size,
|
||||
gsr_monitor_rotation_to_rotation(self->pipewire_data.rotation), GSR_SOURCE_COLOR_RGB, self->pipewire_data.using_external_image);
|
||||
gsr_monitor_rotation_to_rotation(self->pipewire_data.rotation), capture_metadata->flip, GSR_SOURCE_COLOR_RGB, self->pipewire_data.using_external_image);
|
||||
|
||||
if(self->params.record_cursor && self->texture_map.cursor_texture_id > 0 && self->pipewire_data.cursor_region.width > 0) {
|
||||
const vec2d scale = {
|
||||
@@ -385,13 +394,15 @@ static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *ca
|
||||
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_pos.x, target_pos.y, output_size.x, output_size.y);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->texture_map.cursor_texture_id,
|
||||
(vec2i){cursor_pos.x, cursor_pos.y},
|
||||
(vec2i){self->pipewire_data.cursor_region.width * scale.x, self->pipewire_data.cursor_region.height * scale.y},
|
||||
(vec2i){0, 0},
|
||||
(vec2i){self->pipewire_data.cursor_region.width, self->pipewire_data.cursor_region.height},
|
||||
(vec2i){self->pipewire_data.cursor_region.width, self->pipewire_data.cursor_region.height},
|
||||
gsr_monitor_rotation_to_rotation(self->pipewire_data.rotation), GSR_SOURCE_COLOR_RGB, false);
|
||||
gsr_monitor_rotation_to_rotation(self->pipewire_data.rotation), capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
@@ -458,6 +469,7 @@ gsr_capture* gsr_capture_portal_create(const gsr_capture_portal_params *params)
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_portal_should_stop,
|
||||
.capture_has_synchronous_task = gsr_capture_portal_capture_has_synchronous_task,
|
||||
.pre_capture = gsr_capture_portal_pre_capture,
|
||||
.capture = gsr_capture_portal_capture,
|
||||
.uses_external_image = gsr_capture_portal_uses_external_image,
|
||||
.is_damaged = gsr_capture_portal_is_damaged,
|
||||
|
||||
656
src/capture/v4l2.c
Normal file
656
src/capture/v4l2.c
Normal file
@@ -0,0 +1,656 @@
|
||||
#include "../../include/capture/v4l2.h"
|
||||
#include "../../include/color_conversion.h"
|
||||
#include "../../include/egl.h"
|
||||
#include "../../include/utils.h"
|
||||
|
||||
#include <dlfcn.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
#include <poll.h>
|
||||
#include <sys/ioctl.h>
|
||||
#include <sys/mman.h>
|
||||
#include <linux/videodev2.h>
|
||||
#include <linux/dma-buf.h>
|
||||
#include <drm_fourcc.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <errno.h>
|
||||
#include <assert.h>
|
||||
|
||||
#define TJPF_RGB 0
|
||||
#define TJPF_RGBA 7
|
||||
#define TJFLAG_FASTDCT 2048
|
||||
|
||||
#define NUM_BUFFERS 2
|
||||
#define NUM_PBOS 2
|
||||
|
||||
typedef void* tjhandle;
|
||||
typedef tjhandle (*FUNC_tjInitDecompress)(void);
|
||||
typedef int (*FUNC_tjDestroy)(tjhandle handle);
|
||||
typedef int (*FUNC_tjDecompressHeader2)(tjhandle handle,
|
||||
unsigned char *jpegBuf, unsigned long jpegSize,
|
||||
int *width, int *height, int *jpegSubsamp);
|
||||
typedef int (*FUNC_tjDecompress2)(tjhandle handle, const unsigned char *jpegBuf,
|
||||
unsigned long jpegSize, unsigned char *dstBuf,
|
||||
int width, int pitch, int height, int pixelFormat,
|
||||
int flags);
|
||||
typedef char* (*FUNC_tjGetErrorStr2)(tjhandle handle);
|
||||
|
||||
typedef enum {
|
||||
V4L2_BUFFER_TYPE_DMABUF,
|
||||
V4L2_BUFFER_TYPE_MMAP
|
||||
} v4l2_buffer_type;
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_v4l2_params params;
|
||||
vec2i capture_size;
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
|
||||
int fd;
|
||||
int dmabuf_fd[NUM_BUFFERS];
|
||||
EGLImage dma_image[NUM_BUFFERS];
|
||||
unsigned int texture_id;
|
||||
bool got_first_frame;
|
||||
|
||||
void *dmabuf_map[NUM_BUFFERS];
|
||||
size_t dmabuf_size[NUM_BUFFERS];
|
||||
unsigned int pbos[NUM_PBOS];
|
||||
unsigned int pbo_index;
|
||||
|
||||
v4l2_buffer_type buffer_type;
|
||||
|
||||
void *libturbojpeg_lib;
|
||||
FUNC_tjInitDecompress tjInitDecompress;
|
||||
FUNC_tjDestroy tjDestroy;
|
||||
FUNC_tjDecompressHeader2 tjDecompressHeader2;
|
||||
FUNC_tjDecompress2 tjDecompress2;
|
||||
FUNC_tjGetErrorStr2 tjGetErrorStr2;
|
||||
tjhandle jpeg_decompressor;
|
||||
|
||||
double capture_start_time;
|
||||
} gsr_capture_v4l2;
|
||||
|
||||
static int xioctl(int fd, unsigned long request, void *arg) {
|
||||
int r;
|
||||
|
||||
do {
|
||||
r = ioctl(fd, request, arg);
|
||||
} while (-1 == r && EINTR == errno);
|
||||
|
||||
return r;
|
||||
}
|
||||
|
||||
static void gsr_capture_v4l2_stop(gsr_capture_v4l2 *self) {
|
||||
self->params.egl->glDeleteBuffers(NUM_PBOS, self->pbos);
|
||||
for(int i = 0; i < NUM_PBOS; ++i) {
|
||||
self->pbos[i] = 0;
|
||||
}
|
||||
|
||||
if(self->texture_id) {
|
||||
self->params.egl->glDeleteTextures(1, &self->texture_id);
|
||||
self->texture_id = 0;
|
||||
}
|
||||
|
||||
for(int i = 0; i < NUM_BUFFERS; ++i) {
|
||||
if(self->dmabuf_map[i]) {
|
||||
munmap(self->dmabuf_map[i], self->dmabuf_size[i]);
|
||||
self->dmabuf_map[i] = NULL;
|
||||
}
|
||||
|
||||
if(self->dma_image[i]) {
|
||||
self->params.egl->eglDestroyImage(self->params.egl->egl_display, self->dma_image[i]);
|
||||
self->dma_image[i] = NULL;
|
||||
}
|
||||
|
||||
if(self->dmabuf_fd[i] > 0) {
|
||||
close(self->dmabuf_fd[i]);
|
||||
self->dmabuf_fd[i] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if(self->fd > 0) {
|
||||
xioctl(self->fd, VIDIOC_STREAMOFF, &(enum v4l2_buf_type){V4L2_BUF_TYPE_VIDEO_CAPTURE});
|
||||
close(self->fd);
|
||||
self->fd = 0;
|
||||
}
|
||||
|
||||
if(self->jpeg_decompressor) {
|
||||
self->tjDestroy(self->jpeg_decompressor);
|
||||
self->jpeg_decompressor = NULL;
|
||||
}
|
||||
|
||||
if(self->libturbojpeg_lib) {
|
||||
dlclose(self->libturbojpeg_lib);
|
||||
self->libturbojpeg_lib = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/* Resets the capture crop rectangle to the driver's default rectangle.
   Best-effort: devices without cropping support (EINVAL from VIDIOC_S_CROP)
   and any other errors are silently ignored. */
static void gsr_capture_v4l2_reset_cropping(gsr_capture_v4l2 *self) {
    struct v4l2_cropcap crop_capability = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE
    };
    if(xioctl(self->fd, VIDIOC_CROPCAP, &crop_capability) != 0)
        return; /* Errors ignored */

    struct v4l2_crop default_crop = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .c = crop_capability.defrect /* reset to default */
    };
    /* EINVAL means cropping isn't supported; all other errors are ignored too */
    xioctl(self->fd, VIDIOC_S_CROP, &default_crop);
}
|
||||
|
||||
gsr_capture_v4l2_supported_pixfmts gsr_capture_v4l2_get_supported_pixfmts(int fd) {
|
||||
gsr_capture_v4l2_supported_pixfmts result = {0};
|
||||
|
||||
struct v4l2_fmtdesc fmt = {
|
||||
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE
|
||||
};
|
||||
while(xioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
|
||||
//fprintf(stderr, "fmt: %d, desc: %s, flags: %d\n", fmt.pixelformat, fmt.description, fmt.flags);
|
||||
switch(fmt.pixelformat) {
|
||||
case V4L2_PIX_FMT_YUYV:
|
||||
result.yuyv = true;
|
||||
break;
|
||||
case V4L2_PIX_FMT_MJPEG:
|
||||
result.mjpeg = true;
|
||||
break;
|
||||
}
|
||||
++fmt.index;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* Maps the internal pixel format enum to the corresponding v4l2 fourcc.
   GSR_CAPTURE_V4L2_PIXFMT_AUTO must be resolved to a concrete format
   (see gsr_capture_v4l2_validate_pixfmt) before calling this. */
static uint32_t gsr_pixfmt_to_v4l2_pixfmt(gsr_capture_v4l2_pixfmt pixfmt) {
    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_YUYV)
        return V4L2_PIX_FMT_YUYV;

    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG)
        return V4L2_PIX_FMT_MJPEG;

    /* AUTO or an unknown value reaching this point is a programming error */
    assert(false);
    return V4L2_PIX_FMT_YUYV;
}
|
||||
|
||||
/* Validates the configured pixel format against what the device supports.
   AUTO is resolved in place: yuyv is preferred, mjpeg is the fallback.
   Returns false (with an error printed to stderr) when no usable format
   is available. */
static bool gsr_capture_v4l2_validate_pixfmt(gsr_capture_v4l2 *self, const gsr_capture_v4l2_supported_pixfmts supported_pixfmts) {
    if(self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_AUTO) {
        if(supported_pixfmts.yuyv) {
            self->params.pixfmt = GSR_CAPTURE_V4L2_PIXFMT_YUYV;
            return true;
        }
        if(supported_pixfmts.mjpeg) {
            self->params.pixfmt = GSR_CAPTURE_V4L2_PIXFMT_MJPEG;
            return true;
        }
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support yuyv nor mjpeg. GPU Screen Recorder supports only yuyv and mjpeg at the moment. Report this as an issue, see: https://git.dec05eba.com/?p=about\n", self->params.device_path);
        return false;
    }

    if(self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_YUYV && !supported_pixfmts.yuyv) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support yuyv. Try recording with -pixfmt mjpeg or -pixfmt auto instead\n", self->params.device_path);
        return false;
    }

    if(self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG && !supported_pixfmts.mjpeg) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support mjpeg. Try recording with -pixfmt yuyv or -pixfmt auto instead\n", self->params.device_path);
        return false;
    }

    return true;
}
|
||||
|
||||
/* Allocates NUM_PBOS pixel unpack buffers sized for a width*height RGBA image.
   These are used by the mjpeg path to upload decoded frames to the texture
   asynchronously. Returns false if any buffer object wasn't created. */
static bool gsr_capture_v4l2_create_pbos(gsr_capture_v4l2 *self, int width, int height) {
    gsr_egl *const egl = self->params.egl;
    self->pbo_index = 0;

    egl->glGenBuffers(NUM_PBOS, self->pbos);
    for(int pbo = 0; pbo < NUM_PBOS; ++pbo) {
        if(!self->pbos[pbo]) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create_pbos: failed to create pixel buffer objects\n");
            return false;
        }

        /* Pre-size each buffer for one RGBA frame */
        egl->glBindBuffer(GL_PIXEL_UNPACK_BUFFER, self->pbos[pbo]);
        egl->glBufferData(GL_PIXEL_UNPACK_BUFFER, width * height * 4, 0, GL_DYNAMIC_DRAW);
    }

    egl->glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
    return true;
}
|
||||
|
||||
// Prepares the GL/EGL side of the exported v4l2 buffers, depending on the
// negotiated pixel format:
//  - YUYV: imports each dma-buf fd as a single-plane EGL image and creates one
//    external texture to sample them through (zero-copy path).
//  - MJPEG: mmaps each buffer for CPU access (jpeg decoding happens on the CPU
//    via libturbojpeg) and creates a regular RGBA texture plus PBOs for upload.
// Returns false on any EGL/GL/mmap failure; the caller is expected to run
// gsr_capture_v4l2_stop to release whatever was created up to that point.
static bool gsr_capture_v4l2_map_buffer(gsr_capture_v4l2 *self, const struct v4l2_format *fmt) {
    switch(self->params.pixfmt) {
        case GSR_CAPTURE_V4L2_PIXFMT_AUTO: {
            // AUTO must have been resolved to a concrete format by
            // gsr_capture_v4l2_validate_pixfmt before we get here.
            assert(false);
            return false;
        }
        case GSR_CAPTURE_V4L2_PIXFMT_YUYV: {
            for(int i = 0; i < NUM_BUFFERS; ++i) {
                // YUYV is a single interleaved plane, hence only plane 0 attributes.
                self->dma_image[i] = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, (intptr_t[]) {
                    EGL_WIDTH, fmt->fmt.pix.width,
                    EGL_HEIGHT, fmt->fmt.pix.height,
                    EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_YUYV,
                    EGL_DMA_BUF_PLANE0_FD_EXT, self->dmabuf_fd[i],
                    EGL_DMA_BUF_PLANE0_OFFSET_EXT, 0,
                    EGL_DMA_BUF_PLANE0_PITCH_EXT, fmt->fmt.pix.bytesperline,
                    EGL_NONE
                });
                if(!self->dma_image[i]) {
                    fprintf(stderr, "gsr error: gsr_capture_v4l2_map_buffer: eglCreateImage failed, error: %d\n", self->params.egl->eglGetError());
                    return false;
                }
            }

            // dma-buf backed EGL images are sampled through the external texture target.
            self->params.egl->glGenTextures(1, &self->texture_id);
            self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, self->texture_id);
            self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
            if(self->texture_id == 0) {
                fprintf(stderr, "gsr error: gsr_capture_v4l2_map_buffer: failed to create texture\n");
                return false;
            }

            self->buffer_type = V4L2_BUFFER_TYPE_DMABUF;
            break;
        }
        case GSR_CAPTURE_V4L2_PIXFMT_MJPEG: {
            for(int i = 0; i < NUM_BUFFERS; ++i) {
                self->dmabuf_size[i] = fmt->fmt.pix.sizeimage;
                self->dmabuf_map[i] = mmap(NULL, fmt->fmt.pix.sizeimage, PROT_READ, MAP_SHARED, self->dmabuf_fd[i], 0);
                if(self->dmabuf_map[i] == MAP_FAILED) {
                    // NOTE(review): dmabuf_map[i] is left as MAP_FAILED here, which
                    // gsr_capture_v4l2_stop treats as a valid mapping and passes to
                    // munmap (harmless EINVAL) — consider resetting it to NULL.
                    fprintf(stderr, "gsr error: gsr_capture_v4l2_map_buffer: mmap failed, error: %s\n", strerror(errno));
                    return false;
                }
            }

            // GL_RGBA is intentionally used here instead of GL_RGB, because the performance is much better when using glTexSubImage2D (22% cpu usage compared to 38% cpu usage)
            self->texture_id = gl_create_texture(self->params.egl, fmt->fmt.pix.width, fmt->fmt.pix.height, GL_RGBA8, GL_RGBA, GL_LINEAR);
            if(self->texture_id == 0) {
                fprintf(stderr, "gsr error: gsr_capture_v4l2_map_buffer: failed to create texture\n");
                return false;
            }

            if(!gsr_capture_v4l2_create_pbos(self, fmt->fmt.pix.width, fmt->fmt.pix.height))
                return false;

            self->buffer_type = V4L2_BUFFER_TYPE_MMAP;
            break;
        }
    }
    return true;
}
|
||||
|
||||
// Opens the camera and brings up the whole v4l2 capture pipeline:
// device validation, crop reset, pixel format negotiation, optional
// libturbojpeg loading (mjpeg only), format configuration, buffer
// request/export, GL/EGL mapping, buffer queueing and stream start.
// Returns 0 on success, -1 on failure. On failure the resources acquired so
// far (fd, library handles, buffers, ...) remain in self; the caller is
// expected to release them with gsr_capture_v4l2_stop.
static int gsr_capture_v4l2_setup(gsr_capture_v4l2 *self) {
    // O_NONBLOCK so VIDIOC_DQBUF in the capture function never blocks the recording loop.
    self->fd = open(self->params.device_path, O_RDWR | O_NONBLOCK);
    if(self->fd < 0) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: failed to open %s, error: %s\n", self->params.device_path, strerror(errno));
        return -1;
    }

    struct v4l2_capability cap = {0};
    if(xioctl(self->fd, VIDIOC_QUERYCAP, &cap) == -1) {
        // EINVAL from QUERYCAP means the node isn't a v4l2 device at all.
        if(EINVAL == errno) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s isn't a v4l2 device\n", self->params.device_path);
            return -1;
        } else {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_QUERYCAP failed, error: %s\n", strerror(errno));
            return -1;
        }
    }

    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s isn't a video capture device\n", self->params.device_path);
        return -1;
    }

    // Streaming (mmap/dmabuf) i/o is required; read()-only devices are rejected.
    if(!(cap.capabilities & V4L2_CAP_STREAMING)) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support streaming i/o\n", self->params.device_path);
        return -1;
    }

    gsr_capture_v4l2_reset_cropping(self);

    // Resolve GSR_CAPTURE_V4L2_PIXFMT_AUTO / verify the requested format.
    const gsr_capture_v4l2_supported_pixfmts supported_pixfmts = gsr_capture_v4l2_get_supported_pixfmts(self->fd);
    if(!gsr_capture_v4l2_validate_pixfmt(self, supported_pixfmts))
        return -1;

    // mjpeg frames are decoded on the CPU with libturbojpeg, loaded at runtime
    // so the library is only a dependency when actually needed.
    if(self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG) {
        dlerror(); /* clear */
        self->libturbojpeg_lib = dlopen("libturbojpeg.so.0", RTLD_LAZY);
        if(!self->libturbojpeg_lib) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: failed to load libturbojpeg.so.0 which is required for camera mjpeg capture, error: %s\n", dlerror());
            return -1;
        }

        self->tjInitDecompress = (FUNC_tjInitDecompress)dlsym(self->libturbojpeg_lib, "tjInitDecompress");
        self->tjDestroy = (FUNC_tjDestroy)dlsym(self->libturbojpeg_lib, "tjDestroy");
        self->tjDecompressHeader2 = (FUNC_tjDecompressHeader2)dlsym(self->libturbojpeg_lib, "tjDecompressHeader2");
        self->tjDecompress2 = (FUNC_tjDecompress2)dlsym(self->libturbojpeg_lib, "tjDecompress2");
        self->tjGetErrorStr2 = (FUNC_tjGetErrorStr2)dlsym(self->libturbojpeg_lib, "tjGetErrorStr2");

        if(!self->tjInitDecompress || !self->tjDestroy || !self->tjDecompressHeader2 || !self->tjDecompress2 || !self->tjGetErrorStr2) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: libturbojpeg.so.0 is missing functions. The libturbojpeg version installed on your system might be outdated\n");
            return -1;
        }

        self->jpeg_decompressor = self->tjInitDecompress();
        if(!self->jpeg_decompressor) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: failed to create jpeg decompressor\n");
            return -1;
        }
    }

    const uint32_t v4l2_pixfmt = gsr_pixfmt_to_v4l2_pixfmt(self->params.pixfmt);
    // Width/height are left 0: the driver fills in its current/default frame size.
    struct v4l2_format fmt = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .fmt.pix.pixelformat = v4l2_pixfmt
    };
    if(xioctl(self->fd, VIDIOC_S_FMT, &fmt) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_S_FMT failed, error: %s\n", strerror(errno));
        return -1;
    }

    // VIDIOC_S_FMT may succeed while substituting another format; treat that as failure.
    if(fmt.fmt.pix.pixelformat != v4l2_pixfmt) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: pixel format isn't as requested (got pixel format: %u, requested: %u), error: %s\n", fmt.fmt.pix.pixelformat, v4l2_pixfmt, strerror(errno));
        return -1;
    }

    /* Buggy driver paranoia */
    // yuyv stores 2 bytes per pixel (a Y0 U Y1 V group covers 2 pixels in 4 bytes),
    // so the minimum valid stride is width * 2.
    // NOTE(review): this adjustment also runs for mjpeg, where bytesperline/sizeimage
    // describe compressed data — confirm it's intended there.
    const uint32_t min_stride = fmt.fmt.pix.width * 2; // * 2 because the stride is width (Y) + width/2 (U) + width/2 (V)
    if(fmt.fmt.pix.bytesperline < min_stride)
        fmt.fmt.pix.bytesperline = min_stride;

    const uint32_t min_size = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
    if(fmt.fmt.pix.sizeimage < min_size)
        fmt.fmt.pix.sizeimage = min_size;

    self->capture_size.x = fmt.fmt.pix.width;
    self->capture_size.y = fmt.fmt.pix.height;

    // Ask the driver for NUM_BUFFERS mmap-able capture buffers.
    struct v4l2_requestbuffers reqbuf = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP,
        .count = NUM_BUFFERS
    };
    if(xioctl(self->fd, VIDIOC_REQBUFS, &reqbuf) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_REQBUFS failed, error: %s\n", strerror(errno));
        return -1;
    }

    // Export each driver buffer as a dma-buf fd (used directly by the yuyv EGL
    // path, and as the mmap target for the mjpeg path).
    for(int i = 0; i < NUM_BUFFERS; ++i) {
        struct v4l2_exportbuffer expbuf = {
            .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index = i,
            .flags = O_RDONLY
        };
        if(xioctl(self->fd, VIDIOC_EXPBUF, &expbuf) == -1) {
            fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_EXPBUF failed, error: %s\n", strerror(errno));
            return -1;
        }
        self->dmabuf_fd[i] = expbuf.fd;
    }

    if(!gsr_capture_v4l2_map_buffer(self, &fmt))
        return -1;

    // Queue every buffer so the driver can start filling them.
    // NOTE(review): VIDIOC_QBUF results are ignored here; a failure would
    // surface later as the 5 second first-frame timeout.
    for(int i = 0; i < NUM_BUFFERS; ++i) {
        struct v4l2_buffer buf = {
            .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index = i,
            .memory = V4L2_MEMORY_MMAP
        };
        xioctl(self->fd, VIDIOC_QBUF, &buf);
    }

    if(xioctl(self->fd, VIDIOC_STREAMON, &(enum v4l2_buf_type){V4L2_BUF_TYPE_VIDEO_CAPTURE})) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_STREAMON failed, error: %s\n", strerror(errno));
        return -1;
    }

    fprintf(stderr, "gsr info: gsr_capture_v4l2_create: waiting for camera %s to be ready\n", self->params.device_path);
    return 0;
}
|
||||
|
||||
/* Opens and configures the camera. On setup failure every partially acquired
   resource is released before returning the error. The reported video size is
   either the camera's native size (no output resolution requested) or the
   requested resolution scaled to keep the camera's aspect ratio. Also starts
   the first-frame timeout clock used by the tick function. */
static int gsr_capture_v4l2_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
    gsr_capture_v4l2 *self = cap->priv;

    const int setup_result = gsr_capture_v4l2_setup(self);
    if(setup_result != 0) {
        gsr_capture_v4l2_stop(self);
        return setup_result;
    }

    const bool use_native_size = self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0;
    if(use_native_size) {
        capture_metadata->video_size = self->capture_size;
    } else {
        self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
        capture_metadata->video_size = self->params.output_resolution;
    }

    self->capture_start_time = clock_get_monotonic_seconds();
    return 0;
}
|
||||
|
||||
/* Runs once per capture iteration. Flags the capture to stop with an error if
   the camera hasn't delivered a single frame within the startup timeout. */
static void gsr_capture_v4l2_tick(gsr_capture *cap) {
    gsr_capture_v4l2 *self = cap->priv;
    if(self->got_first_frame || self->should_stop)
        return;

    const double timeout_sec = 5.0;
    const double waited_sec = clock_get_monotonic_seconds() - self->capture_start_time;
    if(waited_sec >= timeout_sec) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_capture: didn't receive camera data in %f seconds\n", timeout_sec);
        self->should_stop = true;
        self->stop_is_error = true;
    }
}
|
||||
|
||||
// Decompresses one mjpeg camera frame into the capture texture using a
// ping-pong pair of pixel buffer objects: glTexSubImage2D uploads from the PBO
// filled on the previous call while the current jpeg is decoded into the other
// PBO, keeping the CPU decode and the GPU transfer overlapped.
// Frames whose header fails to parse or whose dimensions don't match the
// negotiated capture size are dropped (texture keeps the previous frame).
static void gsr_capture_v4l2_decode_jpeg_to_texture(gsr_capture_v4l2 *self, const struct v4l2_buffer *buf) {
    int jpeg_subsamp = 0;
    int jpeg_width = 0;
    int jpeg_height = 0;
    if(self->tjDecompressHeader2(self->jpeg_decompressor, self->dmabuf_map[buf->index], buf->bytesused, &jpeg_width, &jpeg_height, &jpeg_subsamp) != 0) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_capture: failed to decompress camera jpeg header data, error: %s\n", self->tjGetErrorStr2(self->jpeg_decompressor));
        return;
    }

    if(jpeg_width != self->capture_size.x || jpeg_height != self->capture_size.y) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_capture: got jpeg data of incorrect dimensions. Expected %dx%d, got %dx%d\n", self->capture_size.x, self->capture_size.y, jpeg_width, jpeg_height);
        return;
    }

    self->params.egl->glBindTexture(GL_TEXTURE_2D, self->texture_id);

    // Advance the ping-pong: pbo_index is uploaded this frame, next_pbo_index
    // receives this frame's decoded pixels (and is uploaded next frame).
    self->pbo_index = (self->pbo_index + 1) % NUM_PBOS;
    const unsigned int next_pbo_index = (self->pbo_index + 1) % NUM_PBOS;

    // data == NULL makes glTexSubImage2D read from the bound PBO instead of client memory.
    self->params.egl->glBindBuffer(GL_PIXEL_UNPACK_BUFFER, self->pbos[self->pbo_index]);
    self->params.egl->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, self->capture_size.x, self->capture_size.y, GL_RGBA, GL_UNSIGNED_BYTE, NULL);

    // Orphan the next PBO's storage so mapping it doesn't stall on the GPU.
    self->params.egl->glBindBuffer(GL_PIXEL_UNPACK_BUFFER, self->pbos[next_pbo_index]);
    self->params.egl->glBufferData(GL_PIXEL_UNPACK_BUFFER, self->capture_size.x * self->capture_size.y * 4, 0, GL_DYNAMIC_DRAW);

    // Decode straight into the mapped PBO memory (RGBA, see map_buffer for why not RGB).
    void *mapped_buffer = self->params.egl->glMapBuffer(GL_PIXEL_UNPACK_BUFFER, GL_WRITE_ONLY);
    if(mapped_buffer) {
        if(self->tjDecompress2(self->jpeg_decompressor, self->dmabuf_map[buf->index], buf->bytesused, mapped_buffer, jpeg_width, 0, jpeg_height, TJPF_RGBA, TJFLAG_FASTDCT) != 0)
            fprintf(stderr, "gsr error: gsr_capture_v4l2_capture: failed to decompress camera jpeg data, error: %s\n", self->tjGetErrorStr2(self->jpeg_decompressor));
        self->params.egl->glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER);
    }

    self->params.egl->glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
    self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
}
|
||||
|
||||
/* Dequeues the next camera frame if one is ready, uploads it to the capture
   texture (zero-copy EGL image rebind for yuyv, CPU jpeg decode + PBO upload
   for mjpeg) and draws the texture into the color conversion output, scaled to
   fit while keeping the camera's aspect ratio.
   Returns 0 once at least one frame has been received, -1 before that. */
static int gsr_capture_v4l2_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
    gsr_capture_v4l2 *self = cap->priv;

    struct v4l2_buffer buf = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP
    };

    // The fd is non-blocking, so VIDIOC_DQBUF fails (EAGAIN) when no new frame
    // is available yet. Only requeue when a buffer was actually dequeued;
    // otherwise buf.index is 0 and we would requeue a buffer the driver may
    // still own.
    if(xioctl(self->fd, VIDIOC_DQBUF, &buf) == 0) {
        if(buf.bytesused > 0 && !(buf.flags & V4L2_BUF_FLAG_ERROR)) {
            if(!self->got_first_frame)
                fprintf(stderr, "gsr info: gsr_capture_v4l2_capture: camera %s is now ready\n", self->params.device_path);
            self->got_first_frame = true;

            switch(self->buffer_type) {
                case V4L2_BUFFER_TYPE_DMABUF: {
                    // yuyv: point the external texture at the freshly filled
                    // dma-buf EGL image — no pixel copy.
                    self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, self->texture_id);
                    self->params.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, self->dma_image[buf.index]);
                    self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
                    break;
                }
                case V4L2_BUFFER_TYPE_MMAP: {
                    //xioctl(self->dmabuf_fd[buf.index], DMA_BUF_IOCTL_SYNC, &(struct dma_buf_sync){ .flags = DMA_BUF_SYNC_START });
                    gsr_capture_v4l2_decode_jpeg_to_texture(self, &buf);
                    //xioctl(self->dmabuf_fd[buf.index], DMA_BUF_IOCTL_SYNC, &(struct dma_buf_sync){ .flags = DMA_BUF_SYNC_END });
                    break;
                }
            }
        }
        // Hand the buffer back to the driver so it can be filled again.
        xioctl(self->fd, VIDIOC_QBUF, &buf);
    }

    const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, capture_metadata->recording_size);
    const vec2i target_pos = gsr_capture_get_target_position(output_size, capture_metadata);

    // Draw even when no new frame arrived; the texture still holds the previous frame.
    gsr_color_conversion_draw(color_conversion, self->texture_id,
        target_pos, output_size,
        (vec2i){0, 0}, self->capture_size, self->capture_size,
        GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, self->buffer_type == V4L2_BUFFER_TYPE_DMABUF);

    return self->got_first_frame ? 0 : -1;
}
|
||||
|
||||
/* The yuyv path samples the camera frame through GL_TEXTURE_EXTERNAL_OES, so
   the color conversion shaders must be built for external images. */
static bool gsr_capture_v4l2_uses_external_image(gsr_capture *cap) {
    (void)cap; /* same answer for every v4l2 capture instance */
    return true;
}
|
||||
|
||||
/* Reports whether the capture should end. *err (optional) is set to true when
   the stop was caused by an error (e.g. the first-frame timeout) rather than a
   normal shutdown. */
static bool gsr_capture_v4l2_should_stop(gsr_capture *cap, bool *err) {
    const gsr_capture_v4l2 *self = cap->priv;
    if(err)
        *err = self->stop_is_error;
    return self->should_stop;
}
|
||||
|
||||
/* A new camera frame is pending exactly when the v4l2 fd is readable. Uses a
   zero-timeout poll so this never blocks the capture loop. */
static bool gsr_capture_v4l2_is_damaged(gsr_capture *cap) {
    gsr_capture_v4l2 *self = cap->priv;
    struct pollfd camera_poll = {
        .fd = self->fd,
        .events = POLLIN,
        .revents = 0
    };
    if(poll(&camera_poll, 1, 0) <= 0)
        return false;
    return (camera_poll.revents & POLLIN) != 0;
}
|
||||
|
||||
/* Intentionally empty: "damage" is implicitly cleared when the pending frame
   is dequeued in gsr_capture_v4l2_capture. */
static void gsr_capture_v4l2_clear_damage(gsr_capture *cap) {
    (void)cap;
}
|
||||
|
||||
/* Releases the capture's resources and frees both the private state and the
   capture object itself. Safe to call even if start was never run. */
static void gsr_capture_v4l2_destroy(gsr_capture *cap) {
    gsr_capture_v4l2 *self = cap->priv;
    if(self) {
        gsr_capture_v4l2_stop(self);
        free(self);
        cap->priv = NULL;
    }
    free(cap);
}
|
||||
|
||||
gsr_capture* gsr_capture_v4l2_create(const gsr_capture_v4l2_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_v4l2_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_v4l2 *cap_camera = calloc(1, sizeof(gsr_capture_v4l2));
|
||||
if(!cap_camera) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_camera->params = *params;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_v4l2_start,
|
||||
.tick = gsr_capture_v4l2_tick,
|
||||
.should_stop = gsr_capture_v4l2_should_stop,
|
||||
.capture = gsr_capture_v4l2_capture,
|
||||
.uses_external_image = gsr_capture_v4l2_uses_external_image,
|
||||
.is_damaged = gsr_capture_v4l2_is_damaged,
|
||||
.clear_damage = gsr_capture_v4l2_clear_damage,
|
||||
.destroy = gsr_capture_v4l2_destroy,
|
||||
.priv = cap_camera
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
|
||||
/* Probes /dev/video0 .. /dev/video7 and invokes callback for every node that
   is a streaming video capture device with at least one usable pixel format:
   yuyv always counts, mjpeg only when libturbojpeg is available to decode it. */
void gsr_capture_v4l2_list_devices(v4l2_devices_query_callback callback, void *userdata) {
    /* Probe once whether mjpeg decoding would work at all */
    void *jpeg_lib = dlopen("libturbojpeg.so.0", RTLD_LAZY);
    const bool mjpeg_usable = jpeg_lib != NULL;
    if(jpeg_lib)
        dlclose(jpeg_lib);

    for(int device_index = 0; device_index < 8; ++device_index) {
        char device_path[128];
        snprintf(device_path, sizeof(device_path), "/dev/video%d", device_index);

        const int fd = open(device_path, O_RDWR | O_NONBLOCK);
        if(fd < 0)
            continue;

        struct v4l2_capability cap = {0};
        struct v4l2_format fmt = {
            .type = V4L2_BUF_TYPE_VIDEO_CAPTURE
        };
        /* Same checks as the goto-based original, short-circuited in order */
        const bool usable_capture_device =
            xioctl(fd, VIDIOC_QUERYCAP, &cap) == 0 &&
            (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
            (cap.capabilities & V4L2_CAP_STREAMING) &&
            xioctl(fd, VIDIOC_G_FMT, &fmt) == 0;

        if(usable_capture_device) {
            const gsr_capture_v4l2_supported_pixfmts supported_pixfmts = gsr_capture_v4l2_get_supported_pixfmts(fd);
            if(supported_pixfmts.yuyv || (supported_pixfmts.mjpeg && mjpeg_usable))
                callback(device_path, supported_pixfmts, (vec2i){ fmt.fmt.pix.width, fmt.fmt.pix.height }, userdata);
        }

        close(fd);
    }
}
|
||||
@@ -23,6 +23,7 @@ typedef struct {
|
||||
bool init_new_window;
|
||||
|
||||
Window window;
|
||||
vec2i window_pos;
|
||||
vec2i window_size;
|
||||
vec2i texture_size;
|
||||
double window_resize_timer;
|
||||
@@ -31,14 +32,11 @@ typedef struct {
|
||||
|
||||
Atom net_active_window_atom;
|
||||
|
||||
gsr_cursor cursor;
|
||||
|
||||
bool clear_background;
|
||||
} gsr_capture_xcomposite;
|
||||
|
||||
static void gsr_capture_xcomposite_stop(gsr_capture_xcomposite *self) {
|
||||
window_texture_deinit(&self->window_texture);
|
||||
gsr_cursor_deinit(&self->cursor);
|
||||
}
|
||||
|
||||
static int max_int(int a, int b) {
|
||||
@@ -81,6 +79,9 @@ static int gsr_capture_xcomposite_start(gsr_capture *cap, gsr_capture_metadata *
|
||||
return -1;
|
||||
}
|
||||
|
||||
self->window_pos.x = attr.x;
|
||||
self->window_pos.y = attr.y;
|
||||
|
||||
self->window_size.x = max_int(attr.width, 0);
|
||||
self->window_size.y = max_int(attr.height, 0);
|
||||
|
||||
@@ -95,20 +96,13 @@ static int gsr_capture_xcomposite_start(gsr_capture *cap, gsr_capture_metadata *
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(gsr_cursor_init(&self->cursor, self->params.egl, self->display) != 0) {
|
||||
gsr_capture_xcomposite_stop(self);
|
||||
return -1;
|
||||
}
|
||||
|
||||
self->texture_size.x = self->window_texture.window_width;
|
||||
self->texture_size.y = self->window_texture.window_height;
|
||||
|
||||
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
|
||||
capture_metadata->video_width = self->texture_size.x;
|
||||
capture_metadata->video_height = self->texture_size.y;
|
||||
capture_metadata->video_size = self->texture_size;
|
||||
} else {
|
||||
capture_metadata->video_width = self->params.output_resolution.x;
|
||||
capture_metadata->video_height = self->params.output_resolution.y;
|
||||
capture_metadata->video_size = self->params.output_resolution;
|
||||
}
|
||||
|
||||
self->window_resize_timer = clock_get_monotonic_seconds();
|
||||
@@ -137,6 +131,9 @@ static void gsr_capture_xcomposite_tick(gsr_capture *cap) {
|
||||
if(!XGetWindowAttributes(self->display, self->window, &attr))
|
||||
fprintf(stderr, "gsr error: gsr_capture_xcomposite_tick failed: invalid window id: %lu\n", self->window);
|
||||
|
||||
self->window_pos.x = attr.x;
|
||||
self->window_pos.y = attr.y;
|
||||
|
||||
self->window_size.x = max_int(attr.width, 0);
|
||||
self->window_size.y = max_int(attr.height, 0);
|
||||
|
||||
@@ -190,6 +187,9 @@ static void gsr_capture_xcomposite_on_event(gsr_capture *cap, gsr_egl *egl) {
|
||||
break;
|
||||
}
|
||||
case ConfigureNotify: {
|
||||
self->window_pos.x = xev->xconfigure.x;
|
||||
self->window_pos.y = xev->xconfigure.y;
|
||||
|
||||
/* Window resized */
|
||||
if(xev->xconfigure.window == self->window && (xev->xconfigure.width != self->window_size.x || xev->xconfigure.height != self->window_size.y)) {
|
||||
self->window_size.x = max_int(xev->xconfigure.width, 0);
|
||||
@@ -207,8 +207,6 @@ static void gsr_capture_xcomposite_on_event(gsr_capture *cap, gsr_egl *egl) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
gsr_cursor_on_event(&self->cursor, xev);
|
||||
}
|
||||
|
||||
static bool gsr_capture_xcomposite_should_stop(gsr_capture *cap, bool *err) {
|
||||
@@ -224,16 +222,21 @@ static bool gsr_capture_xcomposite_should_stop(gsr_capture *cap, bool *err) {
|
||||
return false;
|
||||
}
|
||||
|
||||
static int gsr_capture_xcomposite_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
static void gsr_capture_xcomposite_pre_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
(void)capture_metadata;
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
|
||||
if(self->clear_background) {
|
||||
self->clear_background = false;
|
||||
gsr_color_conversion_clear(color_conversion);
|
||||
color_conversion->schedule_clear = true;
|
||||
}
|
||||
}
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->texture_size, (vec2i){capture_metadata->recording_width, capture_metadata->recording_height});
|
||||
const vec2i target_pos = { max_int(0, capture_metadata->video_width / 2 - output_size.x / 2), max_int(0, capture_metadata->video_height / 2 - output_size.y / 2) };
|
||||
static int gsr_capture_xcomposite_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->texture_size, capture_metadata->recording_size);
|
||||
const vec2i target_pos = gsr_capture_get_target_position(output_size, capture_metadata);
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
@@ -241,28 +244,28 @@ static int gsr_capture_xcomposite_capture(gsr_capture *cap, gsr_capture_metadata
|
||||
gsr_color_conversion_draw(color_conversion, window_texture_get_opengl_texture_id(&self->window_texture),
|
||||
target_pos, output_size,
|
||||
(vec2i){0, 0}, self->texture_size, self->texture_size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false);
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
if(self->params.record_cursor && self->cursor.visible) {
|
||||
if(self->params.record_cursor && self->params.cursor->visible) {
|
||||
const vec2d scale = {
|
||||
self->texture_size.x == 0 ? 0 : (double)output_size.x / (double)self->texture_size.x,
|
||||
self->texture_size.y == 0 ? 0 : (double)output_size.y / (double)self->texture_size.y
|
||||
};
|
||||
|
||||
gsr_cursor_tick(&self->cursor, self->window);
|
||||
|
||||
const vec2i cursor_pos = {
|
||||
target_pos.x + (self->cursor.position.x - self->cursor.hotspot.x) * scale.x,
|
||||
target_pos.y + (self->cursor.position.y - self->cursor.hotspot.y) * scale.y
|
||||
target_pos.x + (self->params.cursor->position.x - self->params.cursor->hotspot.x - self->window_pos.x) * scale.x,
|
||||
target_pos.y + (self->params.cursor->position.y - self->params.cursor->hotspot.y - self->window_pos.y) * scale.y
|
||||
};
|
||||
|
||||
if(cursor_pos.x < target_pos.x || cursor_pos.x + self->cursor.size.x > target_pos.x + output_size.x || cursor_pos.y < target_pos.y || cursor_pos.y + self->cursor.size.y > target_pos.y + output_size.y)
|
||||
self->clear_background = true;
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_pos.x, target_pos.y, output_size.x, output_size.y);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
|
||||
cursor_pos, (vec2i){self->cursor.size.x * scale.x, self->cursor.size.y * scale.y},
|
||||
(vec2i){0, 0}, self->cursor.size, self->cursor.size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false);
|
||||
gsr_color_conversion_draw(color_conversion, self->params.cursor->texture_id,
|
||||
cursor_pos, (vec2i){self->params.cursor->size.x * scale.x, self->params.cursor->size.y * scale.y},
|
||||
(vec2i){0, 0}, self->params.cursor->size, self->params.cursor->size,
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
@@ -309,6 +312,7 @@ gsr_capture* gsr_capture_xcomposite_create(const gsr_capture_xcomposite_params *
|
||||
.on_event = gsr_capture_xcomposite_on_event,
|
||||
.tick = gsr_capture_xcomposite_tick,
|
||||
.should_stop = gsr_capture_xcomposite_should_stop,
|
||||
.pre_capture = gsr_capture_xcomposite_pre_capture,
|
||||
.capture = gsr_capture_xcomposite_capture,
|
||||
.uses_external_image = NULL,
|
||||
.get_window_id = gsr_capture_xcomposite_get_window_id,
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
typedef struct {
|
||||
gsr_capture_ximage_params params;
|
||||
Display *display;
|
||||
gsr_cursor cursor;
|
||||
gsr_monitor monitor;
|
||||
vec2i capture_pos;
|
||||
vec2i capture_size;
|
||||
@@ -25,7 +24,6 @@ typedef struct {
|
||||
} gsr_capture_ximage;
|
||||
|
||||
static void gsr_capture_ximage_stop(gsr_capture_ximage *self) {
|
||||
gsr_cursor_deinit(&self->cursor);
|
||||
if(self->texture_id) {
|
||||
self->params.egl->glDeleteTextures(1, &self->texture_id);
|
||||
self->texture_id = 0;
|
||||
@@ -40,11 +38,6 @@ static int gsr_capture_ximage_start(gsr_capture *cap, gsr_capture_metadata *capt
|
||||
gsr_capture_ximage *self = cap->priv;
|
||||
self->root_window = DefaultRootWindow(self->display);
|
||||
|
||||
if(gsr_cursor_init(&self->cursor, self->params.egl, self->display) != 0) {
|
||||
gsr_capture_ximage_stop(self);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!get_monitor_by_name(self->params.egl, GSR_CONNECTION_X11, self->params.display_to_capture, &self->monitor)) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_ximage_start: failed to find monitor by name \"%s\"\n", self->params.display_to_capture);
|
||||
gsr_capture_ximage_stop(self);
|
||||
@@ -59,14 +52,11 @@ static int gsr_capture_ximage_start(gsr_capture *cap, gsr_capture_metadata *capt
|
||||
|
||||
if(self->params.output_resolution.x > 0 && self->params.output_resolution.y > 0) {
|
||||
self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
|
||||
capture_metadata->video_width = self->params.output_resolution.x;
|
||||
capture_metadata->video_height = self->params.output_resolution.y;
|
||||
capture_metadata->video_size = self->params.output_resolution;
|
||||
} else if(self->params.region_size.x > 0 && self->params.region_size.y > 0) {
|
||||
capture_metadata->video_width = self->params.region_size.x;
|
||||
capture_metadata->video_height = self->params.region_size.y;
|
||||
capture_metadata->video_size = self->params.region_size;
|
||||
} else {
|
||||
capture_metadata->video_width = self->capture_size.x;
|
||||
capture_metadata->video_height = self->capture_size.y;
|
||||
capture_metadata->video_size = self->capture_size;
|
||||
}
|
||||
|
||||
self->texture_id = gl_create_texture(self->params.egl, self->capture_size.x, self->capture_size.y, GL_RGB8, GL_RGB, GL_LINEAR);
|
||||
@@ -79,12 +69,6 @@ static int gsr_capture_ximage_start(gsr_capture *cap, gsr_capture_metadata *capt
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_ximage_on_event(gsr_capture *cap, gsr_egl *egl) {
|
||||
gsr_capture_ximage *self = cap->priv;
|
||||
XEvent *xev = gsr_window_get_event_data(egl->window);
|
||||
gsr_cursor_on_event(&self->cursor, xev);
|
||||
}
|
||||
|
||||
static bool gsr_capture_ximage_upload_to_texture(gsr_capture_ximage *self, int x, int y, int width, int height) {
|
||||
const int max_width = XWidthOfScreen(DefaultScreenOfDisplay(self->display));
|
||||
const int max_height = XHeightOfScreen(DefaultScreenOfDisplay(self->display));
|
||||
@@ -137,6 +121,7 @@ static bool gsr_capture_ximage_upload_to_texture(gsr_capture_ximage *self, int x
|
||||
}
|
||||
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, self->texture_id);
|
||||
// TODO: Change to GL_RGBA for better performance? image_data needs alpha then as well
|
||||
self->params.egl->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, image->width, image->height, GL_RGB, GL_UNSIGNED_BYTE, image_data);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
success = true;
|
||||
@@ -150,35 +135,33 @@ static bool gsr_capture_ximage_upload_to_texture(gsr_capture_ximage *self, int x
|
||||
static int gsr_capture_ximage_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_ximage *self = cap->priv;
|
||||
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, (vec2i){capture_metadata->recording_width, capture_metadata->recording_height});
|
||||
const vec2i target_pos = { max_int(0, capture_metadata->video_width / 2 - output_size.x / 2), max_int(0, capture_metadata->video_height / 2 - output_size.y / 2) };
|
||||
const vec2i output_size = scale_keep_aspect_ratio(self->capture_size, capture_metadata->recording_size);
|
||||
const vec2i target_pos = gsr_capture_get_target_position(output_size, capture_metadata);
|
||||
gsr_capture_ximage_upload_to_texture(self, self->capture_pos.x + self->params.region_position.x, self->capture_pos.y + self->params.region_position.y, self->capture_size.x, self->capture_size.y);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->texture_id,
|
||||
target_pos, output_size,
|
||||
(vec2i){0, 0}, self->capture_size, self->capture_size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false);
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
if(self->params.record_cursor && self->cursor.visible) {
|
||||
if(self->params.record_cursor && self->params.cursor->visible) {
|
||||
const vec2d scale = {
|
||||
self->capture_size.x == 0 ? 0 : (double)output_size.x / (double)self->capture_size.x,
|
||||
self->capture_size.y == 0 ? 0 : (double)output_size.y / (double)self->capture_size.y
|
||||
};
|
||||
|
||||
gsr_cursor_tick(&self->cursor, self->root_window);
|
||||
|
||||
const vec2i cursor_pos = {
|
||||
target_pos.x + (self->cursor.position.x - self->cursor.hotspot.x) * scale.x - self->capture_pos.x - self->params.region_position.x,
|
||||
target_pos.y + (self->cursor.position.y - self->cursor.hotspot.y) * scale.y - self->capture_pos.y - self->params.region_position.y
|
||||
target_pos.x + (self->params.cursor->position.x - self->params.cursor->hotspot.x) * scale.x - self->capture_pos.x - self->params.region_position.x,
|
||||
target_pos.y + (self->params.cursor->position.y - self->params.cursor->hotspot.y) * scale.y - self->capture_pos.y - self->params.region_position.y
|
||||
};
|
||||
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_pos.x, target_pos.y, output_size.x, output_size.y);
|
||||
|
||||
gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
|
||||
cursor_pos, (vec2i){self->cursor.size.x * scale.x, self->cursor.size.y * scale.y},
|
||||
(vec2i){0, 0}, self->cursor.size, self->cursor.size,
|
||||
GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false);
|
||||
gsr_color_conversion_draw(color_conversion, self->params.cursor->texture_id,
|
||||
cursor_pos, (vec2i){self->params.cursor->size.x * scale.x, self->params.cursor->size.y * scale.y},
|
||||
(vec2i){0, 0}, self->params.cursor->size, self->params.cursor->size,
|
||||
GSR_ROT_0, capture_metadata->flip, GSR_SOURCE_COLOR_RGB, false);
|
||||
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
@@ -230,7 +213,6 @@ gsr_capture* gsr_capture_ximage_create(const gsr_capture_ximage_params *params)
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_ximage_start,
|
||||
.on_event = gsr_capture_ximage_on_event,
|
||||
.tick = NULL,
|
||||
.should_stop = NULL,
|
||||
.capture = gsr_capture_ximage_capture,
|
||||
|
||||
Reference in New Issue
Block a user