v4l2: select best camera option by default

Display all camera options in --info and --list-v4l2-devices.
Add camera_width, camera_height and camera_fps v4l2 capture options.
This commit is contained in:
dec05eba
2026-01-15 20:03:23 +01:00
parent b2f0d13512
commit 8af761f9bd
5 changed files with 406 additions and 70 deletions

View File

@@ -43,6 +43,11 @@ typedef enum {
V4L2_BUFFER_TYPE_MMAP
} v4l2_buffer_type;
typedef struct {
bool yuyv;
bool mjpeg;
} gsr_capture_v4l2_supported_pixfmts;
typedef struct {
gsr_capture_v4l2_params params;
vec2i capture_size;
@@ -157,7 +162,35 @@ static void gsr_capture_v4l2_reset_cropping(gsr_capture_v4l2 *self) {
}
}
gsr_capture_v4l2_supported_pixfmts gsr_capture_v4l2_get_supported_pixfmts(int fd) {
/*
 * Maps the internal pixel format enum to the corresponding V4L2 fourcc.
 * GSR_CAPTURE_V4L2_PIXFMT_AUTO must be resolved to a concrete format before
 * this is called; passing it is a programming error (asserts in debug builds,
 * falls back to YUYV in release builds).
 */
static uint32_t gsr_pixfmt_to_v4l2_pixfmt(gsr_capture_v4l2_pixfmt pixfmt) {
    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_YUYV)
        return V4L2_PIX_FMT_YUYV;

    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG)
        return V4L2_PIX_FMT_MJPEG;

    /* AUTO (or an unknown value) should never reach this point */
    assert(false);
    return V4L2_PIX_FMT_YUYV;
}
/*
 * Returns a human-readable name for the pixel format, as used in log output
 * and in the --list-v4l2-devices listing.
 * GSR_CAPTURE_V4L2_PIXFMT_AUTO is not a concrete format and must not be
 * passed here (asserts in debug builds, returns "" in release builds).
 */
const char* gsr_capture_v4l2_pixfmt_to_string(gsr_capture_v4l2_pixfmt pixfmt) {
    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_YUYV)
        return "yuyv";

    if(pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG)
        return "mjpeg";

    /* AUTO (or an unknown value) should never reach this point */
    assert(false);
    return "";
}
static gsr_capture_v4l2_supported_pixfmts gsr_capture_v4l2_get_supported_pixfmts(int fd) {
gsr_capture_v4l2_supported_pixfmts result = {0};
struct v4l2_fmtdesc fmt = {
@@ -179,28 +212,189 @@ gsr_capture_v4l2_supported_pixfmts gsr_capture_v4l2_get_supported_pixfmts(int fd
return result;
}
static uint32_t gsr_pixfmt_to_v4l2_pixfmt(gsr_capture_v4l2_pixfmt pixfmt) {
switch(pixfmt) {
case GSR_CAPTURE_V4L2_PIXFMT_AUTO:
assert(false);
break;
case GSR_CAPTURE_V4L2_PIXFMT_YUYV:
return V4L2_PIX_FMT_YUYV;
case GSR_CAPTURE_V4L2_PIXFMT_MJPEG:
return V4L2_PIX_FMT_MJPEG;
/*
 * Enumerates the discrete frame sizes the device supports for |pixfmt| via
 * VIDIOC_ENUM_FRAMESIZES and stores up to |max_resolutions| of them in
 * |resolutions|. Non-discrete (stepwise/continuous) sizes are skipped.
 * Returns the number of resolutions added.
 */
static size_t gsr_capture_v4l2_get_supported_resolutions(int fd, gsr_capture_v4l2_pixfmt pixfmt, gsr_capture_v4l2_resolution *resolutions, size_t max_resolutions) {
    size_t resolution_index = 0;
    struct v4l2_frmsizeenum fmt = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .pixel_format = gsr_pixfmt_to_v4l2_pixfmt(pixfmt),
    };
    /* The ioctl is iterated by bumping fmt.index until the driver errors out */
    while(xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fmt) == 0) {
        if(fmt.type == V4L2_FRMSIZE_TYPE_DISCRETE && resolution_index < max_resolutions) {
            resolutions[resolution_index] = (gsr_capture_v4l2_resolution){
                .width = fmt.discrete.width,
                .height = fmt.discrete.height,
            };
            ++resolution_index;
        }
        ++fmt.index;
    }
    return resolution_index;
}
static bool gsr_capture_v4l2_validate_pixfmt(gsr_capture_v4l2 *self, const gsr_capture_v4l2_supported_pixfmts supported_pixfmts) {
/*
 * Enumerates the discrete frame intervals the device supports for the given
 * pixel format and resolution via VIDIOC_ENUM_FRAMEINTERVALS and stores up to
 * |max_framerates| of them in |framerates|. Intervals with a zero numerator or
 * denominator, and non-discrete intervals, are skipped.
 * Returns the number of framerates added.
 */
static size_t gsr_capture_v4l2_get_supported_framerates(int fd, gsr_capture_v4l2_pixfmt pixfmt, gsr_capture_v4l2_resolution resolution, gsr_capture_v4l2_framerate *framerates, size_t max_framerates) {
    struct v4l2_frmivalenum ival = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .pixel_format = gsr_pixfmt_to_v4l2_pixfmt(pixfmt),
        .width = resolution.width,
        .height = resolution.height,
    };
    size_t count = 0;
    /* The ioctl is iterated by bumping ival.index until the driver errors out */
    for(; xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) == 0; ++ival.index) {
        if(ival.type != V4L2_FRMIVAL_TYPE_DISCRETE)
            continue;
        if(ival.discrete.denominator == 0 || ival.discrete.numerator == 0)
            continue;
        if(count >= max_framerates)
            continue;
        framerates[count++] = (gsr_capture_v4l2_framerate){
            .denominator = ival.discrete.denominator,
            .numerator = ival.discrete.numerator,
        };
    }
    return count;
}
/*
 * Builds the full list of capture setups (pixfmt x resolution x framerate
 * combinations) the device supports, storing up to |max_supported_setups|
 * entries in |supported_setups|. MJPEG is only considered when libturbojpeg
 * is available, since it is needed for decoding.
 * Returns the number of setups added.
 *
 * NOTE(review): resolutions/framerates are capped at 32 entries each per
 * enumeration level — combinations beyond these caps are silently dropped.
 */
static size_t gsr_capture_v4l2_get_supported_setups(int fd, gsr_capture_v4l2_supported_setup *supported_setups, size_t max_supported_setups, bool has_libturbojpeg_lib) {
    const gsr_capture_v4l2_supported_pixfmts supported_pixfmts = gsr_capture_v4l2_get_supported_pixfmts(fd);
    /* Collect the usable pixel formats first (at most 2: yuyv and mjpeg) */
    size_t num_pixfmts = 0;
    gsr_capture_v4l2_pixfmt pixfmts[2];
    if(supported_pixfmts.yuyv)
        pixfmts[num_pixfmts++] = GSR_CAPTURE_V4L2_PIXFMT_YUYV;
    if(supported_pixfmts.mjpeg && has_libturbojpeg_lib)
        pixfmts[num_pixfmts++] = GSR_CAPTURE_V4L2_PIXFMT_MJPEG;
    gsr_capture_v4l2_resolution resolutions[32];
    gsr_capture_v4l2_framerate framerates[32];
    size_t supported_setup_index = 0;
    /* For each pixfmt, enumerate its resolutions, then each resolution's framerates */
    for(size_t pixfmt_index = 0; pixfmt_index < num_pixfmts; ++pixfmt_index) {
        const gsr_capture_v4l2_pixfmt pixfmt = pixfmts[pixfmt_index];
        const size_t num_resolutions = gsr_capture_v4l2_get_supported_resolutions(fd, pixfmt, resolutions, 32);
        for(size_t resolution_index = 0; resolution_index < num_resolutions; ++resolution_index) {
            const gsr_capture_v4l2_resolution resolution = resolutions[resolution_index];
            const size_t num_framerates = gsr_capture_v4l2_get_supported_framerates(fd, pixfmt, resolution, framerates, 32);
            for(size_t framerate_index = 0; framerate_index < num_framerates; ++framerate_index) {
                const gsr_capture_v4l2_framerate framerate = framerates[framerate_index];
                if(supported_setup_index < max_supported_setups) {
                    supported_setups[supported_setup_index] = (gsr_capture_v4l2_supported_setup){
                        .pixfmt = pixfmt,
                        .resolution = resolution,
                        .framerate = framerate,
                    };
                    ++supported_setup_index;
                }
            }
        }
    }
    return supported_setup_index;
}
/*
 * Converts a rational framerate (denominator/numerator, the V4L2 frame
 * interval convention) to a plain integer fps value, truncating any
 * fractional part (e.g. 30000/1001 -> 29).
 * Returns 0 for an invalid (zero-numerator) framerate instead of performing
 * a division by zero, whose float-to-integer conversion would be undefined
 * behavior.
 */
uint32_t gsr_capture_v4l2_framerate_to_number(gsr_capture_v4l2_framerate framerate) {
    if(framerate.numerator == 0)
        return 0;
    return (uint32_t)((double)framerate.denominator / (double)framerate.numerator);
}
/*
 * Selects the supported setup that best matches the user's requested pixfmt,
 * fps and resolution. The result is written to |best_supported_setup|.
 * Returns false when no setup matches (e.g. the device reports nothing, or a
 * non-auto pixfmt filters everything out).
 *
 * Scoring: score = width * height * fps, so without any user preference the
 * largest resolution at the highest framerate wins. A dimension that exactly
 * matches the user's request is replaced by the sentinel value 50000, which
 * dominates any real camera dimension and strongly favors exact matches.
 */
static bool gsr_capture_v4l2_get_best_matching_setup(
    const gsr_capture_v4l2_supported_setup *supported_setups,
    size_t num_supported_setups,
    gsr_capture_v4l2_pixfmt pixfmt,
    uint32_t camera_fps,
    gsr_capture_v4l2_resolution camera_resolution,
    gsr_capture_v4l2_supported_setup *best_supported_setup)
{
    memset(best_supported_setup, 0, sizeof(*best_supported_setup));
    int best_match_index = -1;
    uint64_t best_match_score = 0;
    for(size_t i = 0; i < num_supported_setups; ++i) {
        const gsr_capture_v4l2_supported_setup *setup = &supported_setups[i];
        /* A concrete (non-auto) requested pixfmt is a hard filter, not a score bonus */
        if(pixfmt != GSR_CAPTURE_V4L2_PIXFMT_AUTO && pixfmt != setup->pixfmt)
            continue;
        uint64_t setup_resolution_width = (uint64_t)setup->resolution.width;
        uint64_t setup_resolution_height = (uint64_t)setup->resolution.height;
        uint64_t setup_framerate = gsr_capture_v4l2_framerate_to_number(setup->framerate);
        /* Boost exact resolution match with a sentinel larger than any real dimension */
        if(setup_resolution_width == camera_resolution.width && setup_resolution_height == camera_resolution.height) {
            setup_resolution_width = 50000;
            setup_resolution_height = 50000;
        }
        /* Same boost for an exact fps match */
        if(setup_framerate == camera_fps) {
            setup_framerate = 50000;
        }
        /* Max score 50000^3 = 1.25e14, well within uint64_t range */
        const uint64_t match_score = setup_resolution_width * setup_resolution_height * setup_framerate;
        /* Strict '>' keeps the first of equally-scored setups (enumeration order) */
        if(match_score > best_match_score) {
            best_match_score = match_score;
            best_match_index = i;
        }
        //fprintf(stderr, "supported setup[%d]: pixfmt: %d, size: %ux%u, fps: %u/%u\n", (int)i, setup->pixfmt, setup->resolution.width, setup->resolution.height, setup->framerate.denominator, setup->framerate.numerator);
    }
    if(best_match_index == -1)
        return false;
    //fprintf(stderr, "best match index: %d\n", best_match_index);
    *best_supported_setup = supported_setups[best_match_index];
    return true;
}
/*
 * Reads the current streaming parameters and writes them straight back
 * (VIDIOC_G_PARM followed by VIDIOC_S_PARM without modification).
 * Seems like some cameras need this? Failures are logged but non-fatal.
 *
 * Fix: the log messages previously named gsr_capture_v4l2_set_framerate
 * (copy-paste leftover), which made failures here impossible to attribute.
 */
static void gsr_capture_v4l2_update_params(int fd) {
    struct v4l2_streamparm streamparm = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
    };
    if(xioctl(fd, VIDIOC_G_PARM, &streamparm) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_update_params: VIDIOC_G_PARM failed, error: %s\n", strerror(errno));
        return;
    }

    if(xioctl(fd, VIDIOC_S_PARM, &streamparm) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_update_params: VIDIOC_S_PARM failed, error: %s\n", strerror(errno));
        return;
    }
}
/*
 * Sets the capture framerate via VIDIOC_S_PARM, preserving all other
 * streaming parameters (fetched first with VIDIOC_G_PARM). Note that
 * timeperframe is a frame *interval*, so denominator/numerator here follow
 * the same convention as gsr_capture_v4l2_framerate. Failures are logged but
 * non-fatal; the driver may also silently adjust to the nearest supported
 * rate.
 *
 * Fixes: stray double semicolon removed; the final diagnostic previously
 * claimed "VIDIOC_S_PARM failed" and printed the *requested* values even
 * though the ioctl succeeded and the invalid values are the ones the driver
 * returned.
 */
static void gsr_capture_v4l2_set_framerate(int fd, gsr_capture_v4l2_framerate framerate) {
    struct v4l2_streamparm streamparm = {
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
    };
    if(xioctl(fd, VIDIOC_G_PARM, &streamparm) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_set_framerate: VIDIOC_G_PARM failed, error: %s\n", strerror(errno));
        return;
    }

    streamparm.parm.capture.timeperframe.denominator = framerate.denominator;
    streamparm.parm.capture.timeperframe.numerator = framerate.numerator;
    if(xioctl(fd, VIDIOC_S_PARM, &streamparm) == -1) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_set_framerate: VIDIOC_S_PARM failed, error: %s\n", strerror(errno));
        return;
    }

    /* Drivers rewrite timeperframe to the rate actually in effect; a zero
       component means the driver rejected the requested framerate. */
    if(streamparm.parm.capture.timeperframe.denominator == 0 || streamparm.parm.capture.timeperframe.numerator == 0) {
        fprintf(stderr, "gsr error: gsr_capture_v4l2_set_framerate: VIDIOC_S_PARM returned an invalid framerate: %u/%u (requested: %u/%u)\n",
            streamparm.parm.capture.timeperframe.denominator, streamparm.parm.capture.timeperframe.numerator,
            framerate.denominator, framerate.numerator);
        return;
    }
}
static bool gsr_capture_v4l2_validate_pixfmt(const gsr_capture_v4l2 *self, const gsr_capture_v4l2_supported_pixfmts supported_pixfmts) {
switch(self->params.pixfmt) {
case GSR_CAPTURE_V4L2_PIXFMT_AUTO: {
if(supported_pixfmts.yuyv) {
self->params.pixfmt = GSR_CAPTURE_V4L2_PIXFMT_YUYV;
} else if(supported_pixfmts.mjpeg) {
self->params.pixfmt = GSR_CAPTURE_V4L2_PIXFMT_MJPEG;
} else {
if(!supported_pixfmts.yuyv && !supported_pixfmts.mjpeg) {
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support yuyv nor mjpeg. GPU Screen Recorder supports only yuyv and mjpeg at the moment. Report this as an issue, see: https://git.dec05eba.com/?p=about\n", self->params.device_path);
return false;
}
@@ -208,14 +402,14 @@ static bool gsr_capture_v4l2_validate_pixfmt(gsr_capture_v4l2 *self, const gsr_c
}
case GSR_CAPTURE_V4L2_PIXFMT_YUYV: {
if(!supported_pixfmts.yuyv) {
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support yuyv. Try recording with -pixfmt mjpeg or -pixfmt auto instead\n", self->params.device_path);
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support yuyv. Try recording with pixfmt=mjpeg or pixfmt=auto instead\n", self->params.device_path);
return false;
}
break;
}
case GSR_CAPTURE_V4L2_PIXFMT_MJPEG: {
if(!supported_pixfmts.mjpeg) {
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support mjpeg. Try recording with -pixfmt yuyv or -pixfmt auto instead\n", self->params.device_path);
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't support mjpeg. Try recording with pixfmt=yuyv or pixfmt=auto instead\n", self->params.device_path);
return false;
}
break;
@@ -321,6 +515,14 @@ static bool gsr_capture_v4l2_map_buffer(gsr_capture_v4l2 *self, const struct v4l
return true;
}
/*
 * Probes for libturbojpeg (needed for mjpeg decoding) by attempting to load
 * it and immediately releasing the handle. Returns true when the library can
 * be loaded.
 */
static bool is_libturbojpeg_library_available(void) {
    void *handle = dlopen("libturbojpeg.so.0", RTLD_LAZY);
    if(!handle)
        return false;
    dlclose(handle);
    return true;
}
static int gsr_capture_v4l2_setup(gsr_capture_v4l2 *self) {
self->fd = open(self->params.device_path, O_RDWR | O_NONBLOCK);
if(self->fd < 0) {
@@ -351,10 +553,35 @@ static int gsr_capture_v4l2_setup(gsr_capture_v4l2 *self) {
gsr_capture_v4l2_reset_cropping(self);
const bool has_libturbojpeg_lib = is_libturbojpeg_library_available();
if(!has_libturbojpeg_lib && self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_AUTO) {
fprintf(stderr, "gsr warning: gsr_capture_v4l2_create: libturbojpeg.so.0 isn't available on the system, yuyv camera capture will be used\n");
self->params.pixfmt = GSR_CAPTURE_V4L2_PIXFMT_YUYV;
}
const gsr_capture_v4l2_supported_pixfmts supported_pixfmts = gsr_capture_v4l2_get_supported_pixfmts(self->fd);
if(!gsr_capture_v4l2_validate_pixfmt(self, supported_pixfmts))
return -1;
gsr_capture_v4l2_supported_setup supported_setups[128];
const size_t num_supported_setups = gsr_capture_v4l2_get_supported_setups(self->fd, supported_setups, 128, has_libturbojpeg_lib);
gsr_capture_v4l2_supported_setup best_supported_setup = {0};
if(!gsr_capture_v4l2_get_best_matching_setup(supported_setups, num_supported_setups, self->params.pixfmt, self->params.camera_fps, self->params.camera_resolution, &best_supported_setup)) {
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: %s doesn't report any frame resolutions and framerates\n", self->params.device_path);
return -1;
}
fprintf(stderr, "gsr info: gsr_capture_v4l2_create: capturing %s at %ux%u@%dhz, pixfmt: %s\n",
self->params.device_path,
best_supported_setup.resolution.width,
best_supported_setup.resolution.height,
gsr_capture_v4l2_framerate_to_number(best_supported_setup.framerate),
gsr_capture_v4l2_pixfmt_to_string(best_supported_setup.pixfmt));
gsr_capture_v4l2_update_params(self->fd);
self->params.pixfmt = best_supported_setup.pixfmt;
if(self->params.pixfmt == GSR_CAPTURE_V4L2_PIXFMT_MJPEG) {
dlerror(); /* clear */
self->libturbojpeg_lib = dlopen("libturbojpeg.so.0", RTLD_LAZY);
@@ -384,7 +611,9 @@ static int gsr_capture_v4l2_setup(gsr_capture_v4l2 *self) {
const uint32_t v4l2_pixfmt = gsr_pixfmt_to_v4l2_pixfmt(self->params.pixfmt);
struct v4l2_format fmt = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.fmt.pix.pixelformat = v4l2_pixfmt
.fmt.pix.pixelformat = v4l2_pixfmt,
.fmt.pix.width = best_supported_setup.resolution.width,
.fmt.pix.height = best_supported_setup.resolution.height,
};
if(xioctl(self->fd, VIDIOC_S_FMT, &fmt) == -1) {
fprintf(stderr, "gsr error: gsr_capture_v4l2_create: VIDIOC_S_FMT failed, error: %s\n", strerror(errno));
@@ -399,6 +628,8 @@ static int gsr_capture_v4l2_setup(gsr_capture_v4l2 *self) {
self->capture_size.x = fmt.fmt.pix.width;
self->capture_size.y = fmt.fmt.pix.height;
gsr_capture_v4l2_set_framerate(self->fd, best_supported_setup.framerate);
struct v4l2_requestbuffers reqbuf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.memory = V4L2_MEMORY_MMAP,
@@ -645,12 +876,10 @@ gsr_capture* gsr_capture_v4l2_create(const gsr_capture_v4l2_params *params) {
}
void gsr_capture_v4l2_list_devices(v4l2_devices_query_callback callback, void *userdata) {
void *libturbojpeg_lib = dlopen("libturbojpeg.so.0", RTLD_LAZY);
const bool has_libturbojpeg_lib = libturbojpeg_lib != NULL;
if(libturbojpeg_lib)
dlclose(libturbojpeg_lib);
const bool has_libturbojpeg_lib = is_libturbojpeg_library_available();
char v4l2_device_path[128];
gsr_capture_v4l2_supported_setup supported_setups[128];
for(int i = 0; i < 8; ++i) {
snprintf(v4l2_device_path, sizeof(v4l2_device_path), "/dev/video%d", i);
@@ -674,12 +903,14 @@ void gsr_capture_v4l2_list_devices(v4l2_devices_query_callback callback, void *u
if(xioctl(fd, VIDIOC_G_FMT, &fmt) == -1)
goto next;
gsr_capture_v4l2_supported_pixfmts supported_pixfmts = gsr_capture_v4l2_get_supported_pixfmts(fd);
if(!has_libturbojpeg_lib)
supported_pixfmts.mjpeg = false;
const size_t num_supported_setups = gsr_capture_v4l2_get_supported_setups(fd, supported_setups, 128, has_libturbojpeg_lib);
if(num_supported_setups == 0)
continue;
if(supported_pixfmts.yuyv || supported_pixfmts.mjpeg)
callback(v4l2_device_path, supported_pixfmts, (vec2i){ fmt.fmt.pix.width, fmt.fmt.pix.height }, userdata);
for(size_t j = 0; j < num_supported_setups; ++j) {
const gsr_capture_v4l2_supported_setup *setup = &supported_setups[j];
callback(v4l2_device_path, setup, userdata);
}
next:
close(fd);

View File

@@ -1194,6 +1194,8 @@ struct CaptureSource {
vec2i region_size = {0, 0};
bool region_set = false;
int64_t window_id = 0;
int camera_fps = 0;
vec2i camera_resolution = {0, 0};
};
struct VideoSource {
@@ -1869,18 +1871,9 @@ static void output_monitor_info(const gsr_monitor *monitor, void *userdata) {
++options->num_monitors;
}
static void camera_query_callback(const char *path, gsr_capture_v4l2_supported_pixfmts supported_pixfmts, vec2i size, void *userdata) {
static void camera_query_callback(const char *path, const gsr_capture_v4l2_supported_setup *setup, void *userdata) {
(void)userdata;
char pixfmt_str[32];
if(supported_pixfmts.yuyv && supported_pixfmts.mjpeg)
snprintf(pixfmt_str, sizeof(pixfmt_str), "yuyv,mjpeg");
else if(supported_pixfmts.yuyv)
snprintf(pixfmt_str, sizeof(pixfmt_str), "yuyv");
else if(supported_pixfmts.mjpeg)
snprintf(pixfmt_str, sizeof(pixfmt_str), "mjpeg");
printf("%s|%dx%d|%s\n", path, size.x, size.y, pixfmt_str);
printf("%s|%ux%u@%uhz|%s\n", path, setup->resolution.width, setup->resolution.height, gsr_capture_v4l2_framerate_to_number(setup->framerate), gsr_capture_v4l2_pixfmt_to_string(setup->pixfmt));
}
static void list_supported_capture_options(const gsr_window *window, const char *card_path, bool list_monitors) {
@@ -2316,7 +2309,9 @@ static gsr_capture* create_capture_impl(const args_parser &arg_parser, gsr_egl *
v4l2_params.output_resolution = arg_parser.output_resolution;
v4l2_params.device_path = capture_source.name.c_str();
v4l2_params.pixfmt = capture_source.v4l2_pixfmt;
v4l2_params.fps = arg_parser.fps;
v4l2_params.camera_fps = capture_source.camera_fps;
v4l2_params.camera_resolution.width = capture_source.camera_resolution.x;
v4l2_params.camera_resolution.height = capture_source.camera_resolution.y;
capture = gsr_capture_v4l2_create(&v4l2_params);
if(!capture)
_exit(1);
@@ -2398,13 +2393,28 @@ static std::vector<VideoSource> create_video_sources(const args_parser &arg_pars
}
}
// TODO: Video size should be end pos - start pos, where start pos = pos and end pos = pos + size
video_size = {0, 0};
vec2i start_pos = {99999, 99999};
vec2i end_pos = {-99999, -99999};
for(const VideoSource &video_source : video_sources) {
video_size.x = std::max(video_size.x, video_source.metadata.video_size.x);
video_size.y = std::max(video_size.y, video_source.metadata.video_size.y);
// TODO: Skip scalar positions for now, but this should be handled in a better way
if(video_source.capture_source->pos.x_type == VVEC2I_TYPE_SCALAR || video_source.capture_source->pos.y_type == VVEC2I_TYPE_SCALAR/*
|| video_source.capture_source->size.x_type == VVEC2I_TYPE_SCALAR || video_source.capture_source->size.y_type == VVEC2I_TYPE_SCALAR*/)
{
continue;
}
const vec2i video_source_start_pos = {video_source.capture_source->pos.x, video_source.capture_source->pos.y};
const vec2i video_source_end_pos = {video_source_start_pos.x + video_source.metadata.video_size.x, video_source_start_pos.y + video_source.metadata.video_size.y};
start_pos.x = std::min(start_pos.x, video_source_start_pos.x);
start_pos.y = std::min(start_pos.y, video_source_start_pos.y);
end_pos.x = std::max(end_pos.x, video_source_end_pos.x);
end_pos.y = std::max(end_pos.y, video_source_end_pos.y);
}
video_size.x = std::max(0, end_pos.x - start_pos.x);
video_size.y = std::max(0, end_pos.y - start_pos.y);
for(VideoSource &video_source : video_sources) {
video_source.metadata.video_size = video_size;
}
@@ -2761,6 +2771,15 @@ static bool string_to_bool(const char *str, size_t len, bool *value) {
}
}
// Clamps a percentage-style scalar value into the inclusive range [0, 100].
static int clamp_scalar(int value) {
    const int lo = 0;
    const int hi = 100;
    return value < lo ? lo : (value > hi ? hi : value);
}
static void parse_capture_source_options(const std::string &capture_source_str, CaptureSource &capture_source) {
bool is_first_column = true;
@@ -2782,6 +2801,9 @@ static void parse_capture_source_options(const std::string &capture_source_str,
fprintf(stderr, "gsr error: invalid capture target value for option x: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
if(capture_source.pos.x_type == VVEC2I_TYPE_SCALAR)
capture_source.pos.x = clamp_scalar(capture_source.pos.x);
} else if(string_starts_with(sub, size, "y=")) {
capture_source.pos.y_type = sub[size - 1] == '%' ? VVEC2I_TYPE_SCALAR : VVEC2I_TYPE_PIXELS;
sub += 2;
@@ -2790,6 +2812,9 @@ static void parse_capture_source_options(const std::string &capture_source_str,
fprintf(stderr, "gsr error: invalid capture target value for option y: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
if(capture_source.pos.y_type == VVEC2I_TYPE_SCALAR)
capture_source.pos.y = clamp_scalar(capture_source.pos.y);
} else if(string_starts_with(sub, size, "width=")) {
capture_source.size.x_type = sub[size - 1] == '%' ? VVEC2I_TYPE_SCALAR : VVEC2I_TYPE_PIXELS;
sub += 6;
@@ -2798,6 +2823,9 @@ static void parse_capture_source_options(const std::string &capture_source_str,
fprintf(stderr, "gsr error: invalid capture target value for option width: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
if(capture_source.size.x_type == VVEC2I_TYPE_SCALAR)
capture_source.size.x = clamp_scalar(capture_source.size.x);
} else if(string_starts_with(sub, size, "height=")) {
capture_source.size.y_type = sub[size - 1] == '%' ? VVEC2I_TYPE_SCALAR : VVEC2I_TYPE_PIXELS;
sub += 7;
@@ -2806,6 +2834,9 @@ static void parse_capture_source_options(const std::string &capture_source_str,
fprintf(stderr, "gsr error: invalid capture target value for option height: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
if(capture_source.size.y_type == VVEC2I_TYPE_SCALAR)
capture_source.size.y = clamp_scalar(capture_source.size.y);
} else if(string_starts_with(sub, size, "halign=")) {
sub += 7;
size -= 7;
@@ -2849,8 +2880,29 @@ static void parse_capture_source_options(const std::string &capture_source_str,
if(vflip)
capture_source.flip |= GSR_FLIP_VERTICAL;
} else if(string_starts_with(sub, size, "camera_fps=")) {
sub += 11;
size -= 11;
if(!string_to_int(sub, size, &capture_source.camera_fps)) {
fprintf(stderr, "gsr error: invalid capture target value for option camera_fps: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
} else if(string_starts_with(sub, size, "camera_width=")) {
sub += 13;
size -= 13;
if(!string_to_int(sub, size, &capture_source.camera_resolution.x)) {
fprintf(stderr, "gsr error: invalid capture target value for option camera_width: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
} else if(string_starts_with(sub, size, "camera_height=")) {
sub += 14;
size -= 14;
if(!string_to_int(sub, size, &capture_source.camera_resolution.y)) {
fprintf(stderr, "gsr error: invalid capture target value for option camera_height: \"%.*s\", expected a number\n", (int)size, sub);
_exit(1);
}
} else {
fprintf(stderr, "gsr error: invalid capture target option \"%.*s\", expected x, y, width, height, halign, valign, pixfmt, hflip or vflip\n", (int)size, sub);
fprintf(stderr, "gsr error: invalid capture target option \"%.*s\", expected x, y, width, height, halign, valign, pixfmt, hflip, vflip, camera_fps, camera_width or camera_height\n", (int)size, sub);
_exit(1);
}
@@ -2860,6 +2912,7 @@ static void parse_capture_source_options(const std::string &capture_source_str,
static std::vector<CaptureSource> parse_capture_source_arg(const char *capture_source_arg, const args_parser &arg_parser) {
std::vector<CaptureSource> requested_capture_sources;
const bool has_multiple_capture_sources = strchr(capture_source_arg, '|') != nullptr;
split_string(capture_source_arg, '|', [&](const char *sub, size_t size) {
if(size == 0)
@@ -2901,12 +2954,10 @@ static std::vector<CaptureSource> parse_capture_source_arg(const char *capture_s
}
}
/* We want good default values for v4l2 (webcam) capture, by setting webcam at bottom right, offset by -10%,-10% pixels and at size 30%,30% */
if(capture_source.type == GSR_CAPTURE_SOURCE_TYPE_V4L2 && !requested_capture_sources.empty()) {
if(has_multiple_capture_sources) {
capture_source.halign = GSR_CAPTURE_ALIGN_START;
capture_source.valign = GSR_CAPTURE_ALIGN_END;
capture_source.valign = GSR_CAPTURE_ALIGN_START;
capture_source.pos = {0, 0, VVEC2I_TYPE_PIXELS, VVEC2I_TYPE_PIXELS};
capture_source.size = {30, 30, VVEC2I_TYPE_SCALAR, VVEC2I_TYPE_SCALAR};
}
parse_capture_source_options(std::string(substr_start, size), capture_source);
@@ -3271,12 +3322,12 @@ static gsr_video_codec select_appropriate_video_codec_automatically(vec2i video_
fprintf(stderr, "gsr info: using h264 encoder because a codec was not specified\n");
return GSR_VIDEO_CODEC_H264;
} else if(supported_video_codecs->hevc.supported && codec_supports_resolution(supported_video_codecs->hevc.max_resolution, video_size)) {
fprintf(stderr, "gsr info: using hevc encoder because a codec was not specified and h264 supported max resolution (%dx%d) is less than capture resolution (%dx%d)\n",
fprintf(stderr, "gsr info: using hevc encoder because a codec was not specified and h264 supported max resolution (%dx%d) is less than the capture resolution (%dx%d)\n",
supported_video_codecs->h264.max_resolution.x, supported_video_codecs->h264.max_resolution.y,
video_size.x, video_size.y);
return GSR_VIDEO_CODEC_HEVC;
} else if(supported_video_codecs->av1.supported && codec_supports_resolution(supported_video_codecs->av1.max_resolution, video_size)) {
fprintf(stderr, "gsr info: using av1 encoder because a codec was not specified and hevc supported max resolution (%dx%d) is less than capture resolution (%dx%d)\n",
fprintf(stderr, "gsr info: using av1 encoder because a codec was not specified and hevc supported max resolution (%dx%d) is less than the capture resolution (%dx%d)\n",
supported_video_codecs->hevc.max_resolution.x, supported_video_codecs->hevc.max_resolution.y,
video_size.x, video_size.y);
return GSR_VIDEO_CODEC_AV1;
@@ -3314,7 +3365,6 @@ static const AVCodec* select_video_codec_with_fallback(vec2i video_size, args_pa
}
}
// TODO: Allow hevc, vp9 and av1 in (enhanced) flv (supported since ffmpeg 6.1)
if(LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(60, 10, 100) && strcmp(file_extension, "flv") == 0) {
if(args_parser->video_codec != GSR_VIDEO_CODEC_H264) {
args_parser->video_codec = GSR_VIDEO_CODEC_H264;