mirror of
https://repo.dec05eba.com/gpu-screen-recorder
synced 2026-03-31 09:07:13 +09:00
Separate video encoding method from capture method
With this instead of kms_cuda/kms_vaapi/kms_software and xcomposite_cuda/xcomposite_vaapi/xcomposite_software there is now only kms and xcomposite.
This commit is contained in:
@@ -6,15 +6,6 @@
|
||||
|
||||
typedef struct AVCodecContext AVCodecContext;
|
||||
typedef struct AVFrame AVFrame;
|
||||
typedef void* VADisplay;
|
||||
typedef struct _VADRMPRIMESurfaceDescriptor VADRMPRIMESurfaceDescriptor;
|
||||
typedef struct gsr_cuda gsr_cuda;
|
||||
typedef struct AVFrame AVFrame;
|
||||
typedef struct CUgraphicsResource_st *CUgraphicsResource;
|
||||
typedef struct CUarray_st *CUarray;
|
||||
typedef struct CUctx_st *CUcontext;
|
||||
typedef struct CUstream_st *CUstream;
|
||||
|
||||
typedef struct gsr_capture gsr_capture;
|
||||
|
||||
struct gsr_capture {
|
||||
@@ -23,49 +14,24 @@ struct gsr_capture {
|
||||
void (*tick)(gsr_capture *cap, AVCodecContext *video_codec_context); /* can be NULL */
|
||||
bool (*is_damaged)(gsr_capture *cap); /* can be NULL */
|
||||
void (*clear_damage)(gsr_capture *cap); /* can be NULL */
|
||||
bool (*should_stop)(gsr_capture *cap, bool *err); /* can be NULL */
|
||||
int (*capture)(gsr_capture *cap, AVFrame *frame);
|
||||
bool (*should_stop)(gsr_capture *cap, bool *err); /* can be NULL. If NULL, return false */
|
||||
int (*capture)(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
|
||||
void (*capture_end)(gsr_capture *cap, AVFrame *frame); /* can be NULL */
|
||||
gsr_source_color (*get_source_color)(gsr_capture *cap);
|
||||
bool (*uses_external_image)(gsr_capture *cap); /* can be NULL. If NULL, return false */
|
||||
void (*destroy)(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
void *priv; /* can be NULL */
|
||||
bool started;
|
||||
};
|
||||
|
||||
typedef struct gsr_capture_base gsr_capture_base;
|
||||
|
||||
struct gsr_capture_base {
|
||||
gsr_egl *egl;
|
||||
|
||||
unsigned int input_texture;
|
||||
unsigned int target_textures[2];
|
||||
unsigned int cursor_texture;
|
||||
|
||||
gsr_color_conversion color_conversion;
|
||||
|
||||
AVCodecContext *video_codec_context;
|
||||
};
|
||||
|
||||
typedef struct {
|
||||
gsr_cuda *cuda;
|
||||
CUgraphicsResource *cuda_graphics_resources;
|
||||
CUarray *mapped_arrays;
|
||||
} gsr_cuda_context;
|
||||
|
||||
int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame);
|
||||
void gsr_capture_tick(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
bool gsr_capture_should_stop(gsr_capture *cap, bool *err);
|
||||
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame);
|
||||
void gsr_capture_end(gsr_capture *cap, AVFrame *frame);
|
||||
/* Calls |gsr_capture_stop| as well */
|
||||
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
|
||||
void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame);
|
||||
gsr_source_color gsr_capture_get_source_color(gsr_capture *cap);
|
||||
bool gsr_capture_uses_external_image(gsr_capture *cap);
|
||||
void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
bool gsr_capture_base_setup_vaapi_textures(gsr_capture_base *self, AVFrame *frame, VADisplay va_dpy, VADRMPRIMESurfaceDescriptor *prime, gsr_color_range color_range);
|
||||
bool gsr_capture_base_setup_cuda_textures(gsr_capture_base *self, AVFrame *frame, gsr_cuda_context *cuda_context, gsr_color_range color_range, gsr_source_color source_color, bool hdr);
|
||||
bool gsr_capture_base_setup_textures(gsr_capture_base *self, AVFrame *frame, gsr_color_range color_range, gsr_source_color source_color, bool hdr, bool cursor_texture_is_external);
|
||||
void gsr_capture_base_stop(gsr_capture_base *self);
|
||||
|
||||
bool vaapi_create_codec_context(const char *card_path, AVCodecContext *video_codec_context, int width, int height, bool hdr, VADisplay *va_dpy);
|
||||
bool cuda_create_codec_context(CUcontext cu_ctx, AVCodecContext *video_codec_context, int width, int height, bool hdr, CUstream *cuda_stream);
|
||||
|
||||
#endif /* GSR_CAPTURE_CAPTURE_H */
|
||||
|
||||
@@ -2,49 +2,15 @@
|
||||
#define GSR_CAPTURE_KMS_H
|
||||
|
||||
#include "capture.h"
|
||||
#include "../../kms/client/kms_client.h"
|
||||
#include "../color_conversion.h"
|
||||
#include "../vec2.h"
|
||||
#include "../defs.h"
|
||||
#include <stdbool.h>
|
||||
|
||||
typedef struct AVCodecContext AVCodecContext;
|
||||
typedef struct AVMasteringDisplayMetadata AVMasteringDisplayMetadata;
|
||||
typedef struct AVContentLightMetadata AVContentLightMetadata;
|
||||
typedef struct gsr_capture_kms gsr_capture_kms;
|
||||
typedef struct gsr_egl gsr_egl;
|
||||
typedef struct AVFrame AVFrame;
|
||||
|
||||
#define MAX_CONNECTOR_IDS 32
|
||||
|
||||
typedef struct {
|
||||
uint32_t connector_ids[MAX_CONNECTOR_IDS];
|
||||
int num_connector_ids;
|
||||
} MonitorId;
|
||||
gsr_egl *egl;
|
||||
const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
|
||||
gsr_color_range color_range;
|
||||
bool hdr;
|
||||
bool record_cursor;
|
||||
} gsr_capture_kms_params;
|
||||
|
||||
struct gsr_capture_kms {
|
||||
gsr_capture_base base;
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
|
||||
gsr_kms_client kms_client;
|
||||
gsr_kms_response kms_response;
|
||||
|
||||
vec2i capture_pos;
|
||||
vec2i capture_size;
|
||||
MonitorId monitor_id;
|
||||
|
||||
AVMasteringDisplayMetadata *mastering_display_metadata;
|
||||
AVContentLightMetadata *light_metadata;
|
||||
|
||||
gsr_monitor_rotation monitor_rotation;
|
||||
};
|
||||
|
||||
/* Returns 0 on success */
|
||||
int gsr_capture_kms_start(gsr_capture_kms *self, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context, AVFrame *frame);
|
||||
void gsr_capture_kms_stop(gsr_capture_kms *self);
|
||||
bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bool screen_plane_use_modifiers, bool cursor_texture_is_external, bool record_cursor);
|
||||
void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self);
|
||||
gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_KMS_H */
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_KMS_CUDA_H
|
||||
#define GSR_CAPTURE_KMS_CUDA_H
|
||||
|
||||
#include "../vec2.h"
|
||||
#include "../utils.h"
|
||||
#include "../color_conversion.h"
|
||||
#include "capture.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
|
||||
bool hdr;
|
||||
gsr_color_range color_range;
|
||||
bool record_cursor;
|
||||
} gsr_capture_kms_cuda_params;
|
||||
|
||||
gsr_capture* gsr_capture_kms_cuda_create(const gsr_capture_kms_cuda_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_KMS_CUDA_H */
|
||||
@@ -1,19 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_KMS_SOFTWARE_H
|
||||
#define GSR_CAPTURE_KMS_SOFTWARE_H
|
||||
|
||||
#include "../vec2.h"
|
||||
#include "../utils.h"
|
||||
#include "../color_conversion.h"
|
||||
#include "capture.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
|
||||
bool hdr;
|
||||
gsr_color_range color_range;
|
||||
bool record_cursor;
|
||||
} gsr_capture_kms_software_params;
|
||||
|
||||
gsr_capture* gsr_capture_kms_software_create(const gsr_capture_kms_software_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_KMS_SOFTWARE_H */
|
||||
@@ -1,19 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_KMS_VAAPI_H
|
||||
#define GSR_CAPTURE_KMS_VAAPI_H
|
||||
|
||||
#include "../vec2.h"
|
||||
#include "../utils.h"
|
||||
#include "../color_conversion.h"
|
||||
#include "capture.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
|
||||
bool hdr;
|
||||
gsr_color_range color_range;
|
||||
bool record_cursor;
|
||||
} gsr_capture_kms_vaapi_params;
|
||||
|
||||
gsr_capture* gsr_capture_kms_vaapi_create(const gsr_capture_kms_vaapi_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_KMS_VAAPI_H */
|
||||
@@ -2,15 +2,11 @@
|
||||
#define GSR_CAPTURE_XCOMPOSITE_H
|
||||
|
||||
#include "capture.h"
|
||||
#include "../egl.h"
|
||||
#include "../vec2.h"
|
||||
#include "../color_conversion.h"
|
||||
#include "../window_texture.h"
|
||||
#include "../cursor.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
Window window;
|
||||
unsigned long window;
|
||||
bool follow_focused; /* If this is set then |window| is ignored */
|
||||
vec2i region_size; /* This is currently only used with |follow_focused| */
|
||||
gsr_color_range color_range;
|
||||
@@ -18,41 +14,6 @@ typedef struct {
|
||||
bool track_damage;
|
||||
} gsr_capture_xcomposite_params;
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_base base;
|
||||
gsr_capture_xcomposite_params params;
|
||||
XEvent xev;
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
bool window_resized;
|
||||
bool follow_focused_initialized;
|
||||
|
||||
Window window;
|
||||
vec2i window_size;
|
||||
vec2i texture_size;
|
||||
double window_resize_timer;
|
||||
|
||||
WindowTexture window_texture;
|
||||
|
||||
Atom net_active_window_atom;
|
||||
|
||||
gsr_cursor cursor;
|
||||
|
||||
int damage_event;
|
||||
int damage_error;
|
||||
XID damage;
|
||||
bool damaged;
|
||||
} gsr_capture_xcomposite;
|
||||
|
||||
void gsr_capture_xcomposite_init(gsr_capture_xcomposite *self, const gsr_capture_xcomposite_params *params);
|
||||
|
||||
int gsr_capture_xcomposite_start(gsr_capture_xcomposite *self, AVCodecContext *video_codec_context, AVFrame *frame);
|
||||
void gsr_capture_xcomposite_stop(gsr_capture_xcomposite *self);
|
||||
void gsr_capture_xcomposite_tick(gsr_capture_xcomposite *self, AVCodecContext *video_codec_context);
|
||||
bool gsr_capture_xcomposite_is_damaged(gsr_capture_xcomposite *self);
|
||||
void gsr_capture_xcomposite_clear_damage(gsr_capture_xcomposite *self);
|
||||
bool gsr_capture_xcomposite_should_stop(gsr_capture_xcomposite *self, bool *err);
|
||||
int gsr_capture_xcomposite_capture(gsr_capture_xcomposite *self, AVFrame *frame);
|
||||
gsr_capture* gsr_capture_xcomposite_create(const gsr_capture_xcomposite_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_XCOMPOSITE_H */
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_XCOMPOSITE_CUDA_H
|
||||
#define GSR_CAPTURE_XCOMPOSITE_CUDA_H
|
||||
|
||||
#include "capture.h"
|
||||
#include "xcomposite.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite_params base;
|
||||
bool overclock;
|
||||
} gsr_capture_xcomposite_cuda_params;
|
||||
|
||||
gsr_capture* gsr_capture_xcomposite_cuda_create(const gsr_capture_xcomposite_cuda_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_XCOMPOSITE_CUDA_H */
|
||||
@@ -1,13 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_XCOMPOSITE_SOFTWARE_H
|
||||
#define GSR_CAPTURE_XCOMPOSITE_SOFTWARE_H
|
||||
|
||||
#include "capture.h"
|
||||
#include "xcomposite.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite_params base;
|
||||
} gsr_capture_xcomposite_software_params;
|
||||
|
||||
gsr_capture* gsr_capture_xcomposite_software_create(const gsr_capture_xcomposite_software_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_XCOMPOSITE_SOFTWARE_H */
|
||||
@@ -1,13 +0,0 @@
|
||||
#ifndef GSR_CAPTURE_XCOMPOSITE_VAAPI_H
|
||||
#define GSR_CAPTURE_XCOMPOSITE_VAAPI_H
|
||||
|
||||
#include "capture.h"
|
||||
#include "xcomposite.h"
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite_params base;
|
||||
} gsr_capture_xcomposite_vaapi_params;
|
||||
|
||||
gsr_capture* gsr_capture_xcomposite_vaapi_create(const gsr_capture_xcomposite_vaapi_params *params);
|
||||
|
||||
#endif /* GSR_CAPTURE_XCOMPOSITE_VAAPI_H */
|
||||
16
include/encoder/video/cuda.h
Normal file
16
include/encoder/video/cuda.h
Normal file
@@ -0,0 +1,16 @@
|
||||
#ifndef GSR_ENCODER_VIDEO_CUDA_H
|
||||
#define GSR_ENCODER_VIDEO_CUDA_H
|
||||
|
||||
#include "video.h"
|
||||
|
||||
typedef struct gsr_egl gsr_egl;
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
bool overclock;
|
||||
bool hdr;
|
||||
} gsr_video_encoder_cuda_params;
|
||||
|
||||
gsr_video_encoder* gsr_video_encoder_cuda_create(const gsr_video_encoder_cuda_params *params);
|
||||
|
||||
#endif /* GSR_ENCODER_VIDEO_CUDA_H */
|
||||
15
include/encoder/video/software.h
Normal file
15
include/encoder/video/software.h
Normal file
@@ -0,0 +1,15 @@
|
||||
#ifndef GSR_ENCODER_VIDEO_SOFTWARE_H
|
||||
#define GSR_ENCODER_VIDEO_SOFTWARE_H
|
||||
|
||||
#include "video.h"
|
||||
|
||||
typedef struct gsr_egl gsr_egl;
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
bool hdr;
|
||||
} gsr_video_encoder_software_params;
|
||||
|
||||
gsr_video_encoder* gsr_video_encoder_software_create(const gsr_video_encoder_software_params *params);
|
||||
|
||||
#endif /* GSR_ENCODER_VIDEO_SOFTWARE_H */
|
||||
15
include/encoder/video/vaapi.h
Normal file
15
include/encoder/video/vaapi.h
Normal file
@@ -0,0 +1,15 @@
|
||||
#ifndef GSR_ENCODER_VIDEO_VAAPI_H
|
||||
#define GSR_ENCODER_VIDEO_VAAPI_H
|
||||
|
||||
#include "video.h"
|
||||
|
||||
typedef struct gsr_egl gsr_egl;
|
||||
|
||||
typedef struct {
|
||||
gsr_egl *egl;
|
||||
bool hdr;
|
||||
} gsr_video_encoder_vaapi_params;
|
||||
|
||||
gsr_video_encoder* gsr_video_encoder_vaapi_create(const gsr_video_encoder_vaapi_params *params);
|
||||
|
||||
#endif /* GSR_ENCODER_VIDEO_VAAPI_H */
|
||||
27
include/encoder/video/video.h
Normal file
27
include/encoder/video/video.h
Normal file
@@ -0,0 +1,27 @@
|
||||
#ifndef GSR_ENCODER_VIDEO_H
|
||||
#define GSR_ENCODER_VIDEO_H
|
||||
|
||||
#include "../../color_conversion.h"
|
||||
#include <stdbool.h>
|
||||
|
||||
typedef struct gsr_video_encoder gsr_video_encoder;
|
||||
typedef struct AVCodecContext AVCodecContext;
|
||||
typedef struct AVFrame AVFrame;
|
||||
|
||||
struct gsr_video_encoder {
|
||||
bool (*start)(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame);
|
||||
void (*copy_textures_to_frame)(gsr_video_encoder *encoder, AVFrame *frame); /* Can be NULL */
|
||||
/* |textures| should be able to fit 2 elements */
|
||||
void (*get_textures)(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color);
|
||||
void (*destroy)(gsr_video_encoder *encoder, AVCodecContext *video_codec_context);
|
||||
|
||||
void *priv;
|
||||
bool started;
|
||||
};
|
||||
|
||||
bool gsr_video_encoder_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame);
|
||||
void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame);
|
||||
void gsr_video_encoder_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color);
|
||||
void gsr_video_encoder_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context);
|
||||
|
||||
#endif /* GSR_ENCODER_VIDEO_H */
|
||||
@@ -12,12 +12,6 @@ src = [
|
||||
'src/capture/capture.c',
|
||||
'src/capture/nvfbc.c',
|
||||
'src/capture/xcomposite.c',
|
||||
'src/capture/xcomposite_cuda.c',
|
||||
'src/capture/xcomposite_vaapi.c',
|
||||
'src/capture/xcomposite_software.c',
|
||||
'src/capture/kms_cuda.c',
|
||||
'src/capture/kms_vaapi.c',
|
||||
'src/capture/kms_software.c',
|
||||
'src/capture/kms.c',
|
||||
'src/egl.c',
|
||||
'src/cuda.c',
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
option('systemd', type : 'boolean', value : false, description : 'Install systemd service file')
|
||||
option('capabilities', type : 'boolean', value : true, description : 'Set binary admin capabilities to remove password prompt and increase performance')
|
||||
option('capabilities', type : 'boolean', value : true, description : 'Set binary admin capability to remove password prompt and nice capability to allow gpu screen recorder to run at a higher framerate than the game you are recording')
|
||||
|
||||
@@ -1,20 +1,8 @@
|
||||
#include "../../include/capture/capture.h"
|
||||
#include "../../include/egl.h"
|
||||
#include "../../include/cuda.h"
|
||||
#include "../../include/utils.h"
|
||||
#include <stdio.h>
|
||||
#include <stdint.h>
|
||||
#include <va/va.h>
|
||||
#include <va/va_drmcommon.h>
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavutil/hwcontext_vaapi.h>
|
||||
#include <libavutil/hwcontext_cuda.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <assert.h>
|
||||
|
||||
int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
if(cap->started)
|
||||
return -1;
|
||||
|
||||
assert(!cap->started);
|
||||
int res = cap->start(cap, video_codec_context, frame);
|
||||
if(res == 0)
|
||||
cap->started = true;
|
||||
@@ -23,444 +11,41 @@ int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVF
|
||||
}
|
||||
|
||||
void gsr_capture_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
if(!cap->started) {
|
||||
fprintf(stderr, "gsr error: gsp_capture_tick failed: the gsr capture has not been started\n");
|
||||
return;
|
||||
}
|
||||
|
||||
assert(cap->started);
|
||||
if(cap->tick)
|
||||
cap->tick(cap, video_codec_context);
|
||||
}
|
||||
|
||||
bool gsr_capture_should_stop(gsr_capture *cap, bool *err) {
|
||||
if(!cap->started) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_should_stop failed: the gsr capture has not been started\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
if(!cap->should_stop)
|
||||
return false;
|
||||
|
||||
assert(cap->started);
|
||||
if(cap->should_stop)
|
||||
return cap->should_stop(cap, err);
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
if(!cap->started) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_capture failed: the gsr capture has not been started\n");
|
||||
return -1;
|
||||
}
|
||||
return cap->capture(cap, frame);
|
||||
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
|
||||
assert(cap->started);
|
||||
return cap->capture(cap, frame, color_conversion);
|
||||
}
|
||||
|
||||
void gsr_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
if(!cap->started) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_end failed: the gsr capture has not been started\n");
|
||||
return;
|
||||
}
|
||||
|
||||
if(!cap->capture_end)
|
||||
return;
|
||||
|
||||
void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
assert(cap->started);
|
||||
if(cap->capture_end)
|
||||
cap->capture_end(cap, frame);
|
||||
}
|
||||
|
||||
gsr_source_color gsr_capture_get_source_color(gsr_capture *cap) {
|
||||
return cap->get_source_color(cap);
|
||||
}
|
||||
|
||||
bool gsr_capture_uses_external_image(gsr_capture *cap) {
|
||||
if(cap->uses_external_image)
|
||||
return cap->uses_external_image(cap);
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
cap->destroy(cap, video_codec_context);
|
||||
}
|
||||
|
||||
static uint32_t fourcc(uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
|
||||
return (d << 24) | (c << 16) | (b << 8) | a;
|
||||
}
|
||||
|
||||
bool gsr_capture_base_setup_vaapi_textures(gsr_capture_base *self, AVFrame *frame, VADisplay va_dpy, VADRMPRIMESurfaceDescriptor *prime, gsr_color_range color_range) {
|
||||
const int res = av_hwframe_get_buffer(self->video_codec_context->hw_frames_ctx, frame, 0);
|
||||
if(res < 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: av_hwframe_get_buffer failed: %d\n", res);
|
||||
return false;
|
||||
}
|
||||
|
||||
VASurfaceID target_surface_id = (uintptr_t)frame->data[3];
|
||||
|
||||
VAStatus va_status = vaExportSurfaceHandle(va_dpy, target_surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, VA_EXPORT_SURFACE_WRITE_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, prime);
|
||||
if(va_status != VA_STATUS_SUCCESS) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: vaExportSurfaceHandle failed, error: %d\n", va_status);
|
||||
return false;
|
||||
}
|
||||
vaSyncSurface(va_dpy, target_surface_id);
|
||||
|
||||
self->egl->glGenTextures(1, &self->input_texture);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
self->egl->glGenTextures(1, &self->cursor_texture);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, self->cursor_texture);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
const uint32_t formats_nv12[2] = { fourcc('R', '8', ' ', ' '), fourcc('G', 'R', '8', '8') };
|
||||
const uint32_t formats_p010[2] = { fourcc('R', '1', '6', ' '), fourcc('G', 'R', '3', '2') };
|
||||
|
||||
if(prime->fourcc == VA_FOURCC_NV12 || prime->fourcc == VA_FOURCC_P010) {
|
||||
const uint32_t *formats = prime->fourcc == VA_FOURCC_NV12 ? formats_nv12 : formats_p010;
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
|
||||
self->egl->glGenTextures(2, self->target_textures);
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
const int layer = i;
|
||||
const int plane = 0;
|
||||
|
||||
const uint64_t modifier = prime->objects[prime->layers[layer].object_index[plane]].drm_format_modifier;
|
||||
const intptr_t img_attr[] = {
|
||||
EGL_LINUX_DRM_FOURCC_EXT, formats[i],
|
||||
EGL_WIDTH, prime->width / div[i],
|
||||
EGL_HEIGHT, prime->height / div[i],
|
||||
EGL_DMA_BUF_PLANE0_FD_EXT, prime->objects[prime->layers[layer].object_index[plane]].fd,
|
||||
EGL_DMA_BUF_PLANE0_OFFSET_EXT, prime->layers[layer].offset[plane],
|
||||
EGL_DMA_BUF_PLANE0_PITCH_EXT, prime->layers[layer].pitch[plane],
|
||||
EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, modifier & 0xFFFFFFFFULL,
|
||||
EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, modifier >> 32ULL,
|
||||
EGL_NONE
|
||||
};
|
||||
|
||||
while(self->egl->eglGetError() != EGL_SUCCESS){}
|
||||
EGLImage image = self->egl->eglCreateImage(self->egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
|
||||
if(!image) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: failed to create egl image from drm fd for output drm fd, error: %d\n", self->egl->eglGetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[i]);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
|
||||
while(self->egl->glGetError()) {}
|
||||
while(self->egl->eglGetError() != EGL_SUCCESS){}
|
||||
self->egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
|
||||
if(self->egl->glGetError() != 0 || self->egl->eglGetError() != EGL_SUCCESS) {
|
||||
// TODO: Get the error properly
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: failed to bind egl image to gl texture, error: %d\n", self->egl->eglGetError());
|
||||
self->egl->eglDestroyImage(self->egl->egl_display, image);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
return false;
|
||||
}
|
||||
|
||||
self->egl->eglDestroyImage(self->egl->egl_display, image);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
gsr_color_conversion_params color_conversion_params = {0};
|
||||
color_conversion_params.color_range = color_range;
|
||||
color_conversion_params.egl = self->egl;
|
||||
color_conversion_params.source_color = GSR_SOURCE_COLOR_RGB;
|
||||
if(prime->fourcc == VA_FOURCC_NV12)
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_NV12;
|
||||
else
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_P010;
|
||||
|
||||
color_conversion_params.destination_textures[0] = self->target_textures[0];
|
||||
color_conversion_params.destination_textures[1] = self->target_textures[1];
|
||||
color_conversion_params.num_destination_textures = 2;
|
||||
|
||||
if(gsr_color_conversion_init(&self->color_conversion, &color_conversion_params) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: failed to create color conversion\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
gsr_color_conversion_clear(&self->color_conversion);
|
||||
|
||||
return true;
|
||||
} else {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: unexpected fourcc %u for output drm fd, expected nv12 or p010\n", prime->fourcc);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
|
||||
unsigned int texture_id = 0;
|
||||
egl->glGenTextures(1, &texture_id);
|
||||
egl->glBindTexture(GL_TEXTURE_2D, texture_id);
|
||||
egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);
|
||||
|
||||
egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
|
||||
egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
|
||||
|
||||
egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
return texture_id;
|
||||
}
|
||||
|
||||
static bool cuda_register_opengl_texture(gsr_cuda *cuda, CUgraphicsResource *cuda_graphics_resource, CUarray *mapped_array, unsigned int texture_id) {
|
||||
CUresult res;
|
||||
res = cuda->cuGraphicsGLRegisterImage(cuda_graphics_resource, texture_id, GL_TEXTURE_2D, CU_GRAPHICS_REGISTER_FLAGS_NONE);
|
||||
if (res != CUDA_SUCCESS) {
|
||||
const char *err_str = "unknown";
|
||||
cuda->cuGetErrorString(res, &err_str);
|
||||
fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsGLRegisterImage failed, error: %s, texture " "id: %u\n", err_str, texture_id);
|
||||
return false;
|
||||
}
|
||||
|
||||
res = cuda->cuGraphicsResourceSetMapFlags(*cuda_graphics_resource, CU_GRAPHICS_MAP_RESOURCE_FLAGS_NONE);
|
||||
res = cuda->cuGraphicsMapResources(1, cuda_graphics_resource, 0);
|
||||
|
||||
res = cuda->cuGraphicsSubResourceGetMappedArray(mapped_array, *cuda_graphics_resource, 0, 0);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool gsr_capture_base_setup_cuda_textures(gsr_capture_base *self, AVFrame *frame, gsr_cuda_context *cuda_context, gsr_color_range color_range, gsr_source_color source_color, bool hdr) {
|
||||
// TODO:
|
||||
const int res = av_hwframe_get_buffer(self->video_codec_context->hw_frames_ctx, frame, 0);
|
||||
if(res < 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: av_hwframe_get_buffer failed: %d\n", res);
|
||||
return false;
|
||||
}
|
||||
|
||||
self->egl->glGenTextures(1, &self->input_texture);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
self->egl->glGenTextures(1, &self->cursor_texture);
|
||||
self->egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, self->cursor_texture);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
|
||||
const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
|
||||
const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
|
||||
const unsigned int formats[2] = { GL_RED, GL_RG };
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
self->target_textures[i] = gl_create_texture(self->egl, self->video_codec_context->width / div[i], self->video_codec_context->height / div[i], !hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
|
||||
if(self->target_textures[i] == 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create opengl texture\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
if(!cuda_register_opengl_texture(cuda_context->cuda, &cuda_context->cuda_graphics_resources[i], &cuda_context->mapped_arrays[i], self->target_textures[i])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
gsr_color_conversion_params color_conversion_params = {0};
|
||||
color_conversion_params.color_range = color_range;
|
||||
color_conversion_params.egl = self->egl;
|
||||
color_conversion_params.source_color = source_color;
|
||||
if(!hdr)
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_NV12;
|
||||
else
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_P010;
|
||||
|
||||
color_conversion_params.destination_textures[0] = self->target_textures[0];
|
||||
color_conversion_params.destination_textures[1] = self->target_textures[1];
|
||||
color_conversion_params.num_destination_textures = 2;
|
||||
color_conversion_params.load_external_image_shader = true;
|
||||
|
||||
if(gsr_color_conversion_init(&self->color_conversion, &color_conversion_params) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create color conversion\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
gsr_color_conversion_clear(&self->color_conversion);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool gsr_capture_base_setup_textures(gsr_capture_base *self, AVFrame *frame, gsr_color_range color_range, gsr_source_color source_color, bool hdr, bool cursor_texture_is_external) {
|
||||
int res = av_frame_get_buffer(frame, 1); // TODO: Align?
|
||||
if(res < 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: av_frame_get_buffer failed: %d\n", res);
|
||||
return false;
|
||||
}
|
||||
|
||||
res = av_frame_make_writable(frame);
|
||||
if(res < 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_base_setup_textures: av_frame_make_writable failed: %d\n", res);
|
||||
return false;
|
||||
}
|
||||
|
||||
self->egl->glGenTextures(1, &self->input_texture);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
const int target = cursor_texture_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
|
||||
self->egl->glGenTextures(1, &self->cursor_texture);
|
||||
self->egl->glBindTexture(target, self->cursor_texture);
|
||||
self->egl->glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->egl->glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->egl->glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->egl->glBindTexture(target, 0);
|
||||
|
||||
const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
|
||||
const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
|
||||
const unsigned int formats[2] = { GL_RED, GL_RG };
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
self->target_textures[i] = gl_create_texture(self->egl, self->video_codec_context->width / div[i], self->video_codec_context->height / div[i], !hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
|
||||
if(self->target_textures[i] == 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create opengl texture\n");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
gsr_color_conversion_params color_conversion_params = {0};
|
||||
color_conversion_params.color_range = color_range;
|
||||
color_conversion_params.egl = self->egl;
|
||||
color_conversion_params.source_color = source_color;
|
||||
if(!hdr)
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_NV12;
|
||||
else
|
||||
color_conversion_params.destination_color = GSR_DESTINATION_COLOR_P010;
|
||||
|
||||
color_conversion_params.destination_textures[0] = self->target_textures[0];
|
||||
color_conversion_params.destination_textures[1] = self->target_textures[1];
|
||||
color_conversion_params.num_destination_textures = 2;
|
||||
color_conversion_params.load_external_image_shader = true;
|
||||
|
||||
if(gsr_color_conversion_init(&self->color_conversion, &color_conversion_params) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create color conversion\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
gsr_color_conversion_clear(&self->color_conversion);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void gsr_capture_base_stop(gsr_capture_base *self) {
|
||||
gsr_color_conversion_deinit(&self->color_conversion);
|
||||
|
||||
if(self->egl->egl_context) {
|
||||
if(self->input_texture) {
|
||||
self->egl->glDeleteTextures(1, &self->input_texture);
|
||||
self->input_texture = 0;
|
||||
}
|
||||
|
||||
if(self->cursor_texture) {
|
||||
self->egl->glDeleteTextures(1, &self->cursor_texture);
|
||||
self->cursor_texture = 0;
|
||||
}
|
||||
|
||||
self->egl->glDeleteTextures(2, self->target_textures);
|
||||
self->target_textures[0] = 0;
|
||||
self->target_textures[1] = 0;
|
||||
}
|
||||
|
||||
if(self->video_codec_context->hw_device_ctx)
|
||||
av_buffer_unref(&self->video_codec_context->hw_device_ctx);
|
||||
if(self->video_codec_context->hw_frames_ctx)
|
||||
av_buffer_unref(&self->video_codec_context->hw_frames_ctx);
|
||||
}
|
||||
|
||||
bool vaapi_create_codec_context(const char *card_path, AVCodecContext *video_codec_context, int width, int height, bool hdr, VADisplay *va_dpy) {
|
||||
char render_path[128];
|
||||
if(!gsr_card_path_get_render_path(card_path, render_path)) {
|
||||
fprintf(stderr, "gsr error: failed to get /dev/dri/renderDXXX file from %s\n", card_path);
|
||||
return false;
|
||||
}
|
||||
|
||||
AVBufferRef *device_ctx;
|
||||
if(av_hwdevice_ctx_create(&device_ctx, AV_HWDEVICE_TYPE_VAAPI, render_path, NULL, 0) < 0) {
|
||||
fprintf(stderr, "Error: Failed to create hardware device context\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
|
||||
if(!frame_context) {
|
||||
fprintf(stderr, "Error: Failed to create hwframe context\n");
|
||||
av_buffer_unref(&device_ctx);
|
||||
return false;
|
||||
}
|
||||
|
||||
AVHWFramesContext *hw_frame_context =
|
||||
(AVHWFramesContext *)frame_context->data;
|
||||
hw_frame_context->width = width;
|
||||
hw_frame_context->height = height;
|
||||
hw_frame_context->sw_format = hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
|
||||
hw_frame_context->format = video_codec_context->pix_fmt;
|
||||
hw_frame_context->device_ref = device_ctx;
|
||||
hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
|
||||
|
||||
//hw_frame_context->initial_pool_size = 20;
|
||||
|
||||
AVVAAPIDeviceContext *vactx =((AVHWDeviceContext*)device_ctx->data)->hwctx;
|
||||
*va_dpy = vactx->display;
|
||||
|
||||
if (av_hwframe_ctx_init(frame_context) < 0) {
|
||||
fprintf(stderr, "Error: Failed to initialize hardware frame context "
|
||||
"(note: ffmpeg version needs to be > 4.0)\n");
|
||||
av_buffer_unref(&device_ctx);
|
||||
//av_buffer_unref(&frame_context);
|
||||
return false;
|
||||
}
|
||||
|
||||
video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
|
||||
video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool cuda_create_codec_context(CUcontext cu_ctx, AVCodecContext *video_codec_context, int width, int height, bool hdr, CUstream *cuda_stream) {
|
||||
AVBufferRef *device_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_CUDA);
|
||||
if(!device_ctx) {
|
||||
fprintf(stderr, "gsr error: cuda_create_codec_context failed: failed to create hardware device context\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
AVHWDeviceContext *hw_device_context = (AVHWDeviceContext*)device_ctx->data;
|
||||
AVCUDADeviceContext *cuda_device_context = (AVCUDADeviceContext*)hw_device_context->hwctx;
|
||||
cuda_device_context->cuda_ctx = cu_ctx;
|
||||
if(av_hwdevice_ctx_init(device_ctx) < 0) {
|
||||
fprintf(stderr, "gsr error: cuda_create_codec_context failed: failed to create hardware device context\n");
|
||||
av_buffer_unref(&device_ctx);
|
||||
return false;
|
||||
}
|
||||
|
||||
AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
|
||||
if(!frame_context) {
|
||||
fprintf(stderr, "gsr error: cuda_create_codec_context failed: failed to create hwframe context\n");
|
||||
av_buffer_unref(&device_ctx);
|
||||
return false;
|
||||
}
|
||||
|
||||
AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)frame_context->data;
|
||||
hw_frame_context->width = width;
|
||||
hw_frame_context->height = height;
|
||||
hw_frame_context->sw_format = hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
|
||||
hw_frame_context->format = video_codec_context->pix_fmt;
|
||||
hw_frame_context->device_ref = device_ctx;
|
||||
hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
|
||||
|
||||
if (av_hwframe_ctx_init(frame_context) < 0) {
|
||||
fprintf(stderr, "gsr error: cuda_create_codec_context failed: failed to initialize hardware frame context "
|
||||
"(note: ffmpeg version needs to be > 4.0)\n");
|
||||
av_buffer_unref(&device_ctx);
|
||||
//av_buffer_unref(&frame_context);
|
||||
return false;
|
||||
}
|
||||
|
||||
*cuda_stream = cuda_device_context->stream;
|
||||
video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
|
||||
video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1,19 +1,93 @@
|
||||
#include "../../include/capture/kms.h"
|
||||
#include "../../include/capture/capture.h"
|
||||
#include "../../include/utils.h"
|
||||
#include "../../include/color_conversion.h"
|
||||
#include "../../kms/client/kms_client.h"
|
||||
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/mastering_display_metadata.h>
|
||||
|
||||
#define HDMI_STATIC_METADATA_TYPE1 0
|
||||
#define HDMI_EOTF_SMPTE_ST2084 2
|
||||
|
||||
#define MAX_CONNECTOR_IDS 32
|
||||
|
||||
typedef struct {
|
||||
uint32_t connector_ids[MAX_CONNECTOR_IDS];
|
||||
int num_connector_ids;
|
||||
} MonitorId;
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms_params params;
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
|
||||
gsr_kms_client kms_client;
|
||||
gsr_kms_response kms_response;
|
||||
|
||||
vec2i capture_pos;
|
||||
vec2i capture_size;
|
||||
MonitorId monitor_id;
|
||||
|
||||
AVMasteringDisplayMetadata *mastering_display_metadata;
|
||||
AVContentLightMetadata *light_metadata;
|
||||
|
||||
gsr_monitor_rotation monitor_rotation;
|
||||
|
||||
unsigned int input_texture;
|
||||
unsigned int cursor_texture;
|
||||
} gsr_capture_kms;
|
||||
|
||||
static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
|
||||
for(int i = 0; i < self->kms_response.num_fds; ++i) {
|
||||
if(self->kms_response.fds[i].fd > 0)
|
||||
close(self->kms_response.fds[i].fd);
|
||||
self->kms_response.fds[i].fd = 0;
|
||||
}
|
||||
self->kms_response.num_fds = 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_stop(gsr_capture_kms *self) {
|
||||
if(self->input_texture) {
|
||||
self->params.egl->glDeleteTextures(1, &self->input_texture);
|
||||
self->input_texture = 0;
|
||||
}
|
||||
|
||||
if(self->cursor_texture) {
|
||||
self->params.egl->glDeleteTextures(1, &self->cursor_texture);
|
||||
self->cursor_texture = 0;
|
||||
}
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
gsr_kms_client_deinit(&self->kms_client);
|
||||
}
|
||||
|
||||
static int max_int(int a, int b) {
|
||||
return a > b ? a : b;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_create_input_textures(gsr_capture_kms *self) {
|
||||
self->params.egl->glGenTextures(1, &self->input_texture);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
self->params.egl->glGenTextures(1, &self->cursor_texture);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, self->cursor_texture);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
/* TODO: On monitor reconfiguration, find monitor x, y, width and height again. Do the same for nvfbc. */
|
||||
|
||||
typedef struct {
|
||||
@@ -39,32 +113,33 @@ static void monitor_callback(const gsr_monitor *monitor, void *userdata) {
|
||||
fprintf(stderr, "gsr warning: reached max connector ids\n");
|
||||
}
|
||||
|
||||
int gsr_capture_kms_start(gsr_capture_kms *self, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
memset(self, 0, sizeof(*self));
|
||||
self->base.video_codec_context = video_codec_context;
|
||||
self->base.egl = egl;
|
||||
static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_kms *self = cap->priv;
|
||||
|
||||
gsr_capture_kms_create_input_textures(self);
|
||||
|
||||
gsr_monitor monitor;
|
||||
self->monitor_id.num_connector_ids = 0;
|
||||
|
||||
int kms_init_res = gsr_kms_client_init(&self->kms_client, egl->card_path);
|
||||
int kms_init_res = gsr_kms_client_init(&self->kms_client, self->params.egl->card_path);
|
||||
if(kms_init_res != 0)
|
||||
return kms_init_res;
|
||||
|
||||
MonitorCallbackUserdata monitor_callback_userdata = {
|
||||
&self->monitor_id,
|
||||
display_to_capture, strlen(display_to_capture),
|
||||
self->params.display_to_capture, strlen(self->params.display_to_capture),
|
||||
0,
|
||||
};
|
||||
for_each_active_monitor_output(egl, GSR_CONNECTION_DRM, monitor_callback, &monitor_callback_userdata);
|
||||
for_each_active_monitor_output(self->params.egl, GSR_CONNECTION_DRM, monitor_callback, &monitor_callback_userdata);
|
||||
|
||||
if(!get_monitor_by_name(egl, GSR_CONNECTION_DRM, display_to_capture, &monitor)) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_start: failed to find monitor by name \"%s\"\n", display_to_capture);
|
||||
if(!get_monitor_by_name(self->params.egl, GSR_CONNECTION_DRM, self->params.display_to_capture, &monitor)) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_start: failed to find monitor by name \"%s\"\n", self->params.display_to_capture);
|
||||
gsr_capture_kms_stop(self);
|
||||
return -1;
|
||||
}
|
||||
|
||||
monitor.name = display_to_capture;
|
||||
self->monitor_rotation = drm_monitor_get_display_server_rotation(egl, &monitor);
|
||||
monitor.name = self->params.display_to_capture;
|
||||
self->monitor_rotation = drm_monitor_get_display_server_rotation(self->params.egl, &monitor);
|
||||
|
||||
self->capture_pos = monitor.pos;
|
||||
if(self->monitor_rotation == GSR_MONITOR_ROT_90 || self->monitor_rotation == GSR_MONITOR_ROT_270) {
|
||||
@@ -75,39 +150,33 @@ int gsr_capture_kms_start(gsr_capture_kms *self, const char *display_to_capture,
|
||||
}
|
||||
|
||||
/* Disable vsync */
|
||||
egl->eglSwapInterval(egl->egl_display, 0);
|
||||
self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);
|
||||
|
||||
// TODO: Move this and xcomposite equivalent to a common section unrelated to capture method
|
||||
if(egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && video_codec_context->codec_id == AV_CODEC_ID_HEVC) {
|
||||
if(self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && video_codec_context->codec_id == AV_CODEC_ID_HEVC) {
|
||||
// TODO: dont do this if using ffmpeg reports that this is not needed (AMD driver bug that was fixed recently)
|
||||
self->base.video_codec_context->width = FFALIGN(self->capture_size.x, 64);
|
||||
self->base.video_codec_context->height = FFALIGN(self->capture_size.y, 16);
|
||||
} else if(egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && video_codec_context->codec_id == AV_CODEC_ID_AV1) {
|
||||
video_codec_context->width = FFALIGN(self->capture_size.x, 64);
|
||||
video_codec_context->height = FFALIGN(self->capture_size.y, 16);
|
||||
} else if(self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && video_codec_context->codec_id == AV_CODEC_ID_AV1) {
|
||||
// TODO: Dont do this for VCN 5 and forward which should fix this hardware bug
|
||||
self->base.video_codec_context->width = FFALIGN(self->capture_size.x, 64);
|
||||
video_codec_context->width = FFALIGN(self->capture_size.x, 64);
|
||||
// AMD driver has special case handling for 1080 height to set it to 1082 instead of 1088 (1080 aligned to 16).
|
||||
// TODO: Set height to 1082 in this case, but it wont work because it will be aligned to 1088.
|
||||
if(self->capture_size.y == 1080) {
|
||||
self->base.video_codec_context->height = 1080;
|
||||
video_codec_context->height = 1080;
|
||||
} else {
|
||||
self->base.video_codec_context->height = FFALIGN(self->capture_size.y, 16);
|
||||
video_codec_context->height = FFALIGN(self->capture_size.y, 16);
|
||||
}
|
||||
} else {
|
||||
self->base.video_codec_context->width = FFALIGN(self->capture_size.x, 2);
|
||||
self->base.video_codec_context->height = FFALIGN(self->capture_size.y, 2);
|
||||
video_codec_context->width = FFALIGN(self->capture_size.x, 2);
|
||||
video_codec_context->height = FFALIGN(self->capture_size.y, 2);
|
||||
}
|
||||
|
||||
frame->width = self->base.video_codec_context->width;
|
||||
frame->height = self->base.video_codec_context->height;
|
||||
frame->width = video_codec_context->width;
|
||||
frame->height = video_codec_context->height;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void gsr_capture_kms_stop(gsr_capture_kms *self) {
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
gsr_kms_client_deinit(&self->kms_client);
|
||||
gsr_capture_base_stop(&self->base);
|
||||
}
|
||||
|
||||
static float monitor_rotation_to_radians(gsr_monitor_rotation rot) {
|
||||
switch(rot) {
|
||||
case GSR_MONITOR_ROT_0: return 0.0f;
|
||||
@@ -210,8 +279,13 @@ static vec2i swap_vec2i(vec2i value) {
|
||||
return value;
|
||||
}
|
||||
|
||||
bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bool screen_plane_use_modifiers, bool cursor_texture_is_external, bool record_cursor) {
|
||||
static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_kms *self = cap->priv;
|
||||
const bool screen_plane_use_modifiers = self->params.egl->gpu_info.vendor != GSR_GPU_VENDOR_AMD;
|
||||
const bool cursor_texture_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
|
||||
|
||||
//egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
self->params.egl->glClear(0);
|
||||
|
||||
gsr_capture_kms_cleanup_kms_fds(self);
|
||||
|
||||
@@ -221,7 +295,7 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
|
||||
if(gsr_kms_client_get_kms(&self->kms_client, &self->kms_response) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_capture: failed to get kms, error: %d (%s)\n", self->kms_response.result, self->kms_response.err_msg);
|
||||
return false;
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(self->kms_response.num_fds == 0) {
|
||||
@@ -230,7 +304,7 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
error_shown = true;
|
||||
fprintf(stderr, "gsr error: no drm found, capture will fail\n");
|
||||
}
|
||||
return false;
|
||||
return -1;
|
||||
}
|
||||
|
||||
for(int i = 0; i < self->monitor_id.num_connector_ids; ++i) {
|
||||
@@ -250,12 +324,12 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
cursor_drm_fd = find_cursor_drm(&self->kms_response);
|
||||
|
||||
if(!drm_fd)
|
||||
return false;
|
||||
return -1;
|
||||
|
||||
if(!capture_is_combined_plane && cursor_drm_fd && cursor_drm_fd->connector_id != drm_fd->connector_id)
|
||||
cursor_drm_fd = NULL;
|
||||
|
||||
if(drm_fd->has_hdr_metadata && hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
|
||||
if(drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
|
||||
gsr_kms_set_hdr_metadata(self, frame, drm_fd);
|
||||
|
||||
// TODO: This causes a crash sometimes on steam deck, why? is it a driver bug? a vaapi pure version doesn't cause a crash.
|
||||
@@ -299,11 +373,11 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
img_attr[13] = EGL_NONE;
|
||||
}
|
||||
|
||||
EGLImage image = self->base.egl->eglCreateImage(self->base.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
|
||||
self->base.egl->glBindTexture(GL_TEXTURE_2D, self->base.input_texture);
|
||||
self->base.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
|
||||
self->base.egl->eglDestroyImage(self->base.egl->egl_display, image);
|
||||
self->base.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
EGLImage image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
|
||||
self->params.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
|
||||
self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
vec2i capture_pos = self->capture_pos;
|
||||
if(!capture_is_combined_plane)
|
||||
@@ -314,12 +388,12 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
const int target_x = max_int(0, frame->width / 2 - self->capture_size.x / 2);
|
||||
const int target_y = max_int(0, frame->height / 2 - self->capture_size.y / 2);
|
||||
|
||||
gsr_color_conversion_draw(&self->base.color_conversion, self->base.input_texture,
|
||||
gsr_color_conversion_draw(color_conversion, self->input_texture,
|
||||
(vec2i){target_x, target_y}, self->capture_size,
|
||||
capture_pos, self->capture_size,
|
||||
texture_rotation, false);
|
||||
|
||||
if(record_cursor && cursor_drm_fd) {
|
||||
if(self->params.record_cursor && cursor_drm_fd) {
|
||||
const vec2i cursor_size = {cursor_drm_fd->width, cursor_drm_fd->height};
|
||||
vec2i cursor_pos = {cursor_drm_fd->x, cursor_drm_fd->y};
|
||||
switch(self->monitor_rotation) {
|
||||
@@ -361,35 +435,112 @@ bool gsr_capture_kms_capture(gsr_capture_kms *self, AVFrame *frame, bool hdr, bo
|
||||
EGL_NONE
|
||||
};
|
||||
|
||||
EGLImage cursor_image = self->base.egl->eglCreateImage(self->base.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr_cursor);
|
||||
EGLImage cursor_image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr_cursor);
|
||||
const int target = cursor_texture_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
|
||||
self->base.egl->glBindTexture(target, self->base.cursor_texture);
|
||||
self->base.egl->glEGLImageTargetTexture2DOES(target, cursor_image);
|
||||
self->base.egl->eglDestroyImage(self->base.egl->egl_display, cursor_image);
|
||||
self->base.egl->glBindTexture(target, 0);
|
||||
self->params.egl->glBindTexture(target, self->cursor_texture);
|
||||
self->params.egl->glEGLImageTargetTexture2DOES(target, cursor_image);
|
||||
self->params.egl->eglDestroyImage(self->params.egl->egl_display, cursor_image);
|
||||
self->params.egl->glBindTexture(target, 0);
|
||||
|
||||
self->base.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->base.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
|
||||
|
||||
gsr_color_conversion_draw(&self->base.color_conversion, self->base.cursor_texture,
|
||||
gsr_color_conversion_draw(color_conversion, self->cursor_texture,
|
||||
cursor_pos, cursor_size,
|
||||
(vec2i){0, 0}, cursor_size,
|
||||
texture_rotation, cursor_texture_is_external);
|
||||
|
||||
self->base.egl->glDisable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
//self->base.egl->glFlush();
|
||||
//self->base.egl->glFinish();
|
||||
self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
|
||||
|
||||
// TODO: Do software specific video encoder conversion here
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool gsr_capture_kms_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_kms *cap_kms = cap->priv;
|
||||
if(cap_kms->should_stop) {
|
||||
if(err)
|
||||
*err = cap_kms->stop_is_error;
|
||||
return true;
|
||||
}
|
||||
|
||||
void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
|
||||
for(int i = 0; i < self->kms_response.num_fds; ++i) {
|
||||
if(self->kms_response.fds[i].fd > 0)
|
||||
close(self->kms_response.fds[i].fd);
|
||||
self->kms_response.fds[i].fd = 0;
|
||||
if(err)
|
||||
*err = false;
|
||||
return false;
|
||||
}
|
||||
self->kms_response.num_fds = 0;
|
||||
|
||||
static void gsr_capture_kms_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
(void)frame;
|
||||
gsr_capture_kms_cleanup_kms_fds(cap->priv);
|
||||
}
|
||||
|
||||
static gsr_source_color gsr_capture_kms_get_source_color(gsr_capture *cap) {
|
||||
(void)cap;
|
||||
return GSR_SOURCE_COLOR_RGB;
|
||||
}
|
||||
|
||||
static bool gsr_capture_kms_uses_external_image(gsr_capture *cap) {
|
||||
gsr_capture_kms *cap_kms = cap->priv;
|
||||
return cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms *cap_kms = cap->priv;
|
||||
if(cap->priv) {
|
||||
gsr_capture_kms_stop(cap_kms);
|
||||
free((void*)cap_kms->params.display_to_capture);
|
||||
cap_kms->params.display_to_capture = NULL;
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
free(cap);
|
||||
}
|
||||
|
||||
gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_kms *cap_kms = calloc(1, sizeof(gsr_capture_kms));
|
||||
if(!cap_kms) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const char *display_to_capture = strdup(params->display_to_capture);
|
||||
if(!display_to_capture) {
|
||||
free(cap);
|
||||
free(cap_kms);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_kms->params = *params;
|
||||
cap_kms->params.display_to_capture = display_to_capture;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_start,
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_kms_should_stop,
|
||||
.capture = gsr_capture_kms_capture,
|
||||
.capture_end = gsr_capture_kms_capture_end,
|
||||
.get_source_color = gsr_capture_kms_get_source_color,
|
||||
.uses_external_image = gsr_capture_kms_uses_external_image,
|
||||
.destroy = gsr_capture_kms_destroy,
|
||||
.priv = cap_kms
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
|
||||
@@ -1,183 +0,0 @@
|
||||
#include "../../include/capture/kms_cuda.h"
|
||||
#include "../../include/capture/kms.h"
|
||||
#include "../../include/cuda.h"
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <assert.h>
|
||||
#include <libavutil/hwcontext.h>
|
||||
#include <libavutil/hwcontext_cuda.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms kms;
|
||||
|
||||
gsr_capture_kms_cuda_params params;
|
||||
|
||||
gsr_cuda cuda;
|
||||
CUgraphicsResource cuda_graphics_resources[2];
|
||||
CUarray mapped_arrays[2];
|
||||
CUstream cuda_stream;
|
||||
} gsr_capture_kms_cuda;
|
||||
|
||||
static void gsr_capture_kms_cuda_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
static int gsr_capture_kms_cuda_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
|
||||
const int res = gsr_capture_kms_start(&cap_kms->kms, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context, frame);
|
||||
if(res != 0) {
|
||||
gsr_capture_kms_cuda_stop(cap, video_codec_context);
|
||||
return res;
|
||||
}
|
||||
|
||||
// TODO: overclocking is not supported on wayland...
|
||||
if(!gsr_cuda_load(&cap_kms->cuda, NULL, false)) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_cuda_start: failed to load cuda\n");
|
||||
gsr_capture_kms_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!cuda_create_codec_context(cap_kms->cuda.cu_ctx, video_codec_context, video_codec_context->width, video_codec_context->height, cap_kms->params.hdr, &cap_kms->cuda_stream)) {
|
||||
gsr_capture_kms_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
gsr_cuda_context cuda_context = {
|
||||
.cuda = &cap_kms->cuda,
|
||||
.cuda_graphics_resources = cap_kms->cuda_graphics_resources,
|
||||
.mapped_arrays = cap_kms->mapped_arrays
|
||||
};
|
||||
|
||||
if(!gsr_capture_base_setup_cuda_textures(&cap_kms->kms.base, frame, &cuda_context, cap_kms->params.color_range, GSR_SOURCE_COLOR_RGB, cap_kms->params.hdr)) {
|
||||
gsr_capture_kms_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool gsr_capture_kms_cuda_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
if(cap_kms->kms.should_stop) {
|
||||
if(err)
|
||||
*err = cap_kms->kms.stop_is_error;
|
||||
return true;
|
||||
}
|
||||
|
||||
if(err)
|
||||
*err = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_unload_cuda_graphics(gsr_capture_kms_cuda *cap_kms) {
|
||||
if(cap_kms->cuda.cu_ctx) {
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
if(cap_kms->cuda_graphics_resources[i]) {
|
||||
cap_kms->cuda.cuGraphicsUnmapResources(1, &cap_kms->cuda_graphics_resources[i], 0);
|
||||
cap_kms->cuda.cuGraphicsUnregisterResource(cap_kms->cuda_graphics_resources[i]);
|
||||
cap_kms->cuda_graphics_resources[i] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static int gsr_capture_kms_cuda_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
|
||||
cap_kms->kms.base.egl->glClear(0);
|
||||
gsr_capture_kms_capture(&cap_kms->kms, frame, cap_kms->params.hdr, true, true, cap_kms->params.record_cursor);
|
||||
cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
|
||||
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
memcpy_struct.srcXInBytes = 0;
|
||||
memcpy_struct.srcY = 0;
|
||||
memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
|
||||
|
||||
memcpy_struct.dstXInBytes = 0;
|
||||
memcpy_struct.dstY = 0;
|
||||
memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
|
||||
|
||||
memcpy_struct.srcArray = cap_kms->mapped_arrays[i];
|
||||
memcpy_struct.srcPitch = frame->width / div[i];
|
||||
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
|
||||
memcpy_struct.dstPitch = frame->linesize[i];
|
||||
memcpy_struct.WidthInBytes = frame->width * (cap_kms->params.hdr ? 2 : 1);
|
||||
memcpy_struct.Height = frame->height / div[i];
|
||||
// TODO: Remove this copy if possible
|
||||
cap_kms->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, cap_kms->cuda_stream);
|
||||
}
|
||||
|
||||
// TODO: needed?
|
||||
cap_kms->cuda.cuStreamSynchronize(cap_kms->cuda_stream);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_cuda_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
(void)frame;
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
gsr_capture_kms_cleanup_kms_fds(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_cuda_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
gsr_capture_kms_unload_cuda_graphics(cap_kms);
|
||||
gsr_cuda_unload(&cap_kms->cuda);
|
||||
gsr_capture_kms_stop(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_cuda_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_cuda *cap_kms = cap->priv;
|
||||
if(cap->priv) {
|
||||
gsr_capture_kms_cuda_stop(cap, video_codec_context);
|
||||
free((void*)cap_kms->params.display_to_capture);
|
||||
cap_kms->params.display_to_capture = NULL;
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
free(cap);
|
||||
}
|
||||
|
||||
gsr_capture* gsr_capture_kms_cuda_create(const gsr_capture_kms_cuda_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_cuda_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_kms_cuda *cap_kms = calloc(1, sizeof(gsr_capture_kms_cuda));
|
||||
if(!cap_kms) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const char *display_to_capture = strdup(params->display_to_capture);
|
||||
if(!display_to_capture) {
|
||||
free(cap);
|
||||
free(cap_kms);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_kms->params = *params;
|
||||
cap_kms->params.display_to_capture = display_to_capture;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_cuda_start,
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_kms_cuda_should_stop,
|
||||
.capture = gsr_capture_kms_cuda_capture,
|
||||
.capture_end = gsr_capture_kms_cuda_capture_end,
|
||||
.destroy = gsr_capture_kms_cuda_destroy,
|
||||
.priv = cap_kms
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
@@ -1,129 +0,0 @@
|
||||
#include "../../include/capture/kms_software.h"
|
||||
#include "../../include/capture/kms.h"
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <assert.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms kms;
|
||||
gsr_capture_kms_software_params params;
|
||||
} gsr_capture_kms_software;
|
||||
|
||||
static void gsr_capture_kms_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
#define GL_DYNAMIC_READ 0x88E9
|
||||
|
||||
static int gsr_capture_kms_software_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
|
||||
const int res = gsr_capture_kms_start(&cap_kms->kms, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context, frame);
|
||||
if(res != 0) {
|
||||
gsr_capture_kms_software_stop(cap, video_codec_context);
|
||||
return res;
|
||||
}
|
||||
|
||||
if(!gsr_capture_base_setup_textures(&cap_kms->kms.base, frame, cap_kms->params.color_range, GSR_SOURCE_COLOR_RGB, cap_kms->params.hdr, cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA)) {
|
||||
gsr_capture_kms_software_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool gsr_capture_kms_software_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
if(cap_kms->kms.should_stop) {
|
||||
if(err)
|
||||
*err = cap_kms->kms.stop_is_error;
|
||||
return true;
|
||||
}
|
||||
|
||||
if(err)
|
||||
*err = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
static int gsr_capture_kms_software_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
|
||||
cap_kms->kms.base.egl->glClear(0);
|
||||
gsr_capture_kms_capture(&cap_kms->kms, frame, cap_kms->params.hdr, cap_kms->params.egl->gpu_info.vendor != GSR_GPU_VENDOR_AMD, cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA, cap_kms->params.record_cursor);
|
||||
|
||||
// TODO: hdr support
|
||||
const unsigned int formats[2] = { GL_RED, GL_RG };
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
cap_kms->params.egl->glBindTexture(GL_TEXTURE_2D, cap_kms->kms.base.target_textures[i]);
|
||||
cap_kms->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
|
||||
}
|
||||
cap_kms->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_software_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
(void)frame;
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
gsr_capture_kms_cleanup_kms_fds(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
gsr_capture_kms_stop(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_software_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_software *cap_kms = cap->priv;
|
||||
if(cap->priv) {
|
||||
gsr_capture_kms_software_stop(cap, video_codec_context);
|
||||
free((void*)cap_kms->params.display_to_capture);
|
||||
cap_kms->params.display_to_capture = NULL;
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
free(cap);
|
||||
}
|
||||
|
||||
gsr_capture* gsr_capture_kms_software_create(const gsr_capture_kms_software_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_software_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_kms_software *cap_kms = calloc(1, sizeof(gsr_capture_kms_software));
|
||||
if(!cap_kms) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const char *display_to_capture = strdup(params->display_to_capture);
|
||||
if(!display_to_capture) {
|
||||
free(cap);
|
||||
free(cap_kms);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_kms->params = *params;
|
||||
cap_kms->params.display_to_capture = display_to_capture;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_software_start,
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_kms_software_should_stop,
|
||||
.capture = gsr_capture_kms_software_capture,
|
||||
.capture_end = gsr_capture_kms_software_capture_end,
|
||||
.destroy = gsr_capture_kms_software_destroy,
|
||||
.priv = cap_kms
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
#include "../../include/capture/kms_vaapi.h"
|
||||
#include "../../include/capture/kms.h"
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <assert.h>
|
||||
#include <libavutil/hwcontext.h>
|
||||
#include <libavutil/hwcontext_vaapi.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <va/va_drmcommon.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_kms kms;
|
||||
|
||||
gsr_capture_kms_vaapi_params params;
|
||||
|
||||
VADisplay va_dpy;
|
||||
VADRMPRIMESurfaceDescriptor prime;
|
||||
} gsr_capture_kms_vaapi;
|
||||
|
||||
static void gsr_capture_kms_vaapi_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
static int gsr_capture_kms_vaapi_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
|
||||
int res = gsr_capture_kms_start(&cap_kms->kms, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context, frame);
|
||||
if(res != 0) {
|
||||
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
|
||||
return res;
|
||||
}
|
||||
|
||||
if(!vaapi_create_codec_context(cap_kms->params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, cap_kms->params.hdr, &cap_kms->va_dpy)) {
|
||||
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!gsr_capture_base_setup_vaapi_textures(&cap_kms->kms.base, frame, cap_kms->va_dpy, &cap_kms->prime, cap_kms->params.color_range)) {
|
||||
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool gsr_capture_kms_vaapi_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
if(cap_kms->kms.should_stop) {
|
||||
if(err)
|
||||
*err = cap_kms->kms.stop_is_error;
|
||||
return true;
|
||||
}
|
||||
|
||||
if(err)
|
||||
*err = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
static int gsr_capture_kms_vaapi_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
cap_kms->kms.base.egl->glClear(0);
|
||||
gsr_capture_kms_capture(&cap_kms->kms, frame, cap_kms->params.hdr, cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_INTEL, false, cap_kms->params.record_cursor);
|
||||
cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_vaapi_capture_end(gsr_capture *cap, AVFrame *frame) {
|
||||
(void)frame;
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
gsr_capture_kms_cleanup_kms_fds(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_vaapi_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
|
||||
for(uint32_t i = 0; i < cap_kms->prime.num_objects; ++i) {
|
||||
if(cap_kms->prime.objects[i].fd > 0) {
|
||||
close(cap_kms->prime.objects[i].fd);
|
||||
cap_kms->prime.objects[i].fd = 0;
|
||||
}
|
||||
}
|
||||
|
||||
gsr_capture_kms_stop(&cap_kms->kms);
|
||||
}
|
||||
|
||||
static void gsr_capture_kms_vaapi_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_kms_vaapi *cap_kms = cap->priv;
|
||||
if(cap->priv) {
|
||||
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
|
||||
free((void*)cap_kms->params.display_to_capture);
|
||||
cap_kms->params.display_to_capture = NULL;
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
free(cap);
|
||||
}
|
||||
|
||||
gsr_capture* gsr_capture_kms_vaapi_create(const gsr_capture_kms_vaapi_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_vaapi_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_kms_vaapi *cap_kms = calloc(1, sizeof(gsr_capture_kms_vaapi));
|
||||
if(!cap_kms) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const char *display_to_capture = strdup(params->display_to_capture);
|
||||
if(!display_to_capture) {
|
||||
/* TODO XCloseDisplay */
|
||||
free(cap);
|
||||
free(cap_kms);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_kms->params = *params;
|
||||
cap_kms->params.display_to_capture = display_to_capture;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_kms_vaapi_start,
|
||||
.tick = NULL,
|
||||
.should_stop = gsr_capture_kms_vaapi_should_stop,
|
||||
.capture = gsr_capture_kms_vaapi_capture,
|
||||
.capture_end = gsr_capture_kms_vaapi_capture_end,
|
||||
.destroy = gsr_capture_kms_vaapi_destroy,
|
||||
.priv = cap_kms
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
@@ -3,20 +3,18 @@
|
||||
#include "../../include/cuda.h"
|
||||
#include "../../include/egl.h"
|
||||
#include "../../include/utils.h"
|
||||
#include "../../include/color_conversion.h"
|
||||
|
||||
#include <dlfcn.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <math.h>
|
||||
|
||||
#include <X11/Xlib.h>
|
||||
#include <libavutil/hwcontext.h>
|
||||
#include <libavutil/hwcontext_cuda.h>
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavutil/version.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_base base;
|
||||
gsr_capture_nvfbc_params params;
|
||||
void *library;
|
||||
|
||||
@@ -296,7 +294,6 @@ static int gsr_capture_nvfbc_setup_session(gsr_capture_nvfbc *cap_nvfbc) {
|
||||
|
||||
static void gsr_capture_nvfbc_stop(gsr_capture_nvfbc *cap_nvfbc) {
|
||||
gsr_capture_nvfbc_destroy_session_and_handle(cap_nvfbc);
|
||||
gsr_capture_base_stop(&cap_nvfbc->base);
|
||||
gsr_cuda_unload(&cap_nvfbc->cuda);
|
||||
if(cap_nvfbc->library) {
|
||||
dlclose(cap_nvfbc->library);
|
||||
@@ -311,9 +308,6 @@ static void gsr_capture_nvfbc_stop(gsr_capture_nvfbc *cap_nvfbc) {
|
||||
static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_nvfbc *cap_nvfbc = cap->priv;
|
||||
|
||||
cap_nvfbc->base.video_codec_context = video_codec_context;
|
||||
cap_nvfbc->base.egl = cap_nvfbc->params.egl;
|
||||
|
||||
if(!cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!gsr_cuda_load(&cap_nvfbc->cuda, cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.overclock))
|
||||
return -1;
|
||||
@@ -375,27 +369,6 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
|
||||
frame->width = video_codec_context->width;
|
||||
frame->height = video_codec_context->height;
|
||||
|
||||
if(cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!gsr_capture_base_setup_textures(&cap_nvfbc->base, frame, cap_nvfbc->params.color_range, GSR_SOURCE_COLOR_BGR, cap_nvfbc->params.hdr, true)) {
|
||||
goto error_cleanup;
|
||||
}
|
||||
} else {
|
||||
if(!cap_nvfbc->params.use_software_video_encoder) {
|
||||
if(!cuda_create_codec_context(cap_nvfbc->cuda.cu_ctx, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_nvfbc->cuda_stream))
|
||||
goto error_cleanup;
|
||||
}
|
||||
|
||||
gsr_cuda_context cuda_context = {
|
||||
.cuda = &cap_nvfbc->cuda,
|
||||
.cuda_graphics_resources = cap_nvfbc->cuda_graphics_resources,
|
||||
.mapped_arrays = cap_nvfbc->mapped_arrays
|
||||
};
|
||||
|
||||
// TODO: Remove this, it creates shit we dont need
|
||||
if(!gsr_capture_base_setup_cuda_textures(&cap_nvfbc->base, frame, &cuda_context, cap_nvfbc->params.color_range, GSR_SOURCE_COLOR_BGR, cap_nvfbc->params.hdr)) {
|
||||
goto error_cleanup;
|
||||
}
|
||||
}
|
||||
/* Disable vsync */
|
||||
set_vertical_sync_enabled(cap_nvfbc->params.egl, 0);
|
||||
|
||||
@@ -406,7 +379,7 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
|
||||
return -1;
|
||||
}
|
||||
|
||||
static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_nvfbc *cap_nvfbc = cap->priv;
|
||||
|
||||
const double nvfbc_recreate_retry_time_seconds = 1.0;
|
||||
@@ -453,53 +426,21 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
//cap_nvfbc->params.egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
cap_nvfbc->params.egl->glClear(0);
|
||||
|
||||
gsr_color_conversion_draw(&cap_nvfbc->base.color_conversion, cap_nvfbc->setup_params.dwTextures[grab_params.dwTextureIndex],
|
||||
gsr_color_conversion_draw(color_conversion, cap_nvfbc->setup_params.dwTextures[grab_params.dwTextureIndex],
|
||||
(vec2i){0, 0}, (vec2i){frame->width, frame->height},
|
||||
(vec2i){0, 0}, (vec2i){frame->width, frame->height},
|
||||
0.0f, false);
|
||||
|
||||
if(cap_nvfbc->params.use_software_video_encoder) {
|
||||
// TODO: Hdr?
|
||||
const unsigned int formats[2] = { GL_RED, GL_RG };
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
cap_nvfbc->params.egl->glBindTexture(GL_TEXTURE_2D, cap_nvfbc->base.target_textures[i]);
|
||||
cap_nvfbc->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
|
||||
}
|
||||
cap_nvfbc->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
cap_nvfbc->params.egl->glXSwapBuffers(cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.egl->x11.window);
|
||||
} else {
|
||||
cap_nvfbc->params.egl->glXSwapBuffers(cap_nvfbc->params.egl->x11.dpy, cap_nvfbc->params.egl->x11.window);
|
||||
|
||||
// TODO: HDR is broken
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
memcpy_struct.srcXInBytes = 0;
|
||||
memcpy_struct.srcY = 0;
|
||||
memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
|
||||
|
||||
memcpy_struct.dstXInBytes = 0;
|
||||
memcpy_struct.dstY = 0;
|
||||
memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
|
||||
|
||||
memcpy_struct.srcArray = cap_nvfbc->mapped_arrays[i];
|
||||
memcpy_struct.srcPitch = frame->width / div[i];
|
||||
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
|
||||
memcpy_struct.dstPitch = frame->linesize[i];
|
||||
memcpy_struct.WidthInBytes = frame->width * (cap_nvfbc->params.hdr ? 2 : 1);
|
||||
memcpy_struct.Height = frame->height / div[i];
|
||||
// TODO: Remove this copy if possible
|
||||
cap_nvfbc->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, cap_nvfbc->cuda_stream);
|
||||
}
|
||||
|
||||
// TODO: needed?
|
||||
cap_nvfbc->cuda.cuStreamSynchronize(cap_nvfbc->cuda_stream);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static gsr_source_color gsr_capture_nvfbc_get_source_color(gsr_capture *cap) {
|
||||
(void)cap;
|
||||
return GSR_SOURCE_COLOR_BGR;
|
||||
}
|
||||
|
||||
static void gsr_capture_nvfbc_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_nvfbc *cap_nvfbc = cap->priv;
|
||||
@@ -545,6 +486,8 @@ gsr_capture* gsr_capture_nvfbc_create(const gsr_capture_nvfbc_params *params) {
|
||||
.should_stop = NULL,
|
||||
.capture = gsr_capture_nvfbc_capture,
|
||||
.capture_end = NULL,
|
||||
.get_source_color = gsr_capture_nvfbc_get_source_color,
|
||||
.uses_external_image = NULL,
|
||||
.destroy = gsr_capture_nvfbc_destroy,
|
||||
.priv = cap_nvfbc
|
||||
};
|
||||
|
||||
@@ -1,28 +1,62 @@
|
||||
#include "../../include/capture/xcomposite.h"
|
||||
#include "../../include/window_texture.h"
|
||||
#include "../../include/utils.h"
|
||||
#include "../../include/cursor.h"
|
||||
#include "../../include/color_conversion.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
|
||||
#include <X11/Xlib.h>
|
||||
#include <X11/extensions/Xdamage.h>
|
||||
#include <libavutil/hwcontext.h>
|
||||
#include <libavutil/hwcontext.h>
|
||||
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <va/va.h>
|
||||
#include <va/va_drmcommon.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite_params params;
|
||||
XEvent xev;
|
||||
|
||||
bool should_stop;
|
||||
bool stop_is_error;
|
||||
bool window_resized;
|
||||
bool follow_focused_initialized;
|
||||
|
||||
Window window;
|
||||
vec2i window_size;
|
||||
vec2i texture_size;
|
||||
double window_resize_timer;
|
||||
|
||||
WindowTexture window_texture;
|
||||
|
||||
Atom net_active_window_atom;
|
||||
|
||||
gsr_cursor cursor;
|
||||
|
||||
int damage_event;
|
||||
int damage_error;
|
||||
XID damage;
|
||||
bool damaged;
|
||||
|
||||
bool clear_background;
|
||||
} gsr_capture_xcomposite;
|
||||
|
||||
static void gsr_capture_xcomposite_stop(gsr_capture_xcomposite *self) {
|
||||
if(self->damage) {
|
||||
XDamageDestroy(self->params.egl->x11.dpy, self->damage);
|
||||
self->damage = None;
|
||||
}
|
||||
|
||||
window_texture_deinit(&self->window_texture);
|
||||
gsr_cursor_deinit(&self->cursor);
|
||||
}
|
||||
|
||||
static int max_int(int a, int b) {
|
||||
return a > b ? a : b;
|
||||
}
|
||||
|
||||
void gsr_capture_xcomposite_init(gsr_capture_xcomposite *self, const gsr_capture_xcomposite_params *params) {
|
||||
memset(self, 0, sizeof(*self));
|
||||
self->params = *params;
|
||||
}
|
||||
|
||||
static Window get_focused_window(Display *display, Atom net_active_window_atom) {
|
||||
Atom type;
|
||||
int format = 0;
|
||||
@@ -54,9 +88,8 @@ static void gsr_capture_xcomposite_setup_damage(gsr_capture_xcomposite *self, Wi
|
||||
}
|
||||
}
|
||||
|
||||
int gsr_capture_xcomposite_start(gsr_capture_xcomposite *self, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
self->base.video_codec_context = video_codec_context;
|
||||
self->base.egl = self->params.egl;
|
||||
static int gsr_capture_xcomposite_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
|
||||
if(self->params.follow_focused) {
|
||||
self->net_active_window_atom = XInternAtom(self->params.egl->x11.dpy, "_NET_ACTIVE_WINDOW", False);
|
||||
@@ -161,21 +194,9 @@ int gsr_capture_xcomposite_start(gsr_capture_xcomposite *self, AVCodecContext *v
|
||||
return 0;
|
||||
}
|
||||
|
||||
void gsr_capture_xcomposite_stop(gsr_capture_xcomposite *self) {
|
||||
if(self->damage) {
|
||||
XDamageDestroy(self->params.egl->x11.dpy, self->damage);
|
||||
self->damage = None;
|
||||
}
|
||||
|
||||
window_texture_deinit(&self->window_texture);
|
||||
gsr_cursor_deinit(&self->cursor);
|
||||
gsr_capture_base_stop(&self->base);
|
||||
}
|
||||
|
||||
void gsr_capture_xcomposite_tick(gsr_capture_xcomposite *self, AVCodecContext *video_codec_context) {
|
||||
static void gsr_capture_xcomposite_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
//self->params.egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
self->params.egl->glClear(0);
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
|
||||
bool init_new_window = false;
|
||||
while(XPending(self->params.egl->x11.dpy)) {
|
||||
@@ -280,20 +301,23 @@ void gsr_capture_xcomposite_tick(gsr_capture_xcomposite *self, AVCodecContext *v
|
||||
self->params.egl->glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &self->texture_size.y);
|
||||
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
gsr_color_conversion_clear(&self->base.color_conversion);
|
||||
self->clear_background = true;
|
||||
gsr_capture_xcomposite_setup_damage(self, self->window);
|
||||
}
|
||||
}
|
||||
|
||||
bool gsr_capture_xcomposite_is_damaged(gsr_capture_xcomposite *self) {
|
||||
static bool gsr_capture_xcomposite_is_damaged(gsr_capture *cap) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
return self->damage_event ? self->damaged : true;
|
||||
}
|
||||
|
||||
void gsr_capture_xcomposite_clear_damage(gsr_capture_xcomposite *self) {
|
||||
static void gsr_capture_xcomposite_clear_damage(gsr_capture *cap) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
self->damaged = false;
|
||||
}
|
||||
|
||||
bool gsr_capture_xcomposite_should_stop(gsr_capture_xcomposite *self, bool *err) {
|
||||
static bool gsr_capture_xcomposite_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
if(self->should_stop) {
|
||||
if(err)
|
||||
*err = self->stop_is_error;
|
||||
@@ -305,9 +329,18 @@ bool gsr_capture_xcomposite_should_stop(gsr_capture_xcomposite *self, bool *err)
|
||||
return false;
|
||||
}
|
||||
|
||||
int gsr_capture_xcomposite_capture(gsr_capture_xcomposite *self, AVFrame *frame) {
|
||||
static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
|
||||
gsr_capture_xcomposite *self = cap->priv;
|
||||
(void)frame;
|
||||
|
||||
//self->params.egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
self->params.egl->glClear(0);
|
||||
|
||||
if(self->clear_background) {
|
||||
self->clear_background = false;
|
||||
gsr_color_conversion_clear(color_conversion);
|
||||
}
|
||||
|
||||
const int target_x = max_int(0, frame->width / 2 - self->texture_size.x / 2);
|
||||
const int target_y = max_int(0, frame->height / 2 - self->texture_size.y / 2);
|
||||
|
||||
@@ -316,7 +349,7 @@ int gsr_capture_xcomposite_capture(gsr_capture_xcomposite *self, AVFrame *frame)
|
||||
target_y + self->cursor.position.y - self->cursor.hotspot.y
|
||||
};
|
||||
|
||||
gsr_color_conversion_draw(&self->base.color_conversion, window_texture_get_opengl_texture_id(&self->window_texture),
|
||||
gsr_color_conversion_draw(color_conversion, window_texture_get_opengl_texture_id(&self->window_texture),
|
||||
(vec2i){target_x, target_y}, self->texture_size,
|
||||
(vec2i){0, 0}, self->texture_size,
|
||||
0.0f, false);
|
||||
@@ -331,20 +364,74 @@ int gsr_capture_xcomposite_capture(gsr_capture_xcomposite *self, AVFrame *frame)
|
||||
cursor_pos.y <= target_y + self->texture_size.y;
|
||||
|
||||
if(cursor_inside_window) {
|
||||
self->base.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->base.egl->glScissor(target_x, target_y, self->texture_size.x, self->texture_size.y);
|
||||
self->params.egl->glEnable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glScissor(target_x, target_y, self->texture_size.x, self->texture_size.y);
|
||||
|
||||
gsr_color_conversion_draw(&self->base.color_conversion, self->cursor.texture_id,
|
||||
gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
|
||||
cursor_pos, self->cursor.size,
|
||||
(vec2i){0, 0}, self->cursor.size,
|
||||
0.0f, false);
|
||||
|
||||
self->base.egl->glDisable(GL_SCISSOR_TEST);
|
||||
self->params.egl->glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
}
|
||||
|
||||
self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
|
||||
|
||||
// TODO: Do video encoder specific conversion here
|
||||
|
||||
//self->params.egl->glFlush();
|
||||
//self->params.egl->glFinish();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static gsr_source_color gsr_capture_xcomposite_get_source_color(gsr_capture *cap) {
|
||||
(void)cap;
|
||||
return GSR_SOURCE_COLOR_RGB;
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
if(cap->priv) {
|
||||
gsr_capture_xcomposite_stop(cap->priv);
|
||||
free(cap->priv);
|
||||
cap->priv = NULL;
|
||||
}
|
||||
free(cap);
|
||||
}
|
||||
|
||||
gsr_capture* gsr_capture_xcomposite_create(const gsr_capture_xcomposite_params *params) {
|
||||
if(!params) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_xcomposite_create params is NULL\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
gsr_capture *cap = calloc(1, sizeof(gsr_capture));
|
||||
if(!cap)
|
||||
return NULL;
|
||||
|
||||
gsr_capture_xcomposite *cap_xcomp = calloc(1, sizeof(gsr_capture_xcomposite));
|
||||
if(!cap_xcomp) {
|
||||
free(cap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
cap_xcomp->params = *params;
|
||||
|
||||
*cap = (gsr_capture) {
|
||||
.start = gsr_capture_xcomposite_start,
|
||||
.tick = gsr_capture_xcomposite_tick,
|
||||
.is_damaged = gsr_capture_xcomposite_is_damaged,
|
||||
.clear_damage = gsr_capture_xcomposite_clear_damage,
|
||||
.should_stop = gsr_capture_xcomposite_should_stop,
|
||||
.capture = gsr_capture_xcomposite_capture,
|
||||
.capture_end = NULL,
|
||||
.get_source_color = gsr_capture_xcomposite_get_source_color,
|
||||
.uses_external_image = NULL,
|
||||
.destroy = gsr_capture_xcomposite_destroy,
|
||||
.priv = cap_xcomp
|
||||
};
|
||||
|
||||
return cap;
|
||||
}
|
||||
|
||||
@@ -1,169 +0,0 @@
|
||||
#include "../../include/capture/xcomposite_cuda.h"
|
||||
#include "../../include/cuda.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite xcomposite;
|
||||
bool overclock;
|
||||
|
||||
gsr_cuda cuda;
|
||||
CUgraphicsResource cuda_graphics_resources[2];
|
||||
CUarray mapped_arrays[2];
|
||||
CUstream cuda_stream;
|
||||
} gsr_capture_xcomposite_cuda;
|
||||
|
||||
static void gsr_capture_xcomposite_cuda_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
static int gsr_capture_xcomposite_cuda_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
|
||||
const int res = gsr_capture_xcomposite_start(&cap_xcomp->xcomposite, video_codec_context, frame);
|
||||
if(res != 0) {
|
||||
gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
|
||||
return res;
|
||||
}
|
||||
|
||||
if(!gsr_cuda_load(&cap_xcomp->cuda, cap_xcomp->xcomposite.params.egl->x11.dpy, cap_xcomp->overclock)) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_cuda_start: failed to load cuda\n");
|
||||
gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!cuda_create_codec_context(cap_xcomp->cuda.cu_ctx, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_xcomp->cuda_stream)) {
|
||||
gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
gsr_cuda_context cuda_context = {
|
||||
.cuda = &cap_xcomp->cuda,
|
||||
.cuda_graphics_resources = cap_xcomp->cuda_graphics_resources,
|
||||
.mapped_arrays = cap_xcomp->mapped_arrays
|
||||
};
|
||||
|
||||
if(!gsr_capture_base_setup_cuda_textures(&cap_xcomp->xcomposite.base, frame, &cuda_context, cap_xcomp->xcomposite.params.color_range, GSR_SOURCE_COLOR_RGB, false)) {
|
||||
gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_unload_cuda_graphics(gsr_capture_xcomposite_cuda *cap_xcomp) {
|
||||
if(cap_xcomp->cuda.cu_ctx) {
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
if(cap_xcomp->cuda_graphics_resources[i]) {
|
||||
cap_xcomp->cuda.cuGraphicsUnmapResources(1, &cap_xcomp->cuda_graphics_resources[i], 0);
|
||||
cap_xcomp->cuda.cuGraphicsUnregisterResource(cap_xcomp->cuda_graphics_resources[i]);
|
||||
cap_xcomp->cuda_graphics_resources[i] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_cuda_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
(void)video_codec_context;
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
gsr_capture_xcomposite_stop(&cap_xcomp->xcomposite);
|
||||
gsr_capture_xcomposite_unload_cuda_graphics(cap_xcomp);
|
||||
gsr_cuda_unload(&cap_xcomp->cuda);
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_cuda_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
gsr_capture_xcomposite_tick(&cap_xcomp->xcomposite, video_codec_context);
|
||||
}
|
||||
|
||||
static bool gsr_capture_xcomposite_cuda_is_damaged(gsr_capture *cap) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
return gsr_capture_xcomposite_is_damaged(&cap_xcomp->xcomposite);
|
||||
}
|
||||
|
||||
static void gsr_capture_xcomposite_cuda_clear_damage(gsr_capture *cap) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
gsr_capture_xcomposite_clear_damage(&cap_xcomp->xcomposite);
|
||||
}
|
||||
|
||||
static bool gsr_capture_xcomposite_cuda_should_stop(gsr_capture *cap, bool *err) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
return gsr_capture_xcomposite_should_stop(&cap_xcomp->xcomposite, err);
|
||||
}
|
||||
|
||||
static int gsr_capture_xcomposite_cuda_capture(gsr_capture *cap, AVFrame *frame) {
|
||||
gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
|
||||
|
||||
gsr_capture_xcomposite_capture(&cap_xcomp->xcomposite, frame);
|
||||
|
||||
cap_xcomp->xcomposite.params.egl->eglSwapBuffers(cap_xcomp->xcomposite.params.egl->egl_display, cap_xcomp->xcomposite.params.egl->egl_surface);
|
||||
|
||||
const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
|
||||
for(int i = 0; i < 2; ++i) {
|
||||
CUDA_MEMCPY2D memcpy_struct;
|
||||
memcpy_struct.srcXInBytes = 0;
|
||||
memcpy_struct.srcY = 0;
|
||||
memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
|
||||
|
||||
memcpy_struct.dstXInBytes = 0;
|
||||
memcpy_struct.dstY = 0;
|
||||
memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
|
||||
|
||||
memcpy_struct.srcArray = cap_xcomp->mapped_arrays[i];
|
||||
memcpy_struct.srcPitch = frame->width / div[i];
|
||||
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
|
||||
memcpy_struct.dstPitch = frame->linesize[i];
|
||||
memcpy_struct.WidthInBytes = frame->width;
|
||||
memcpy_struct.Height = frame->height / div[i];
|
||||
// TODO: Remove this copy if possible
|
||||
cap_xcomp->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, cap_xcomp->cuda_stream);
|
||||
}
|
||||
|
||||
// TODO: needed?
|
||||
cap_xcomp->cuda.cuStreamSynchronize(cap_xcomp->cuda_stream);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Tear down the capture: stop it first (stop reads cap->priv),
// then release the private state and the capture object itself.
static void gsr_capture_xcomposite_cuda_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
    void *private_data = cap->priv;
    if(private_data) {
        gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
        cap->priv = NULL;
        free(private_data);
    }
    free(cap);
}
|
||||
|
||||
// Allocate and wire up an xcomposite + CUDA capture instance.
// Returns NULL if params is NULL or any allocation fails.
// Ownership: the returned capture owns its priv data; both are
// released by gsr_capture_xcomposite_cuda_destroy.
gsr_capture* gsr_capture_xcomposite_cuda_create(const gsr_capture_xcomposite_cuda_params *params) {
    if(!params) {
        fprintf(stderr, "gsr error: gsr_capture_xcomposite_cuda_create params is NULL\n");
        return NULL;
    }

    gsr_capture *cap = calloc(1, sizeof(gsr_capture));
    if(!cap)
        return NULL;

    gsr_capture_xcomposite_cuda *cap_xcomp = calloc(1, sizeof(gsr_capture_xcomposite_cuda));
    if(!cap_xcomp) {
        free(cap);
        return NULL;
    }

    gsr_capture_xcomposite_init(&cap_xcomp->xcomposite, &params->base);
    cap_xcomp->overclock = params->overclock;

    // Vtable wiring; capture_end intentionally unused for this backend.
    *cap = (gsr_capture) {
        .start = gsr_capture_xcomposite_cuda_start,
        .tick = gsr_capture_xcomposite_cuda_tick,
        .is_damaged = gsr_capture_xcomposite_cuda_is_damaged,
        .clear_damage = gsr_capture_xcomposite_cuda_clear_damage,
        .should_stop = gsr_capture_xcomposite_cuda_should_stop,
        .capture = gsr_capture_xcomposite_cuda_capture,
        .capture_end = NULL,
        .destroy = gsr_capture_xcomposite_cuda_destroy,
        .priv = cap_xcomp
    };

    return cap;
}
|
||||
@@ -1,113 +0,0 @@
|
||||
#include "../../include/capture/xcomposite_software.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <libavutil/frame.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite xcomposite;
|
||||
} gsr_capture_xcomposite_software;
|
||||
|
||||
static void gsr_capture_xcomposite_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
// Start the xcomposite capture for the software (CPU readback) encoding path
// and create the RGB-sourced target textures.
// Returns 0 on success; on failure any partially-initialized state is
// stopped before returning the error code.
static int gsr_capture_xcomposite_software_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_capture_xcomposite_software *cap_xcomp = cap->priv;

    const int res = gsr_capture_xcomposite_start(&cap_xcomp->xcomposite, video_codec_context, frame);
    if(res != 0) {
        gsr_capture_xcomposite_software_stop(cap, video_codec_context);
        return res;
    }

    // NOTE(review): the trailing two bools are assumed to be
    // external_texture=false and hdr=false — confirm against the declaration
    // of gsr_capture_base_setup_textures.
    if(!gsr_capture_base_setup_textures(&cap_xcomp->xcomposite.base, frame, cap_xcomp->xcomposite.params.color_range, GSR_SOURCE_COLOR_RGB, false, false)) {
        gsr_capture_xcomposite_software_stop(cap, video_codec_context);
        return -1;
    }

    return 0;
}
|
||||
|
||||
// Stop capturing; the codec context is not needed for this backend.
static void gsr_capture_xcomposite_software_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_stop(&self->xcomposite);
}
|
||||
|
||||
// Per-frame bookkeeping, delegated to the shared xcomposite implementation.
static void gsr_capture_xcomposite_software_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_tick(&self->xcomposite, video_codec_context);
}
|
||||
|
||||
// True if the captured window changed since the damage flag was last cleared.
static bool gsr_capture_xcomposite_software_is_damaged(gsr_capture *cap) {
    gsr_capture_xcomposite_software *self = cap->priv;
    return gsr_capture_xcomposite_is_damaged(&self->xcomposite);
}
|
||||
|
||||
// Reset the damage flag on the embedded xcomposite capture state.
static void gsr_capture_xcomposite_software_clear_damage(gsr_capture *cap) {
    gsr_capture_xcomposite_software *self = cap->priv;
    gsr_capture_xcomposite_clear_damage(&self->xcomposite);
}
|
||||
|
||||
// Forward the should-stop query to the shared xcomposite implementation;
// *err is set by the callee.
static bool gsr_capture_xcomposite_software_should_stop(gsr_capture *cap, bool *err) {
    gsr_capture_xcomposite_software *self = cap->priv;
    return gsr_capture_xcomposite_should_stop(&self->xcomposite, err);
}
|
||||
|
||||
// Capture the composited window and read the result back to CPU memory.
// The xcomposite capture renders into the base target textures; each plane
// is then downloaded with glGetTexImage into frame->data[i]
// (GL_RED -> Y plane, GL_RG -> interleaved UV plane). Always returns 0.
static int gsr_capture_xcomposite_software_capture(gsr_capture *cap, AVFrame *frame) {
    gsr_capture_xcomposite_software *cap_xcomp = cap->priv;

    // NOTE(review): return value of the render step is discarded here.
    gsr_capture_xcomposite_capture(&cap_xcomp->xcomposite, frame);

    const unsigned int formats[2] = { GL_RED, GL_RG };
    for(int i = 0; i < 2; ++i) {
        cap_xcomp->xcomposite.params.egl->glBindTexture(GL_TEXTURE_2D, cap_xcomp->xcomposite.base.target_textures[i]);
        cap_xcomp->xcomposite.params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
    }
    cap_xcomp->xcomposite.params.egl->glBindTexture(GL_TEXTURE_2D, 0);

    cap_xcomp->xcomposite.params.egl->eglSwapBuffers(cap_xcomp->xcomposite.params.egl->egl_display, cap_xcomp->xcomposite.params.egl->egl_surface);

    return 0;
}
|
||||
|
||||
// Tear down the capture: stop it first (stop reads cap->priv),
// then release the private state and the capture object itself.
static void gsr_capture_xcomposite_software_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
    void *private_data = cap->priv;
    if(private_data) {
        gsr_capture_xcomposite_software_stop(cap, video_codec_context);
        cap->priv = NULL;
        free(private_data);
    }
    free(cap);
}
|
||||
|
||||
// Allocate and wire up an xcomposite software (CPU readback) capture.
// Returns NULL if params is NULL or any allocation fails.
// Ownership: the returned capture owns its priv data; both are
// released by gsr_capture_xcomposite_software_destroy.
gsr_capture* gsr_capture_xcomposite_software_create(const gsr_capture_xcomposite_software_params *params) {
    if(!params) {
        fprintf(stderr, "gsr error: gsr_capture_xcomposite_software_create params is NULL\n");
        return NULL;
    }

    gsr_capture *cap = calloc(1, sizeof(gsr_capture));
    if(!cap)
        return NULL;

    gsr_capture_xcomposite_software *cap_xcomp = calloc(1, sizeof(gsr_capture_xcomposite_software));
    if(!cap_xcomp) {
        free(cap);
        return NULL;
    }

    gsr_capture_xcomposite_init(&cap_xcomp->xcomposite, &params->base);

    // Vtable wiring; capture_end intentionally unused for this backend.
    *cap = (gsr_capture) {
        .start = gsr_capture_xcomposite_software_start,
        .tick = gsr_capture_xcomposite_software_tick,
        .is_damaged = gsr_capture_xcomposite_software_is_damaged,
        .clear_damage = gsr_capture_xcomposite_software_clear_damage,
        .should_stop = gsr_capture_xcomposite_software_should_stop,
        .capture = gsr_capture_xcomposite_software_capture,
        .capture_end = NULL,
        .destroy = gsr_capture_xcomposite_software_destroy,
        .priv = cap_xcomp
    };

    return cap;
}
|
||||
@@ -1,123 +0,0 @@
|
||||
#include "../../include/capture/xcomposite_vaapi.h"
|
||||
#include "../../include/capture/xcomposite.h"
|
||||
#include <unistd.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <va/va.h>
|
||||
#include <va/va_drmcommon.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_capture_xcomposite xcomposite;
|
||||
|
||||
VADisplay va_dpy;
|
||||
VADRMPRIMESurfaceDescriptor prime;
|
||||
} gsr_capture_xcomposite_vaapi;
|
||||
|
||||
static void gsr_capture_xcomposite_vaapi_stop(gsr_capture *cap, AVCodecContext *video_codec_context);
|
||||
|
||||
// Start the xcomposite capture for the VAAPI encoding path.
// Creates a VAAPI codec context from the DRM card that EGL is using, then
// exports the VA surface as DRM-PRIME (stored in cap_xcomp->prime) and binds
// it to GL textures so rendering goes straight into the encode surface.
// Returns 0 on success; on failure partial state is stopped first.
static int gsr_capture_xcomposite_vaapi_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_capture_xcomposite_vaapi *cap_xcomp = cap->priv;

    const int res = gsr_capture_xcomposite_start(&cap_xcomp->xcomposite, video_codec_context, frame);
    if(res != 0) {
        gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
        return res;
    }

    // NOTE(review): the bool argument is assumed to be hdr=false — confirm
    // against the declaration of vaapi_create_codec_context.
    if(!vaapi_create_codec_context(cap_xcomp->xcomposite.params.egl->card_path, video_codec_context, video_codec_context->width, video_codec_context->height, false, &cap_xcomp->va_dpy)) {
        gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
        return -1;
    }

    if(!gsr_capture_base_setup_vaapi_textures(&cap_xcomp->xcomposite.base, frame, cap_xcomp->va_dpy, &cap_xcomp->prime, cap_xcomp->xcomposite.params.color_range)) {
        gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
        return -1;
    }

    return 0;
}
|
||||
|
||||
// Per-frame bookkeeping, delegated to the shared xcomposite implementation.
static void gsr_capture_xcomposite_vaapi_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
    gsr_capture_xcomposite_vaapi *self = cap->priv;
    gsr_capture_xcomposite_tick(&self->xcomposite, video_codec_context);
}
|
||||
|
||||
// True if the captured window changed since the damage flag was last cleared.
static bool gsr_capture_xcomposite_vaapi_is_damaged(gsr_capture *cap) {
    gsr_capture_xcomposite_vaapi *self = cap->priv;
    return gsr_capture_xcomposite_is_damaged(&self->xcomposite);
}
|
||||
|
||||
// Reset the damage flag on the embedded xcomposite capture state.
static void gsr_capture_xcomposite_vaapi_clear_damage(gsr_capture *cap) {
    gsr_capture_xcomposite_vaapi *self = cap->priv;
    gsr_capture_xcomposite_clear_damage(&self->xcomposite);
}
|
||||
|
||||
// Forward the should-stop query to the shared xcomposite implementation;
// *err is set by the callee.
static bool gsr_capture_xcomposite_vaapi_should_stop(gsr_capture *cap, bool *err) {
    gsr_capture_xcomposite_vaapi *self = cap->priv;
    return gsr_capture_xcomposite_should_stop(&self->xcomposite, err);
}
|
||||
|
||||
// Capture one frame: render the window via xcomposite (directly into the
// VAAPI-backed textures set up at start) and flush with eglSwapBuffers.
// Always returns 0.
static int gsr_capture_xcomposite_vaapi_capture(gsr_capture *cap, AVFrame *frame) {
    gsr_capture_xcomposite_vaapi *self = cap->priv;
    gsr_egl *egl = self->xcomposite.params.egl;

    gsr_capture_xcomposite_capture(&self->xcomposite, frame);
    egl->eglSwapBuffers(egl->egl_display, egl->egl_surface);

    return 0;
}
|
||||
|
||||
// Stop the capture: close the DRM-PRIME fds that vaExportSurfaceHandle
// opened for the VA surface, then tear down the xcomposite state.
static void gsr_capture_xcomposite_vaapi_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    gsr_capture_xcomposite_vaapi *cap_xcomp = cap->priv;

    for(uint32_t i = 0; i < cap_xcomp->prime.num_objects; ++i) {
        // 0 doubles as the "already closed" marker, so an fd of exactly 0
        // would be skipped. NOTE(review): assumes exported fds are never 0.
        if(cap_xcomp->prime.objects[i].fd > 0) {
            close(cap_xcomp->prime.objects[i].fd);
            cap_xcomp->prime.objects[i].fd = 0;
        }
    }

    gsr_capture_xcomposite_stop(&cap_xcomp->xcomposite);
}
|
||||
|
||||
// Tear down the capture: stop it first (stop reads cap->priv),
// then release the private state and the capture object itself.
static void gsr_capture_xcomposite_vaapi_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
    void *private_data = cap->priv;
    if(private_data) {
        gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
        cap->priv = NULL;
        free(private_data);
    }
    free(cap);
}
|
||||
|
||||
// Allocate and wire up an xcomposite + VAAPI capture instance.
// Returns NULL if params is NULL or any allocation fails.
// Ownership: the returned capture owns its priv data; both are
// released by gsr_capture_xcomposite_vaapi_destroy.
gsr_capture* gsr_capture_xcomposite_vaapi_create(const gsr_capture_xcomposite_vaapi_params *params) {
    if(!params) {
        fprintf(stderr, "gsr error: gsr_capture_xcomposite_vaapi_create params is NULL\n");
        return NULL;
    }

    gsr_capture *cap = calloc(1, sizeof(gsr_capture));
    if(!cap)
        return NULL;

    gsr_capture_xcomposite_vaapi *cap_xcomp = calloc(1, sizeof(gsr_capture_xcomposite_vaapi));
    if(!cap_xcomp) {
        free(cap);
        return NULL;
    }

    gsr_capture_xcomposite_init(&cap_xcomp->xcomposite, &params->base);

    // Vtable wiring; capture_end intentionally unused for this backend.
    *cap = (gsr_capture) {
        .start = gsr_capture_xcomposite_vaapi_start,
        .tick = gsr_capture_xcomposite_vaapi_tick,
        .is_damaged = gsr_capture_xcomposite_vaapi_is_damaged,
        .clear_damage = gsr_capture_xcomposite_vaapi_clear_damage,
        .should_stop = gsr_capture_xcomposite_vaapi_should_stop,
        .capture = gsr_capture_xcomposite_vaapi_capture,
        .capture_end = NULL,
        .destroy = gsr_capture_xcomposite_vaapi_destroy,
        .priv = cap_xcomp
    };

    return cap;
}
|
||||
@@ -357,6 +357,7 @@ void gsr_color_conversion_draw(gsr_color_conversion *self, unsigned int texture_
|
||||
|
||||
vec2i source_texture_size = {0, 0};
|
||||
if(external_texture) {
|
||||
assert(self->params.load_external_image_shader);
|
||||
source_texture_size = source_size;
|
||||
} else {
|
||||
/* TODO: Do not call this every frame? */
|
||||
|
||||
@@ -19,7 +19,7 @@ bool gsr_cuda_load(gsr_cuda *self, Display *display, bool do_overclock) {
|
||||
}
|
||||
}
|
||||
|
||||
dlsym_assign required_dlsym[] = {
|
||||
const dlsym_assign required_dlsym[] = {
|
||||
{ (void**)&self->cuInit, "cuInit" },
|
||||
{ (void**)&self->cuDeviceGetCount, "cuDeviceGetCount" },
|
||||
{ (void**)&self->cuDeviceGet, "cuDeviceGet" },
|
||||
@@ -82,12 +82,13 @@ bool gsr_cuda_load(gsr_cuda *self, Display *display, bool do_overclock) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if(self->do_overclock) {
|
||||
assert(display);
|
||||
if(self->do_overclock && display) {
|
||||
if(gsr_overclock_load(&self->overclock, display))
|
||||
gsr_overclock_start(&self->overclock);
|
||||
else
|
||||
fprintf(stderr, "gsr warning: gsr_cuda_load: failed to load xnvctrl, failed to overclock memory transfer rate\n");
|
||||
} else if(self->do_overclock && !display) {
|
||||
fprintf(stderr, "gsr warning: gsr_cuda_load: overclocking enabled but no X server is running. Overclocking has been disabled\n");
|
||||
}
|
||||
|
||||
self->library = lib;
|
||||
|
||||
236
src/encoder/video/cuda.c
Normal file
236
src/encoder/video/cuda.c
Normal file
@@ -0,0 +1,236 @@
|
||||
#include "../../../include/encoder/video/cuda.h"
|
||||
#include "../../../include/egl.h"
|
||||
#include "../../../include/cuda.h"
|
||||
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/hwcontext_cuda.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_video_encoder_cuda_params params;
|
||||
|
||||
unsigned int target_textures[2];
|
||||
|
||||
gsr_cuda cuda;
|
||||
CUgraphicsResource cuda_graphics_resources[2];
|
||||
CUarray mapped_arrays[2];
|
||||
CUstream cuda_stream;
|
||||
} gsr_video_encoder_cuda;
|
||||
|
||||
// Create the FFmpeg CUDA hwdevice + hwframe context pair for this encoder,
// reusing the already-initialized CUDA context (self->cuda.cu_ctx).
// On success: stores the device's CUDA stream in self->cuda_stream and
// attaches referenced device/frame contexts to the codec context.
// Returns false on any failure.
// NOTE(review): on success the original device_ctx/frame_context references
// are never unref'd after the av_buffer_ref calls below — confirm whether
// the extra references are intentional (they keep the contexts alive for
// the process lifetime).
static bool gsr_video_encoder_cuda_setup_context(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context) {
    AVBufferRef *device_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_CUDA);
    if(!device_ctx) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hardware device context\n");
        return false;
    }

    // Hand our existing CUDA context to FFmpeg instead of letting it create one.
    AVHWDeviceContext *hw_device_context = (AVHWDeviceContext*)device_ctx->data;
    AVCUDADeviceContext *cuda_device_context = (AVCUDADeviceContext*)hw_device_context->hwctx;
    cuda_device_context->cuda_ctx = self->cuda.cu_ctx;
    if(av_hwdevice_ctx_init(device_ctx) < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hardware device context\n");
        av_buffer_unref(&device_ctx);
        return false;
    }

    AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
    if(!frame_context) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hwframe context\n");
        av_buffer_unref(&device_ctx);
        return false;
    }

    AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)frame_context->data;
    hw_frame_context->width = video_codec_context->width;
    hw_frame_context->height = video_codec_context->height;
    // P010 (10-bit) for hdr output, NV12 (8-bit) otherwise.
    hw_frame_context->sw_format = self->params.hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
    hw_frame_context->format = video_codec_context->pix_fmt;
    hw_frame_context->device_ref = device_ctx;
    hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;

    if (av_hwframe_ctx_init(frame_context) < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to initialize hardware frame context "
                        "(note: ffmpeg version needs to be > 4.0)\n");
        av_buffer_unref(&device_ctx);
        // NOTE(review): frame_context is not unref'd on this path (the unref
        // below is deliberately commented out) — confirm whether this leaks.
        //av_buffer_unref(&frame_context);
        return false;
    }

    self->cuda_stream = cuda_device_context->stream;
    video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
    video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
    return true;
}
|
||||
|
||||
// Create a 2D texture of the given size and format with nearest filtering
// and edge clamping, suitable as a CUDA-mapped copy source.
// Returns the GL texture id (0 if glGenTextures produced none).
static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
    unsigned int tex = 0;
    egl->glGenTextures(1, &tex);
    egl->glBindTexture(GL_TEXTURE_2D, tex);
    egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);

    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    egl->glBindTexture(GL_TEXTURE_2D, 0);
    return tex;
}
|
||||
|
||||
// Register the GL texture with CUDA, map it, and fetch its backing CUDA
// array into *mapped_array so it can be the source of a cuMemcpy2D.
// Returns false on failure; on a partial failure the registration/mapping
// done so far is undone and *cuda_graphics_resource is reset to 0, so the
// caller's later cleanup (which checks for non-zero resources) stays valid.
// Fix: the results of cuGraphicsResourceSetMapFlags, cuGraphicsMapResources
// and cuGraphicsSubResourceGetMappedArray were previously assigned but
// never checked.
static bool cuda_register_opengl_texture(gsr_cuda *cuda, CUgraphicsResource *cuda_graphics_resource, CUarray *mapped_array, unsigned int texture_id) {
    CUresult res = cuda->cuGraphicsGLRegisterImage(cuda_graphics_resource, texture_id, GL_TEXTURE_2D, CU_GRAPHICS_REGISTER_FLAGS_NONE);
    if (res != CUDA_SUCCESS) {
        const char *err_str = "unknown";
        cuda->cuGetErrorString(res, &err_str);
        fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsGLRegisterImage failed, error: %s, texture " "id: %u\n", err_str, texture_id);
        return false;
    }

    res = cuda->cuGraphicsResourceSetMapFlags(*cuda_graphics_resource, CU_GRAPHICS_MAP_RESOURCE_FLAGS_NONE);
    if (res != CUDA_SUCCESS) {
        fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsResourceSetMapFlags failed, error: %d\n", (int)res);
        cuda->cuGraphicsUnregisterResource(*cuda_graphics_resource);
        *cuda_graphics_resource = 0;
        return false;
    }

    res = cuda->cuGraphicsMapResources(1, cuda_graphics_resource, 0);
    if (res != CUDA_SUCCESS) {
        fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsMapResources failed, error: %d\n", (int)res);
        cuda->cuGraphicsUnregisterResource(*cuda_graphics_resource);
        *cuda_graphics_resource = 0;
        return false;
    }

    res = cuda->cuGraphicsSubResourceGetMappedArray(mapped_array, *cuda_graphics_resource, 0, 0);
    if (res != CUDA_SUCCESS) {
        fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsSubResourceGetMappedArray failed, error: %d\n", (int)res);
        cuda->cuGraphicsUnmapResources(1, cuda_graphics_resource, 0);
        cuda->cuGraphicsUnregisterResource(*cuda_graphics_resource);
        *cuda_graphics_resource = 0;
        return false;
    }

    return true;
}
|
||||
|
||||
// Allocate the frame's CUDA hw buffer and create the two GL plane textures
// (Y and interleaved UV; 16-bit internal formats when hdr/P010), then
// register each texture with CUDA so capture output can be copied into the
// frame. Returns false on any failure; partially-created textures are
// cleaned up later by gsr_video_encoder_cuda_stop.
static bool gsr_video_encoder_cuda_setup_textures(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context, AVFrame *frame) {
    const int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, frame, 0);
    if(res < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_textures: av_hwframe_get_buffer failed: %d\n", res);
        return false;
    }

    const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
    const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
    const unsigned int formats[2] = { GL_RED, GL_RG };
    const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size

    for(int i = 0; i < 2; ++i) {
        self->target_textures[i] = gl_create_texture(self->params.egl, video_codec_context->width / div[i], video_codec_context->height / div[i], !self->params.hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
        if(self->target_textures[i] == 0) {
            fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_textures: failed to create opengl texture\n");
            return false;
        }

        if(!cuda_register_opengl_texture(&self->cuda, &self->cuda_graphics_resources[i], &self->mapped_arrays[i], self->target_textures[i])) {
            return false;
        }
    }

    return true;
}
|
||||
|
||||
static void gsr_video_encoder_cuda_stop(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context);
|
||||
|
||||
// Initialize the CUDA video encoding path: load CUDA (optionally with
// overclocking when X11 is available), create the FFmpeg CUDA hw contexts,
// and create/register the GL target textures. Returns false on failure;
// partially-initialized state is torn down via gsr_video_encoder_cuda_stop.
static bool gsr_video_encoder_cuda_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_video_encoder_cuda *encoder_cuda = encoder->priv;

    // TODO: Force set overclock to false if wayland
    // x11.dpy may be NULL (wayland); gsr_cuda_load warns and skips
    // overclocking in that case.
    if(!gsr_cuda_load(&encoder_cuda->cuda, encoder_cuda->params.egl->x11.dpy, encoder_cuda->params.overclock)) {
        fprintf(stderr, "gsr error: gsr_video_encoder_cuda_start: failed to load cuda\n");
        gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
        return false;
    }

    if(!gsr_video_encoder_cuda_setup_context(encoder_cuda, video_codec_context)) {
        gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
        return false;
    }

    if(!gsr_video_encoder_cuda_setup_textures(encoder_cuda, video_codec_context, frame)) {
        gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
        return false;
    }

    return true;
}
|
||||
|
||||
// Tear down everything gsr_video_encoder_cuda_start created: GL textures,
// FFmpeg hw contexts, CUDA graphics resource mappings, and CUDA itself.
// Safe to call on partially-initialized state (all members are checked
// or zero-initialized).
void gsr_video_encoder_cuda_stop(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context) {
    // NOTE(review): textures are deleted before the CUDA resources that
    // reference them are unmapped/unregistered below — confirm this ordering
    // is safe with the driver.
    self->params.egl->glDeleteTextures(2, self->target_textures);
    self->target_textures[0] = 0;
    self->target_textures[1] = 0;

    if(video_codec_context->hw_device_ctx)
        av_buffer_unref(&video_codec_context->hw_device_ctx);
    if(video_codec_context->hw_frames_ctx)
        av_buffer_unref(&video_codec_context->hw_frames_ctx);

    // CUDA calls require a live context; skip if CUDA never initialized.
    if(self->cuda.cu_ctx) {
        for(int i = 0; i < 2; ++i) {
            if(self->cuda_graphics_resources[i]) {
                self->cuda.cuGraphicsUnmapResources(1, &self->cuda_graphics_resources[i], 0);
                self->cuda.cuGraphicsUnregisterResource(self->cuda_graphics_resources[i]);
                self->cuda_graphics_resources[i] = 0;
            }
        }
    }

    gsr_cuda_unload(&self->cuda);
}
|
||||
|
||||
// Copy the two GL plane textures (via their mapped CUDA arrays) into the
// frame's device planes with async 2D memcpys, then synchronize the stream.
static void gsr_video_encoder_cuda_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
    gsr_video_encoder_cuda *encoder_cuda = encoder->priv;
    const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
    for(int i = 0; i < 2; ++i) {
        CUDA_MEMCPY2D memcpy_struct;
        memcpy_struct.srcXInBytes = 0;
        memcpy_struct.srcY = 0;
        memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;

        memcpy_struct.dstXInBytes = 0;
        memcpy_struct.dstY = 0;
        memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;

        memcpy_struct.srcArray = encoder_cuda->mapped_arrays[i];
        memcpy_struct.srcPitch = frame->width / div[i];
        memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
        memcpy_struct.dstPitch = frame->linesize[i];
        // P010 (hdr) has 2 bytes per sample, NV12 has 1.
        memcpy_struct.WidthInBytes = frame->width * (encoder_cuda->params.hdr ? 2 : 1);
        memcpy_struct.Height = frame->height / div[i];
        // TODO: Remove this copy if possible
        encoder_cuda->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, encoder_cuda->cuda_stream);
    }

    // TODO: needed?
    encoder_cuda->cuda.cuStreamSynchronize(encoder_cuda->cuda_stream);
}
|
||||
|
||||
// Expose the encoder's two plane textures to the capture/color-conversion
// side, along with the destination color format they expect.
static void gsr_video_encoder_cuda_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
    const gsr_video_encoder_cuda *self = encoder->priv;
    for(int i = 0; i < 2; ++i)
        textures[i] = self->target_textures[i];
    *num_textures = 2;
    *destination_color = self->params.hdr ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
}
|
||||
|
||||
// Stop the encoder and free both the private state and the encoder object.
static void gsr_video_encoder_cuda_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
    gsr_video_encoder_cuda *self = encoder->priv;
    gsr_video_encoder_cuda_stop(self, video_codec_context);
    free(self);
    free(encoder);
}
|
||||
|
||||
// Allocate and wire up a CUDA video encoder.
// Returns NULL on allocation failure.
// NOTE(review): unlike the capture create functions, params is not
// NULL-checked before being copied — callers must pass a valid pointer.
gsr_video_encoder* gsr_video_encoder_cuda_create(const gsr_video_encoder_cuda_params *params) {
    gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
    if(!encoder)
        return NULL;

    gsr_video_encoder_cuda *encoder_cuda = calloc(1, sizeof(gsr_video_encoder_cuda));
    if(!encoder_cuda) {
        free(encoder);
        return NULL;
    }

    encoder_cuda->params = *params;

    // Vtable wiring; priv is freed by gsr_video_encoder_cuda_destroy.
    *encoder = (gsr_video_encoder) {
        .start = gsr_video_encoder_cuda_start,
        .copy_textures_to_frame = gsr_video_encoder_cuda_copy_textures_to_frame,
        .get_textures = gsr_video_encoder_cuda_get_textures,
        .destroy = gsr_video_encoder_cuda_destroy,
        .priv = encoder_cuda
    };

    return encoder;
}
|
||||
127
src/encoder/video/software.c
Normal file
127
src/encoder/video/software.c
Normal file
@@ -0,0 +1,127 @@
|
||||
#include "../../../include/encoder/video/software.h"
|
||||
#include "../../../include/egl.h"
|
||||
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/frame.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_video_encoder_software_params params;
|
||||
|
||||
unsigned int target_textures[2];
|
||||
} gsr_video_encoder_software;
|
||||
|
||||
// Create a 2D texture of the given size and format with nearest filtering
// and edge clamping. Returns the GL texture id (0 if glGenTextures fails).
// NOTE(review): duplicate of the identical helper in encoder/video/cuda.c —
// consider moving to a shared utility.
static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
    unsigned int texture_id = 0;
    egl->glGenTextures(1, &texture_id);
    egl->glBindTexture(GL_TEXTURE_2D, texture_id);
    egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);

    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

    egl->glBindTexture(GL_TEXTURE_2D, 0);
    return texture_id;
}
|
||||
|
||||
// Allocate the frame's CPU buffers and create the two GL plane textures
// (Y and interleaved UV; 16-bit internal formats when hdr) that capture
// output is rendered into before being read back.
// Returns false on any failure; created textures are cleaned up later by
// gsr_video_encoder_software_stop.
static bool gsr_video_encoder_software_setup_textures(gsr_video_encoder_software *self, AVCodecContext *video_codec_context, AVFrame *frame) {
    int res = av_frame_get_buffer(frame, 1); // TODO: Align?
    if(res < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_software_setup_textures: av_frame_get_buffer failed: %d\n", res);
        return false;
    }

    res = av_frame_make_writable(frame);
    if(res < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_software_setup_textures: av_frame_make_writable failed: %d\n", res);
        return false;
    }

    const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
    const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
    const unsigned int formats[2] = { GL_RED, GL_RG };
    const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size

    for(int i = 0; i < 2; ++i) {
        self->target_textures[i] = gl_create_texture(self->params.egl, video_codec_context->width / div[i], video_codec_context->height / div[i], !self->params.hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
        if(self->target_textures[i] == 0) {
            // Fix: this error previously named gsr_capture_kms_setup_cuda_textures
            // (a copy-paste leftover from the kms capture code).
            fprintf(stderr, "gsr error: gsr_video_encoder_software_setup_textures: failed to create opengl texture\n");
            return false;
        }
    }

    return true;
}
|
||||
|
||||
static void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecContext *video_codec_context);
|
||||
|
||||
// Start the software encoder: allocate frame buffers and target textures.
// On failure the partially-created state is torn down and false is returned.
static bool gsr_video_encoder_software_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_video_encoder_software *self = encoder->priv;

    if(gsr_video_encoder_software_setup_textures(self, video_codec_context, frame))
        return true;

    gsr_video_encoder_software_stop(self, video_codec_context);
    return false;
}
|
||||
|
||||
// Delete the plane textures and reset their ids; the codec context holds
// nothing to release on this path.
void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecContext *video_codec_context) {
    (void)video_codec_context;
    self->params.egl->glDeleteTextures(2, self->target_textures);
    for(int i = 0; i < 2; ++i)
        self->target_textures[i] = 0;
}
|
||||
|
||||
// Download the two plane textures from the GPU into the frame's CPU planes
// (GL_RED -> Y, GL_RG -> interleaved UV).
static void gsr_video_encoder_software_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
    gsr_video_encoder_software *encoder_software = encoder->priv;
    // TODO: hdr support
    // NOTE(review): GL_UNSIGNED_BYTE is used unconditionally even though
    // setup creates 16-bit textures when params.hdr is set — confirm whether
    // the hdr path should use a 16-bit read type here.
    const unsigned int formats[2] = { GL_RED, GL_RG };
    for(int i = 0; i < 2; ++i) {
        encoder_software->params.egl->glBindTexture(GL_TEXTURE_2D, encoder_software->target_textures[i]);
        encoder_software->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
    }
    encoder_software->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
    // cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
}
|
||||
|
||||
// Expose the encoder's two plane textures to the capture/color-conversion
// side, along with the destination color format they expect.
static void gsr_video_encoder_software_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
    const gsr_video_encoder_software *self = encoder->priv;
    for(int i = 0; i < 2; ++i)
        textures[i] = self->target_textures[i];
    *num_textures = 2;
    *destination_color = self->params.hdr ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
}
|
||||
|
||||
// Stop the encoder and free both the private state and the encoder object.
static void gsr_video_encoder_software_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
    gsr_video_encoder_software *self = encoder->priv;
    gsr_video_encoder_software_stop(self, video_codec_context);
    free(self);
    free(encoder);
}
|
||||
|
||||
// Allocate and wire up a software (CPU readback) video encoder.
// Returns NULL on allocation failure.
// NOTE(review): params is not NULL-checked before being copied —
// callers must pass a valid pointer.
gsr_video_encoder* gsr_video_encoder_software_create(const gsr_video_encoder_software_params *params) {
    gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
    if(!encoder)
        return NULL;

    gsr_video_encoder_software *encoder_software = calloc(1, sizeof(gsr_video_encoder_software));
    if(!encoder_software) {
        free(encoder);
        return NULL;
    }

    encoder_software->params = *params;

    // Vtable wiring; priv is freed by gsr_video_encoder_software_destroy.
    *encoder = (gsr_video_encoder) {
        .start = gsr_video_encoder_software_start,
        .copy_textures_to_frame = gsr_video_encoder_software_copy_textures_to_frame,
        .get_textures = gsr_video_encoder_software_get_textures,
        .destroy = gsr_video_encoder_software_destroy,
        .priv = encoder_software
    };

    return encoder;
}
|
||||
221
src/encoder/video/vaapi.c
Normal file
221
src/encoder/video/vaapi.c
Normal file
@@ -0,0 +1,221 @@
|
||||
#include "../../../include/encoder/video/vaapi.h"
|
||||
#include "../../../include/utils.h"
|
||||
#include "../../../include/egl.h"
|
||||
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/hwcontext_vaapi.h>
|
||||
|
||||
#include <va/va_drmcommon.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
|
||||
typedef struct {
|
||||
gsr_video_encoder_vaapi_params params;
|
||||
|
||||
unsigned int target_textures[2];
|
||||
|
||||
VADisplay va_dpy;
|
||||
VADRMPRIMESurfaceDescriptor prime;
|
||||
} gsr_video_encoder_vaapi;
|
||||
|
||||
// Create the FFmpeg VAAPI hwdevice + hwframe context pair for this encoder,
// opening the render node that corresponds to the EGL card path.
// On success: stores the VADisplay in self->va_dpy and attaches referenced
// device/frame contexts to the codec context. Returns false on any failure.
// NOTE(review): as in the cuda encoder, the original device_ctx/frame_context
// references are not unref'd after av_buffer_ref on the success path —
// confirm whether this is intentional.
static bool gsr_video_encoder_vaapi_setup_context(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context) {
    // Convert /dev/dri/cardN to its /dev/dri/renderDXXX render node.
    char render_path[128];
    if(!gsr_card_path_get_render_path(self->params.egl->card_path, render_path)) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to get /dev/dri/renderDXXX file from %s\n", self->params.egl->card_path);
        return false;
    }

    AVBufferRef *device_ctx;
    if(av_hwdevice_ctx_create(&device_ctx, AV_HWDEVICE_TYPE_VAAPI, render_path, NULL, 0) < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to create hardware device context\n");
        return false;
    }

    AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
    if(!frame_context) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to create hwframe context\n");
        av_buffer_unref(&device_ctx);
        return false;
    }

    AVHWFramesContext *hw_frame_context = (AVHWFramesContext *)frame_context->data;
    hw_frame_context->width = video_codec_context->width;
    hw_frame_context->height = video_codec_context->height;
    // P010 (10-bit) for hdr output, NV12 (8-bit) otherwise.
    hw_frame_context->sw_format = self->params.hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
    hw_frame_context->format = video_codec_context->pix_fmt;
    hw_frame_context->device_ref = device_ctx;
    hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;

    //hw_frame_context->initial_pool_size = 20;

    // Keep the VADisplay; it is needed later to export the encode surface.
    AVVAAPIDeviceContext *vactx =((AVHWDeviceContext*)device_ctx->data)->hwctx;
    self->va_dpy = vactx->display;

    if (av_hwframe_ctx_init(frame_context) < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to initialize hardware frame context "
                        "(note: ffmpeg version needs to be > 4.0)\n");
        av_buffer_unref(&device_ctx);
        // NOTE(review): frame_context is not unref'd on this path (the unref
        // below is deliberately commented out) — confirm whether this leaks.
        //av_buffer_unref(&frame_context);
        return false;
    }

    video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
    video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
    return true;
}
|
||||
|
||||
/* Packs four characters into a little-endian FOURCC code: |a| is the least
 * significant byte and |d| the most significant byte. */
static uint32_t fourcc(uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
    uint32_t code = a;
    code |= b << 8;
    code |= c << 16;
    code |= d << 24;
    return code;
}
|
||||
|
||||
/* Allocates the encoder's destination frame from the VAAPI hwframe pool and maps its
 * two DRM PRIME layers (luma plane, interleaved chroma plane) as OpenGL textures via
 * EGLImage, so color conversion can render directly into the surface the encoder
 * consumes. Returns true on success. The exported DMA-BUF fds are kept in self->prime
 * and are closed later by gsr_video_encoder_vaapi_stop(). */
static bool gsr_video_encoder_vaapi_setup_textures(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context, AVFrame *frame) {
    const int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, frame, 0);
    if(res < 0) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: av_hwframe_get_buffer failed: %d\n", res);
        return false;
    }

    /* For AV_PIX_FMT_VAAPI frames, FFmpeg stores the VASurfaceID in data[3] */
    VASurfaceID target_surface_id = (uintptr_t)frame->data[3];

    /* Export each plane of the surface as its own DRM PRIME layer so the Y and UV
       planes can be imported as two separate GL textures below */
    VAStatus va_status = vaExportSurfaceHandle(self->va_dpy, target_surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, VA_EXPORT_SURFACE_WRITE_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, &self->prime);
    if(va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: vaExportSurfaceHandle failed, error: %d\n", va_status);
        return false;
    }
    vaSyncSurface(self->va_dpy, target_surface_id);

    /* DRM fourccs of the per-plane texture formats: single-channel for luma,
       two-channel for the interleaved chroma plane (8-bit for NV12, 16-bit for P010) */
    const uint32_t formats_nv12[2] = { fourcc('R', '8', ' ', ' '), fourcc('G', 'R', '8', '8') };
    const uint32_t formats_p010[2] = { fourcc('R', '1', '6', ' '), fourcc('G', 'R', '3', '2') };

    if(self->prime.fourcc == VA_FOURCC_NV12 || self->prime.fourcc == VA_FOURCC_P010) {
        const uint32_t *formats = self->prime.fourcc == VA_FOURCC_NV12 ? formats_nv12 : formats_p010;
        const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size

        self->params.egl->glGenTextures(2, self->target_textures);
        for(int i = 0; i < 2; ++i) {
            /* With VA_EXPORT_SURFACE_SEPARATE_LAYERS each plane is its own layer
               with a single plane inside it */
            const int layer = i;
            const int plane = 0;

            const uint64_t modifier = self->prime.objects[self->prime.layers[layer].object_index[plane]].drm_format_modifier;
            /* EGL_EXT_image_dma_buf_import(_modifiers) attribute list describing the plane */
            const intptr_t img_attr[] = {
                EGL_LINUX_DRM_FOURCC_EXT, formats[i],
                EGL_WIDTH, self->prime.width / div[i],
                EGL_HEIGHT, self->prime.height / div[i],
                EGL_DMA_BUF_PLANE0_FD_EXT, self->prime.objects[self->prime.layers[layer].object_index[plane]].fd,
                EGL_DMA_BUF_PLANE0_OFFSET_EXT, self->prime.layers[layer].offset[plane],
                EGL_DMA_BUF_PLANE0_PITCH_EXT, self->prime.layers[layer].pitch[plane],
                EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, modifier & 0xFFFFFFFFULL,
                EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, modifier >> 32ULL,
                EGL_NONE
            };

            /* Drain any stale EGL errors so the check below only reflects this call */
            while(self->params.egl->eglGetError() != EGL_SUCCESS){}
            EGLImage image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
            if(!image) {
                fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: failed to create egl image from drm fd for output drm fd, error: %d\n", self->params.egl->eglGetError());
                return false;
            }

            self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[i]);
            self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

            /* Drain stale GL and EGL errors before binding the image to the texture */
            while(self->params.egl->glGetError()) {}
            while(self->params.egl->eglGetError() != EGL_SUCCESS){}
            self->params.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
            if(self->params.egl->glGetError() != 0 || self->params.egl->eglGetError() != EGL_SUCCESS) {
                // TODO: Get the error properly
                fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: failed to bind egl image to gl texture, error: %d\n", self->params.egl->eglGetError());
                self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
                self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
                return false;
            }

            /* The texture keeps the underlying buffer alive; the EGLImage handle
               itself is no longer needed */
            self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
            self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
        }

        return true;
    } else {
        fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: unexpected fourcc %u for output drm fd, expected nv12 or p010\n", self->prime.fourcc);
        return false;
    }
}
|
||||
|
||||
static void gsr_video_encoder_vaapi_stop(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context);
|
||||
|
||||
/* gsr_video_encoder start callback for the VAAPI backend: creates the hardware
 * contexts and maps the destination frame as GL textures. On any failure all
 * partially created state is released via the stop function. */
static bool gsr_video_encoder_vaapi_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_video_encoder_vaapi *self = encoder->priv;

    /* Short-circuits: textures are only set up when the context setup succeeded */
    const bool ok = gsr_video_encoder_vaapi_setup_context(self, video_codec_context)
        && gsr_video_encoder_vaapi_setup_textures(self, video_codec_context, frame);

    if(!ok)
        gsr_video_encoder_vaapi_stop(self, video_codec_context);

    return ok;
}
|
||||
|
||||
/* Releases everything gsr_video_encoder_vaapi_start() created: the destination GL
 * textures, the codec context's hardware device/frame context references and the
 * DMA-BUF fds exported from the VA surface. Safe to call on a partially
 * initialized encoder.
 * Marked static to match the forward declaration above (internal linkage). */
static void gsr_video_encoder_vaapi_stop(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context) {
    /* Deleting texture id 0 is a no-op, so this is safe even if setup_textures never ran */
    self->params.egl->glDeleteTextures(2, self->target_textures);
    self->target_textures[0] = 0;
    self->target_textures[1] = 0;

    /* av_buffer_unref is documented as a no-op when the ref is NULL, so no guards needed */
    av_buffer_unref(&video_codec_context->hw_device_ctx);
    av_buffer_unref(&video_codec_context->hw_frames_ctx);

    /* Close the DMA-BUF fds exported by vaExportSurfaceHandle; 0 is used here as
       the "already closed" sentinel */
    for(uint32_t i = 0; i < self->prime.num_objects; ++i) {
        if(self->prime.objects[i].fd > 0) {
            close(self->prime.objects[i].fd);
            self->prime.objects[i].fd = 0;
        }
    }
}
|
||||
|
||||
/* gsr_video_encoder get_textures callback: reports the two destination textures
 * (luma + chroma plane) and the destination color format matching the hwframe
 * sw_format chosen in setup_context (P010 for HDR, NV12 otherwise). */
static void gsr_video_encoder_vaapi_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
    const gsr_video_encoder_vaapi *self = encoder->priv;

    for(int i = 0; i < 2; ++i)
        textures[i] = self->target_textures[i];
    *num_textures = 2;

    if(self->params.hdr)
        *destination_color = GSR_DESTINATION_COLOR_P010;
    else
        *destination_color = GSR_DESTINATION_COLOR_NV12;
}
|
||||
|
||||
/* gsr_video_encoder destroy callback: stops the backend, then frees the private
 * state and the encoder object itself. */
static void gsr_video_encoder_vaapi_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
    gsr_video_encoder_vaapi *self = encoder->priv;
    gsr_video_encoder_vaapi_stop(self, video_codec_context);
    free(self);
    free(encoder);
}
|
||||
|
||||
gsr_video_encoder* gsr_video_encoder_vaapi_create(const gsr_video_encoder_vaapi_params *params) {
|
||||
gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
|
||||
if(!encoder)
|
||||
return NULL;
|
||||
|
||||
gsr_video_encoder_vaapi *encoder_vaapi = calloc(1, sizeof(gsr_video_encoder_vaapi));
|
||||
if(!encoder_vaapi) {
|
||||
free(encoder);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
encoder_vaapi->params = *params;
|
||||
|
||||
*encoder = (gsr_video_encoder) {
|
||||
.start = gsr_video_encoder_vaapi_start,
|
||||
.copy_textures_to_frame = NULL,
|
||||
.get_textures = gsr_video_encoder_vaapi_get_textures,
|
||||
.destroy = gsr_video_encoder_vaapi_destroy,
|
||||
.priv = encoder_vaapi
|
||||
};
|
||||
|
||||
return encoder;
|
||||
}
|
||||
26
src/encoder/video/video.c
Normal file
26
src/encoder/video/video.c
Normal file
@@ -0,0 +1,26 @@
|
||||
#include "../../../include/encoder/video/video.h"
|
||||
#include <assert.h>
|
||||
|
||||
/* Starts the encoder backend. Must be called exactly once per encoder; on success
 * the encoder is marked as started, which the other gsr_video_encoder_* wrappers
 * assert on. Returns whatever the backend's start callback returns. */
bool gsr_video_encoder_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
    assert(!encoder->started);
    if(!encoder->start(encoder, video_codec_context, frame))
        return false;

    encoder->started = true;
    return true;
}
|
||||
|
||||
/* Copies the rendered destination textures into |frame| for backends that need an
 * explicit copy step. Backends that encode directly from the textures leave the
 * callback NULL, in which case this is a no-op. */
void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
    assert(encoder->started);
    if(!encoder->copy_textures_to_frame)
        return;

    encoder->copy_textures_to_frame(encoder, frame);
}
|
||||
|
||||
/* Queries the backend's destination textures, their count and the destination
 * color format. Only valid after a successful gsr_video_encoder_start.
 * |textures| must be large enough for the backend's texture count (the callers
 * in this project pass a fixed-size array — TODO confirm max size against all
 * backends). */
void gsr_video_encoder_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
    assert(encoder->started);
    encoder->get_textures(encoder, textures, num_textures, destination_color);
}
|
||||
|
||||
/* Tears down the encoder backend; the backend's destroy callback also frees the
 * encoder object itself, so |encoder| must not be used afterwards. */
void gsr_video_encoder_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
    /* NOTE(review): this asserts that start() succeeded, so an encoder whose start
       failed (or was never called) cannot be cleaned up through this function —
       verify callers always exit on start failure, or consider relaxing this. */
    assert(encoder->started);
    encoder->destroy(encoder, video_codec_context);
}
|
||||
177
src/main.cpp
177
src/main.cpp
@@ -1,11 +1,10 @@
|
||||
extern "C" {
|
||||
#include "../include/capture/nvfbc.h"
|
||||
#include "../include/capture/xcomposite_cuda.h"
|
||||
#include "../include/capture/xcomposite_vaapi.h"
|
||||
#include "../include/capture/xcomposite_software.h"
|
||||
#include "../include/capture/kms_vaapi.h"
|
||||
#include "../include/capture/kms_cuda.h"
|
||||
#include "../include/capture/kms_software.h"
|
||||
#include "../include/capture/xcomposite.h"
|
||||
#include "../include/capture/kms.h"
|
||||
#include "../include/encoder/video/cuda.h"
|
||||
#include "../include/encoder/video/vaapi.h"
|
||||
#include "../include/encoder/video/software.h"
|
||||
#include "../include/egl.h"
|
||||
#include "../include/utils.h"
|
||||
#include "../include/color_conversion.h"
|
||||
@@ -1564,7 +1563,7 @@ static void list_supported_video_codecs() {
|
||||
XCloseDisplay(dpy);
|
||||
}
|
||||
|
||||
static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_egl &egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor, bool track_damage, bool use_software_video_encoder) {
|
||||
static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_egl *egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor, bool track_damage, bool use_software_video_encoder) {
|
||||
vec2i region_size = { 0, 0 };
|
||||
Window src_window_id = None;
|
||||
bool follow_focused = false;
|
||||
@@ -1593,11 +1592,11 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
|
||||
|
||||
follow_focused = true;
|
||||
} else if(contains_non_hex_number(window_str)) {
|
||||
if(wayland || egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
|
||||
if(wayland || egl->gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
|
||||
if(strcmp(window_str, "screen") == 0) {
|
||||
FirstOutputCallback first_output;
|
||||
first_output.output_name = NULL;
|
||||
for_each_active_monitor_output(&egl, GSR_CONNECTION_DRM, get_first_output, &first_output);
|
||||
for_each_active_monitor_output(egl, GSR_CONNECTION_DRM, get_first_output, &first_output);
|
||||
|
||||
if(first_output.output_name) {
|
||||
window_str = first_output.output_name;
|
||||
@@ -1608,48 +1607,48 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
|
||||
}
|
||||
|
||||
gsr_monitor gmon;
|
||||
if(!get_monitor_by_name(&egl, GSR_CONNECTION_DRM, window_str, &gmon)) {
|
||||
if(!get_monitor_by_name(egl, GSR_CONNECTION_DRM, window_str, &gmon)) {
|
||||
fprintf(stderr, "gsr error: display \"%s\" not found, expected one of:\n", window_str);
|
||||
fprintf(stderr, " \"screen\"\n");
|
||||
for_each_active_monitor_output(&egl, GSR_CONNECTION_DRM, monitor_output_callback_print, NULL);
|
||||
for_each_active_monitor_output(egl, GSR_CONNECTION_DRM, monitor_output_callback_print, NULL);
|
||||
_exit(1);
|
||||
}
|
||||
} else {
|
||||
if(strcmp(window_str, "screen") != 0 && strcmp(window_str, "screen-direct") != 0 && strcmp(window_str, "screen-direct-force") != 0) {
|
||||
gsr_monitor gmon;
|
||||
if(!get_monitor_by_name(&egl, GSR_CONNECTION_X11, window_str, &gmon)) {
|
||||
const int screens_width = XWidthOfScreen(DefaultScreenOfDisplay(egl.x11.dpy));
|
||||
const int screens_height = XWidthOfScreen(DefaultScreenOfDisplay(egl.x11.dpy));
|
||||
if(!get_monitor_by_name(egl, GSR_CONNECTION_X11, window_str, &gmon)) {
|
||||
const int screens_width = XWidthOfScreen(DefaultScreenOfDisplay(egl->x11.dpy));
|
||||
const int screens_height = XWidthOfScreen(DefaultScreenOfDisplay(egl->x11.dpy));
|
||||
fprintf(stderr, "gsr error: display \"%s\" not found, expected one of:\n", window_str);
|
||||
fprintf(stderr, " \"screen\" (%dx%d+%d+%d)\n", screens_width, screens_height, 0, 0);
|
||||
fprintf(stderr, " \"screen-direct\" (%dx%d+%d+%d)\n", screens_width, screens_height, 0, 0);
|
||||
fprintf(stderr, " \"screen-direct-force\" (%dx%d+%d+%d)\n", screens_width, screens_height, 0, 0);
|
||||
for_each_active_monitor_output(&egl, GSR_CONNECTION_X11, monitor_output_callback_print, NULL);
|
||||
for_each_active_monitor_output(egl, GSR_CONNECTION_X11, monitor_output_callback_print, NULL);
|
||||
_exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(use_software_video_encoder && (wayland || egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA)) {
|
||||
gsr_capture_kms_software_params kms_params;
|
||||
kms_params.egl = &egl;
|
||||
if(use_software_video_encoder && (wayland || egl->gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA)) {
|
||||
gsr_capture_kms_params kms_params;
|
||||
kms_params.egl = egl;
|
||||
kms_params.display_to_capture = window_str;
|
||||
kms_params.hdr = video_codec_is_hdr(video_codec);
|
||||
kms_params.color_range = color_range;
|
||||
kms_params.record_cursor = record_cursor;
|
||||
capture = gsr_capture_kms_software_create(&kms_params);
|
||||
capture = gsr_capture_kms_create(&kms_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
} else {
|
||||
if(egl.gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA) {
|
||||
if(egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA) {
|
||||
if(wayland) {
|
||||
gsr_capture_kms_cuda_params kms_params;
|
||||
kms_params.egl = &egl;
|
||||
gsr_capture_kms_params kms_params;
|
||||
kms_params.egl = egl;
|
||||
kms_params.display_to_capture = window_str;
|
||||
kms_params.hdr = video_codec_is_hdr(video_codec);
|
||||
kms_params.color_range = color_range;
|
||||
kms_params.record_cursor = record_cursor;
|
||||
capture = gsr_capture_kms_cuda_create(&kms_params);
|
||||
capture = gsr_capture_kms_create(&kms_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
} else {
|
||||
@@ -1668,7 +1667,7 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
|
||||
}
|
||||
|
||||
gsr_capture_nvfbc_params nvfbc_params;
|
||||
nvfbc_params.egl = &egl;
|
||||
nvfbc_params.egl = egl;
|
||||
nvfbc_params.display_to_capture = capture_target;
|
||||
nvfbc_params.fps = fps;
|
||||
nvfbc_params.pos = { 0, 0 };
|
||||
@@ -1684,13 +1683,13 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
|
||||
_exit(1);
|
||||
}
|
||||
} else {
|
||||
gsr_capture_kms_vaapi_params kms_params;
|
||||
kms_params.egl = &egl;
|
||||
gsr_capture_kms_params kms_params;
|
||||
kms_params.egl = egl;
|
||||
kms_params.display_to_capture = window_str;
|
||||
kms_params.hdr = video_codec_is_hdr(video_codec);
|
||||
kms_params.color_range = color_range;
|
||||
kms_params.record_cursor = record_cursor;
|
||||
capture = gsr_capture_kms_vaapi_create(&kms_params);
|
||||
capture = gsr_capture_kms_create(&kms_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
}
|
||||
@@ -1710,57 +1709,55 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
|
||||
}
|
||||
|
||||
if(!capture) {
|
||||
if(use_software_video_encoder) {
|
||||
gsr_capture_xcomposite_software_params xcomposite_params;
|
||||
xcomposite_params.base.egl = &egl;
|
||||
xcomposite_params.base.window = src_window_id;
|
||||
xcomposite_params.base.follow_focused = follow_focused;
|
||||
xcomposite_params.base.region_size = region_size;
|
||||
xcomposite_params.base.color_range = color_range;
|
||||
xcomposite_params.base.record_cursor = record_cursor;
|
||||
xcomposite_params.base.track_damage = track_damage;
|
||||
capture = gsr_capture_xcomposite_software_create(&xcomposite_params);
|
||||
gsr_capture_xcomposite_params xcomposite_params;
|
||||
xcomposite_params.egl = egl;
|
||||
xcomposite_params.window = src_window_id;
|
||||
xcomposite_params.follow_focused = follow_focused;
|
||||
xcomposite_params.region_size = region_size;
|
||||
xcomposite_params.color_range = color_range;
|
||||
xcomposite_params.record_cursor = record_cursor;
|
||||
xcomposite_params.track_damage = track_damage;
|
||||
capture = gsr_capture_xcomposite_create(&xcomposite_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
} else {
|
||||
switch(egl.gpu_info.vendor) {
|
||||
case GSR_GPU_VENDOR_AMD:
|
||||
case GSR_GPU_VENDOR_INTEL: {
|
||||
gsr_capture_xcomposite_vaapi_params xcomposite_params;
|
||||
xcomposite_params.base.egl = &egl;
|
||||
xcomposite_params.base.window = src_window_id;
|
||||
xcomposite_params.base.follow_focused = follow_focused;
|
||||
xcomposite_params.base.region_size = region_size;
|
||||
xcomposite_params.base.color_range = color_range;
|
||||
xcomposite_params.base.record_cursor = record_cursor;
|
||||
xcomposite_params.base.track_damage = track_damage;
|
||||
capture = gsr_capture_xcomposite_vaapi_create(&xcomposite_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
break;
|
||||
}
|
||||
case GSR_GPU_VENDOR_NVIDIA: {
|
||||
gsr_capture_xcomposite_cuda_params xcomposite_params;
|
||||
xcomposite_params.base.egl = &egl;
|
||||
xcomposite_params.base.window = src_window_id;
|
||||
xcomposite_params.base.follow_focused = follow_focused;
|
||||
xcomposite_params.base.region_size = region_size;
|
||||
xcomposite_params.base.color_range = color_range;
|
||||
xcomposite_params.base.record_cursor = record_cursor;
|
||||
xcomposite_params.base.track_damage = track_damage;
|
||||
xcomposite_params.overclock = overclock;
|
||||
capture = gsr_capture_xcomposite_cuda_create(&xcomposite_params);
|
||||
if(!capture)
|
||||
_exit(1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return capture;
|
||||
}
|
||||
|
||||
static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, bool hdr, bool use_software_video_encoder) {
|
||||
gsr_video_encoder *video_encoder = nullptr;
|
||||
|
||||
if(use_software_video_encoder) {
|
||||
gsr_video_encoder_software_params params;
|
||||
params.egl = egl;
|
||||
params.hdr = hdr;
|
||||
video_encoder = gsr_video_encoder_software_create(¶ms);
|
||||
return video_encoder;
|
||||
}
|
||||
|
||||
switch(egl->gpu_info.vendor) {
|
||||
case GSR_GPU_VENDOR_AMD:
|
||||
case GSR_GPU_VENDOR_INTEL: {
|
||||
gsr_video_encoder_vaapi_params params;
|
||||
params.egl = egl;
|
||||
params.hdr = hdr;
|
||||
video_encoder = gsr_video_encoder_vaapi_create(¶ms);
|
||||
break;
|
||||
}
|
||||
case GSR_GPU_VENDOR_NVIDIA: {
|
||||
gsr_video_encoder_cuda_params params;
|
||||
params.egl = egl;
|
||||
params.overclock = overclock;
|
||||
params.hdr = hdr;
|
||||
video_encoder = gsr_video_encoder_cuda_create(¶ms);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return video_encoder;
|
||||
}
|
||||
|
||||
static AVPixelFormat get_pixel_format(gsr_gpu_vendor vendor, bool use_software_video_encoder) {
|
||||
if(use_software_video_encoder) {
|
||||
return AV_PIX_FMT_NV12;
|
||||
@@ -2144,10 +2141,12 @@ int main(int argc, char **argv) {
|
||||
|
||||
if(egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA && overclock) {
|
||||
fprintf(stderr, "Info: overclock option has no effect on amd/intel, ignoring option\n");
|
||||
overclock = false;
|
||||
}
|
||||
|
||||
if(egl.gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA && overclock && wayland) {
|
||||
fprintf(stderr, "Info: overclocking is not possible on nvidia on wayland, ignoring option\n");
|
||||
overclock = false;
|
||||
}
|
||||
|
||||
egl.card_path[0] = '\0';
|
||||
@@ -2441,7 +2440,7 @@ int main(int argc, char **argv) {
|
||||
_exit(2);
|
||||
}
|
||||
|
||||
gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, egl, fps, overclock, video_codec, color_range, record_cursor, framerate_mode == FramerateMode::CONTENT, use_software_video_encoder);
|
||||
gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, &egl, fps, overclock, video_codec, color_range, record_cursor, framerate_mode == FramerateMode::CONTENT, use_software_video_encoder);
|
||||
|
||||
// (Some?) livestreaming services require at least one audio track to work.
|
||||
// If not audio is provided then create one silent audio track.
|
||||
@@ -2486,6 +2485,33 @@ int main(int argc, char **argv) {
|
||||
_exit(capture_result);
|
||||
}
|
||||
|
||||
gsr_video_encoder *video_encoder = create_video_encoder(&egl, overclock, hdr, use_software_video_encoder);
|
||||
if(!video_encoder) {
|
||||
fprintf(stderr, "Error: failed to create video encoder\n");
|
||||
_exit(1);
|
||||
}
|
||||
|
||||
if(!gsr_video_encoder_start(video_encoder, video_codec_context, video_frame)) {
|
||||
fprintf(stderr, "Error: failed to start video encoder\n");
|
||||
_exit(1);
|
||||
}
|
||||
|
||||
gsr_color_conversion_params color_conversion_params;
|
||||
memset(&color_conversion_params, 0, sizeof(color_conversion_params));
|
||||
color_conversion_params.color_range = color_range;
|
||||
color_conversion_params.egl = &egl;
|
||||
color_conversion_params.source_color = gsr_capture_get_source_color(capture);
|
||||
color_conversion_params.load_external_image_shader = gsr_capture_uses_external_image(capture);
|
||||
gsr_video_encoder_get_textures(video_encoder, color_conversion_params.destination_textures, &color_conversion_params.num_destination_textures, &color_conversion_params.destination_color);
|
||||
|
||||
gsr_color_conversion color_conversion;
|
||||
if(gsr_color_conversion_init(&color_conversion, &color_conversion_params) != 0) {
|
||||
fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: failed to create color conversion\n");
|
||||
_exit(1);
|
||||
}
|
||||
|
||||
gsr_color_conversion_clear(&color_conversion);
|
||||
|
||||
if(use_software_video_encoder) {
|
||||
open_video_software(video_codec_context, quality, pixel_format, hdr);
|
||||
} else {
|
||||
@@ -2852,7 +2878,8 @@ int main(int argc, char **argv) {
|
||||
const int num_frames = framerate_mode == FramerateMode::CONSTANT ? std::max((int64_t)0LL, expected_frames - video_pts_counter) : 1;
|
||||
|
||||
if(num_frames > 0 && !paused) {
|
||||
gsr_capture_capture(capture, video_frame);
|
||||
gsr_capture_capture(capture, video_frame, &color_conversion);
|
||||
gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame);
|
||||
|
||||
// TODO: Check if duplicate frame can be saved just by writing it with a different pts instead of sending it again
|
||||
for(int i = 0; i < num_frames; ++i) {
|
||||
@@ -2876,7 +2903,7 @@ int main(int argc, char **argv) {
|
||||
}
|
||||
}
|
||||
|
||||
gsr_capture_end(capture, video_frame);
|
||||
gsr_capture_capture_end(capture, video_frame);
|
||||
video_pts_counter += num_frames;
|
||||
}
|
||||
}
|
||||
@@ -2945,6 +2972,8 @@ int main(int argc, char **argv) {
|
||||
if(replay_buffer_size_secs == -1 && !(output_format->flags & AVFMT_NOFILE))
|
||||
avio_close(av_format_context->pb);
|
||||
|
||||
gsr_color_conversion_deinit(&color_conversion);
|
||||
gsr_video_encoder_destroy(video_encoder, video_codec_context);
|
||||
gsr_capture_destroy(capture, video_codec_context);
|
||||
|
||||
if(replay_buffer_size_secs == -1 && recording_saved_script)
|
||||
|
||||
@@ -15,7 +15,7 @@ bool gsr_xnvctrl_load(gsr_xnvctrl *self, Display *display) {
|
||||
return false;
|
||||
}
|
||||
|
||||
dlsym_assign required_dlsym[] = {
|
||||
const dlsym_assign required_dlsym[] = {
|
||||
{ (void**)&self->XNVCTRLQueryExtension, "XNVCTRLQueryExtension" },
|
||||
{ (void**)&self->XNVCTRLSetTargetAttributeAndGetStatus, "XNVCTRLSetTargetAttributeAndGetStatus" },
|
||||
{ (void**)&self->XNVCTRLQueryValidTargetAttributeValues, "XNVCTRLQueryValidTargetAttributeValues" },
|
||||
|
||||
Reference in New Issue
Block a user