#include <gbm.h>
#include <stdlib.h>
#include <wlr/util/log.h>
#include <xf86drm.h>
#include <xf86drmMode.h>
#include "backend/drm/drm.h"
#include "backend/drm/iface.h"
#include "backend/drm/util.h"

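// Small wrapper around a libdrm atomic request. "cursor" stores the request
// cursor saved at atomic_begin() so a failed update can be rolled back, and
// "failed" latches any error so subsequent operations become no-ops.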
struct atomic {
	drmModeAtomicReq *req;
	int cursor;
	bool failed;
};

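// Starts a new atomic update on the CRTC's lazily-allocated request and
// remembers the current request cursor for rollback.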
static void atomic_begin(struct wlr_drm_crtc *crtc, struct atomic *atom) {
	if (!crtc->atomic) {
		crtc->atomic = drmModeAtomicAlloc();
		if (!crtc->atomic) {
			wlr_log_errno(WLR_ERROR, "Allocation failed");
			atom->failed = true;
			return;
		}
	}

	atom->req = crtc->atomic;
	atom->cursor = drmModeAtomicGetCursor(atom->req);
	atom->failed = false;
}

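// Validates the pending request with a TEST_ONLY commit; on failure the
// request is rolled back to the cursor saved by atomic_begin().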
static bool atomic_end(int drm_fd, uint32_t flags, struct atomic *atom) {
	if (atom->failed) {
		return false;
	}

	flags |= DRM_MODE_ATOMIC_TEST_ONLY;
	if (drmModeAtomicCommit(drm_fd, atom->req, flags, NULL)) {
		wlr_log_errno(WLR_DEBUG, "Atomic test failed");
		drmModeAtomicSetCursor(atom->req, atom->cursor);
		return false;
	}

	return true;
}

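// Submits the request to the kernel, logging failures with the connector name,
// then resets the request cursor so the CRTC's request can be reused.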
static bool atomic_commit(int drm_fd, struct atomic *atom,
		struct wlr_drm_connector *conn, uint32_t flags, bool modeset) {
	struct wlr_drm_backend *drm =
		get_drm_backend_from_backend(conn->output.backend);
	if (atom->failed) {
		return false;
	}

	int ret = drmModeAtomicCommit(drm_fd, atom->req, flags, drm);
	if (ret) {
		wlr_log_errno(WLR_ERROR, "%s: Atomic commit failed (%s)",
			conn->output.name, modeset ? "modeset" : "pageflip");
	}

	drmModeAtomicSetCursor(atom->req, 0);

	return !ret;
}

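// Adds a single property to the request, marking the whole update as failed
// if libdrm rejects it.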
static void atomic_add(struct atomic *atom, uint32_t id, uint32_t prop, uint64_t val) {
	if (!atom->failed && drmModeAtomicAddProperty(atom->req, id, prop, val) < 0) {
		wlr_log_errno(WLR_ERROR, "Failed to add atomic DRM property");
		atom->failed = true;
	}
}

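// Packs the CRTC's R/G/B gamma ramps into a struct drm_color_lut array and
// uploads it as a DRM property blob. An empty gamma table results in a blob ID
// of 0, which clears the GAMMA_LUT property.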
static bool create_gamma_lut_blob(struct wlr_drm_backend *drm,
		struct wlr_drm_crtc *crtc, uint32_t *blob_id) {
	if (crtc->gamma_table_size == 0) {
		*blob_id = 0;
		return true;
	}

	uint32_t size = crtc->gamma_table_size;
	uint16_t *r = crtc->gamma_table;
	uint16_t *g = crtc->gamma_table + size;
	uint16_t *b = crtc->gamma_table + 2 * size;

	struct drm_color_lut *gamma = malloc(size * sizeof(struct drm_color_lut));
	if (gamma == NULL) {
		wlr_log(WLR_ERROR, "Failed to allocate gamma table");
		return false;
	}

	for (size_t i = 0; i < size; i++) {
		gamma[i].red = r[i];
		gamma[i].green = g[i];
		gamma[i].blue = b[i];
	}

	if (drmModeCreatePropertyBlob(drm->fd, gamma,
			size * sizeof(struct drm_color_lut), blob_id) != 0) {
		wlr_log_errno(WLR_ERROR, "Unable to create property blob");
		free(gamma);
		return false;
	}
	free(gamma);

	return true;
}

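// Detaches a plane by clearing its FB_ID and CRTC_ID properties.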
static void plane_disable(struct atomic *atom, struct wlr_drm_plane *plane) {
	uint32_t id = plane->id;
	const union wlr_drm_plane_props *props = &plane->props;
	atomic_add(atom, id, props->fb_id, 0);
	atomic_add(atom, id, props->crtc_id, 0);
}

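// Points the plane at its next framebuffer and positions it on the CRTC; any
// failure to acquire the buffer marks the atomic update as failed.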
static void set_plane_props(struct atomic *atom, struct wlr_drm_backend *drm,
		struct wlr_drm_plane *plane, uint32_t crtc_id, int32_t x, int32_t y) {
	uint32_t id = plane->id;
	const union wlr_drm_plane_props *props = &plane->props;
	struct wlr_drm_fb *fb = plane_get_next_fb(plane);
	struct gbm_bo *bo = drm_fb_acquire(fb, drm, &plane->mgpu_surf);
	if (!bo) {
		goto error;
	}

	uint32_t fb_id = get_fb_for_bo(bo, drm->addfb2_modifiers);
	if (!fb_id) {
		goto error;
	}

	// The src_* properties are in 16.16 fixed point
	atomic_add(atom, id, props->src_x, 0);
	atomic_add(atom, id, props->src_y, 0);
	atomic_add(atom, id, props->src_w, (uint64_t)plane->surf.width << 16);
	atomic_add(atom, id, props->src_h, (uint64_t)plane->surf.height << 16);
	atomic_add(atom, id, props->crtc_w, plane->surf.width);
	atomic_add(atom, id, props->crtc_h, plane->surf.height);
	atomic_add(atom, id, props->fb_id, fb_id);
	atomic_add(atom, id, props->crtc_id, crtc_id);
	atomic_add(atom, id, props->crtc_x, (uint64_t)x);
	atomic_add(atom, id, props->crtc_y, (uint64_t)y);

	return;

error:
	atom->failed = true;
}

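// Commits the CRTC's pending state: recreates the mode and gamma LUT property
// blobs as needed, fills in connector, CRTC and plane properties, validates
// the request with a test commit, then performs the real commit.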
static bool atomic_crtc_commit(struct wlr_drm_backend *drm,
		struct wlr_drm_connector *conn, uint32_t flags) {
	struct wlr_drm_crtc *crtc = conn->crtc;

	bool modeset = crtc->pending & WLR_DRM_CRTC_MODE;
	if (modeset) {
		if (crtc->mode_id != 0) {
			drmModeDestroyPropertyBlob(drm->fd, crtc->mode_id);
		}

		if (drmModeCreatePropertyBlob(drm->fd, &crtc->mode,
				sizeof(drmModeModeInfo), &crtc->mode_id)) {
			wlr_log_errno(WLR_ERROR, "Unable to create mode property blob");
			return false;
		}
	}

	if (crtc->pending & WLR_DRM_CRTC_GAMMA_LUT) {
		// Fall back to the legacy gamma interface when gamma properties are
		// not available (can happen on older Intel GPUs that support gamma
		// but not degamma).
		if (crtc->props.gamma_lut == 0) {
			if (!drm_legacy_crtc_set_gamma(drm, crtc)) {
				return false;
			}
		} else {
			if (crtc->gamma_lut != 0) {
				drmModeDestroyPropertyBlob(drm->fd, crtc->gamma_lut);
			}

			if (!create_gamma_lut_blob(drm, crtc, &crtc->gamma_lut)) {
				return false;
			}
		}
	}

	if (modeset) {
		flags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
	} else {
		flags |= DRM_MODE_ATOMIC_NONBLOCK;
	}

	struct atomic atom;
	atomic_begin(crtc, &atom);
	atomic_add(&atom, conn->id, conn->props.crtc_id, crtc->id);
	if (modeset && conn->props.link_status != 0) {
		atomic_add(&atom, conn->id, conn->props.link_status,
			DRM_MODE_LINK_STATUS_GOOD);
	}
	atomic_add(&atom, crtc->id, crtc->props.mode_id, crtc->mode_id);
	atomic_add(&atom, crtc->id, crtc->props.active, 1);
	atomic_add(&atom, crtc->id, crtc->props.gamma_lut, crtc->gamma_lut);
	set_plane_props(&atom, drm, crtc->primary, crtc->id, 0, 0);
	if (crtc->cursor) {
		if (crtc->cursor->cursor_enabled) {
			set_plane_props(&atom, drm, crtc->cursor, crtc->id,
				conn->cursor_x, conn->cursor_y);
		} else {
			plane_disable(&atom, crtc->cursor);
		}
	}

	if (!atomic_end(drm->fd, modeset ? DRM_MODE_ATOMIC_ALLOW_MODESET : 0,
			&atom)) {
		drmModeAtomicSetCursor(atom.req, 0);
		return false;
	}

	if (!atomic_commit(drm->fd, &atom, conn, flags, modeset)) {
		return false;
	}

	if (crtc->cursor) {
		drm_fb_move(&crtc->cursor->queued_fb, &crtc->cursor->pending_fb);
	}
	return true;
}

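// Enables or disables a connector by toggling the CRTC's ACTIVE property and
// binding or unbinding the connector and mode blob.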
static bool atomic_conn_enable(struct wlr_drm_backend *drm,
		struct wlr_drm_connector *conn, bool enable) {
	struct wlr_drm_crtc *crtc = conn->crtc;
	if (crtc == NULL) {
		return !enable;
	}

	struct atomic atom;
	atomic_begin(crtc, &atom);
	atomic_add(&atom, crtc->id, crtc->props.active, enable);
	if (enable) {
		atomic_add(&atom, conn->id, conn->props.crtc_id, crtc->id);
		atomic_add(&atom, crtc->id, crtc->props.mode_id, crtc->mode_id);
	} else {
		atomic_add(&atom, conn->id, conn->props.crtc_id, 0);
		atomic_add(&atom, crtc->id, crtc->props.mode_id, 0);
	}
	return atomic_commit(drm->fd, &atom, conn, DRM_MODE_ATOMIC_ALLOW_MODESET,
		true);
}

static bool atomic_crtc_set_cursor(struct wlr_drm_backend *drm,
		struct wlr_drm_crtc *crtc, struct gbm_bo *bo) {
	/* Cursor updates happen when we pageflip */
	return true;
}

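// Reads the GAMMA_LUT_SIZE property, falling back to the legacy interface when
// the driver doesn't expose it.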
static size_t atomic_crtc_get_gamma_size(struct wlr_drm_backend *drm,
		struct wlr_drm_crtc *crtc) {
	if (crtc->props.gamma_lut_size == 0) {
		return legacy_iface.crtc_get_gamma_size(drm, crtc);
	}

	uint64_t gamma_lut_size;
	if (!get_drm_prop(drm->fd, crtc->id, crtc->props.gamma_lut_size,
			&gamma_lut_size)) {
		wlr_log(WLR_ERROR, "Unable to get gamma lut size");
		return 0;
	}

	return (size_t)gamma_lut_size;
}

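// DRM interface implementation backed by the atomic KMS API.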
const struct wlr_drm_interface atomic_iface = {
	.conn_enable = atomic_conn_enable,
	.crtc_commit = atomic_crtc_commit,
	.crtc_set_cursor = atomic_crtc_set_cursor,
	.crtc_get_gamma_size = atomic_crtc_get_gamma_size,
};