Merge tag 'drm-intel-next-2012-12-21' of git://people.freedesktop.org/~danvet/drm-intel into drm-next
Daniel writes:
- seqno wrap fixes and debug infrastructure from Mika Kuoppala and Chris Wilson
- some leftover kill-agp on gen6+ patches from Ben
- hotplug improvements from Damien
- clear fb when allocated from stolen, avoids dirt on the fbcon (Chris)
- Stolen mem support from Chris Wilson, one of the many steps to get to real fastboot support.
- Some DDI code cleanups from Paulo.
- Some refactorings around lvds and dp code.
- some random little bits&pieces

* tag 'drm-intel-next-2012-12-21' of git://people.freedesktop.org/~danvet/drm-intel: (93 commits)
  drm/i915: Return the real error code from intel_set_mode()
  drm/i915: Make GSM void
  drm/i915: Move GSM mapping into dev_priv
  drm/i915: Move even more gtt code to i915_gem_gtt
  drm/i915: Make next_seqno debugs entry to use i915_gem_set_seqno
  drm/i915: Introduce i915_gem_set_seqno()
  drm/i915: Always clear semaphore mboxes on seqno wrap
  drm/i915: Initialize hardware semaphore state on ring init
  drm/i915: Introduce ring set_seqno
  drm/i915: Missed conversion to gtt_pte_t
  drm/i915: Bug on unsupported swizzled platforms
  drm/i915: BUG() if fences are used on unsupported platform
  drm/i915: fixup overlay stolen memory leak
  drm/i915: clean up PIPECONF bpc #defines
  drm/i915: add intel_dp_set_signal_levels
  drm/i915: remove leftover display.update_wm assignment
  drm/i915: check for the PCH when setting pch_transcoder
  drm/i915: Clear the stolen fb before enabling
  drm/i915: Access to snooped system memory through the GTT is incoherent
  drm/i915: Remove stale comment about intel_dp_detect()
  ...

Conflicts:
	drivers/gpu/drm/i915/intel_display.c
@@ -102,20 +102,6 @@ int drm_mm_pre_get(struct drm_mm *mm)
 }
 EXPORT_SYMBOL(drm_mm_pre_get);
 
-static inline unsigned long drm_mm_hole_node_start(struct drm_mm_node *hole_node)
-{
-	return hole_node->start + hole_node->size;
-}
-
-static inline unsigned long drm_mm_hole_node_end(struct drm_mm_node *hole_node)
-{
-	struct drm_mm_node *next_node =
-		list_entry(hole_node->node_list.next, struct drm_mm_node,
-			   node_list);
-
-	return next_node->start;
-}
-
 static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
				 struct drm_mm_node *node,
				 unsigned long size, unsigned alignment,
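The two inline helpers removed above do not simply go away: later hunks call __drm_mm_hole_node_start() and __drm_mm_hole_node_end(), so the logic presumably moves into include/drm/drm_mm.h under the double-underscore names (that header hunk is not part of this excerpt). A minimal sketch of what the header-side versions would look like, reconstructed from the bodies deleted here:

    /* Sketch only: reconstructed from the removed drm_mm.c bodies; the real
     * header change is not shown in this excerpt. */
    static inline unsigned long __drm_mm_hole_node_start(struct drm_mm_node *hole_node)
    {
    	return hole_node->start + hole_node->size; /* hole begins where the node ends */
    }

    static inline unsigned long __drm_mm_hole_node_end(struct drm_mm_node *hole_node)
    {
    	/* hole ends where the next node in the address-ordered list starts */
    	return list_entry(hole_node->node_list.next,
    			  struct drm_mm_node, node_list)->start;
    }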
@@ -127,7 +113,7 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
 	unsigned long adj_start = hole_start;
 	unsigned long adj_end = hole_end;
 
-	BUG_ON(!hole_node->hole_follows || node->allocated);
+	BUG_ON(node->allocated);
 
 	if (mm->color_adjust)
 		mm->color_adjust(hole_node, color, &adj_start, &adj_end);
@@ -155,12 +141,57 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
 	BUG_ON(node->start + node->size > adj_end);
 
 	node->hole_follows = 0;
-	if (node->start + node->size < hole_end) {
+	if (__drm_mm_hole_node_start(node) < hole_end) {
 		list_add(&node->hole_stack, &mm->hole_stack);
 		node->hole_follows = 1;
 	}
 }
 
+struct drm_mm_node *drm_mm_create_block(struct drm_mm *mm,
+					unsigned long start,
+					unsigned long size,
+					bool atomic)
+{
+	struct drm_mm_node *hole, *node;
+	unsigned long end = start + size;
+	unsigned long hole_start;
+	unsigned long hole_end;
+
+	drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
+		if (hole_start > start || hole_end < end)
+			continue;
+
+		node = drm_mm_kmalloc(mm, atomic);
+		if (unlikely(node == NULL))
+			return NULL;
+
+		node->start = start;
+		node->size = size;
+		node->mm = mm;
+		node->allocated = 1;
+
+		INIT_LIST_HEAD(&node->hole_stack);
+		list_add(&node->node_list, &hole->node_list);
+
+		if (start == hole_start) {
+			hole->hole_follows = 0;
+			list_del_init(&hole->hole_stack);
+		}
+
+		node->hole_follows = 0;
+		if (end != hole_end) {
+			list_add(&node->hole_stack, &mm->hole_stack);
+			node->hole_follows = 1;
+		}
+
+		return node;
+	}
+
+	WARN(1, "no hole found for block 0x%lx + 0x%lx\n", start, size);
+	return NULL;
+}
+EXPORT_SYMBOL(drm_mm_create_block);
+
 struct drm_mm_node *drm_mm_get_block_generic(struct drm_mm_node *hole_node,
					     unsigned long size,
					     unsigned alignment,
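The drm_mm_create_block() added in the hunk above places a node at a caller-chosen offset instead of letting the allocator pick a hole, which is what the stolen-memory work needs since those objects already live at fixed addresses. A hedged usage sketch; my_mm, offset and size are illustrative placeholders, not taken from this commit:

    /* Illustrative caller: claim the fixed range [offset, offset + size).
     * my_mm, offset and size are hypothetical placeholders. */
    struct drm_mm_node *node;

    node = drm_mm_create_block(my_mm, offset, size, false /* not atomic */);
    if (node == NULL)
    	return -ENOSPC;	/* no single hole spans the requested range */

Note that NULL covers both allocation failure and the case where no hole covers the range; a node is returned only if the exact range could be claimed.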
@@ -251,7 +282,7 @@ static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
 	BUG_ON(node->start + node->size > end);
 
 	node->hole_follows = 0;
-	if (node->start + node->size < hole_end) {
+	if (__drm_mm_hole_node_start(node) < hole_end) {
 		list_add(&node->hole_stack, &mm->hole_stack);
 		node->hole_follows = 1;
 	}
@@ -325,12 +356,13 @@ void drm_mm_remove_node(struct drm_mm_node *node)
	    list_entry(node->node_list.prev, struct drm_mm_node, node_list);
 
 	if (node->hole_follows) {
-		BUG_ON(drm_mm_hole_node_start(node)
-		       == drm_mm_hole_node_end(node));
+		BUG_ON(__drm_mm_hole_node_start(node) ==
+		       __drm_mm_hole_node_end(node));
 		list_del(&node->hole_stack);
 	} else
-		BUG_ON(drm_mm_hole_node_start(node)
-		       != drm_mm_hole_node_end(node));
+		BUG_ON(__drm_mm_hole_node_start(node) !=
+		       __drm_mm_hole_node_end(node));
+
 
 	if (!prev_node->hole_follows) {
 		prev_node->hole_follows = 1;
@@ -388,6 +420,8 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
 {
 	struct drm_mm_node *entry;
 	struct drm_mm_node *best;
+	unsigned long adj_start;
+	unsigned long adj_end;
 	unsigned long best_size;
 
 	BUG_ON(mm->scanned_blocks);
@@ -395,17 +429,13 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
 	best = NULL;
 	best_size = ~0UL;
 
-	list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
-		unsigned long adj_start = drm_mm_hole_node_start(entry);
-		unsigned long adj_end = drm_mm_hole_node_end(entry);
-
+	drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
 		if (mm->color_adjust) {
 			mm->color_adjust(entry, color, &adj_start, &adj_end);
 			if (adj_end <= adj_start)
 				continue;
 		}
 
-		BUG_ON(!entry->hole_follows);
 		if (!check_free_hole(adj_start, adj_end, size, alignment))
 			continue;
 
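Both search loops are rewritten on top of drm_mm_for_each_hole(), which hands back each hole's span in adj_start/adj_end and replaces the open-coded walk over mm->hole_stack; the macro itself lives in include/drm/drm_mm.h and is not shown in this excerpt. The iteration pattern, as a sketch with a hypothetical size check:

    /* Sketch of the iterator pattern used above; mm and size are placeholders. */
    struct drm_mm_node *entry;
    unsigned long hole_start, hole_end;

    drm_mm_for_each_hole(entry, mm, hole_start, hole_end) {
    	/* [hole_start, hole_end) is the free span following entry */
    	if (hole_end - hole_start >= size)
    		break;	/* first hole big enough */
    }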
@@ -432,6 +462,8 @@ struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
 {
 	struct drm_mm_node *entry;
 	struct drm_mm_node *best;
+	unsigned long adj_start;
+	unsigned long adj_end;
 	unsigned long best_size;
 
 	BUG_ON(mm->scanned_blocks);
@@ -439,13 +471,11 @@ struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
 	best = NULL;
 	best_size = ~0UL;
 
-	list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
-		unsigned long adj_start = drm_mm_hole_node_start(entry) < start ?
-			start : drm_mm_hole_node_start(entry);
-		unsigned long adj_end = drm_mm_hole_node_end(entry) > end ?
-			end : drm_mm_hole_node_end(entry);
-
-		BUG_ON(!entry->hole_follows);
+	drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
+		if (adj_start < start)
+			adj_start = start;
+		if (adj_end > end)
+			adj_end = end;
 
 		if (mm->color_adjust) {
 			mm->color_adjust(entry, color, &adj_start, &adj_end);