#include <drm/i915_drm.h>
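
/* PPGTT (per-process GTT) support for SandyBridge/Gen6 and later. */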
static void i915_ppgtt_clear_range(struct i915_hw_ppgtt *ppgtt,
                                   unsigned first_entry,
                                   unsigned num_entries)
{
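        /* Point each PTE in the affected range back at the scratch page. */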
        for (i = first_pte; i < last_pte; i++)
                pt_vaddr[i] = scratch_pte;

        num_entries -= last_pte - first_pte;
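
/* i915_gem_init_aliasing_ppgtt(): the aliasing PPGTT steals its PDEs from
 * the end of the global GTT page table. */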
        unsigned first_pd_entry_in_global_pt;
        if (dev_priv->mm.gtt->needs_dmar) {
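                /* With VT-d (DMAR) active, each page-table page must be
                 * mapped for DMA. */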
                pt_addr = pci_map_page(dev->pdev, ppgtt->pt_pages[i],
                                       0, 4096, PCI_DMA_BIDIRECTIONAL);

                if (pci_dma_mapping_error(dev->pdev, pt_addr)) {
                        ret = -EIO;
                        goto err_pd_pin;
                }
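
        /* Scrub the whole PPGTT so every entry points at the scratch page. */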
        i915_ppgtt_clear_range(ppgtt, 0,
                               ppgtt->num_pd_entries * I915_PPGTT_PT_ENTRIES);

        dev_priv->mm.aliasing_ppgtt = ppgtt;
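
        /* Error path: unmap the page-table pages mapped so far. */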
        for (i--; i >= 0; i--)
                pci_unmap_page(dev->pdev, ppgtt->pt_dma_addr[i],
                               4096, PCI_DMA_BIDIRECTIONAL);

static void i915_ppgtt_insert_sg_entries(struct i915_hw_ppgtt *ppgtt,
                                         const struct sg_table *pages,
                                         unsigned first_entry,
                                         uint32_t pte_flags)
{
        unsigned i, j, m, segment_len;
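
        /* Walk the scatterlist, filling one page-table page at a time. */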
        while (i < pages->nents) {
                        pt_vaddr[j] = pte | pte_flags;
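
                        /* Grab the next page once this segment is exhausted. */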
                        if (++m == segment_len) {
                                if (++i == pages->nents)
                                        break;
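
/* i915_ppgtt_bind_object(): translate the cache level into PTE flags. */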
        switch (cache_level) {

        i915_ppgtt_insert_sg_entries(ppgtt,
                                     obj->pages,
                                     obj->gtt_space->start >> PAGE_SHIFT,
                                     pte_flags);
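
        /* i915_ppgtt_unbind_object(): point the object's PTEs back at
         * the scratch page. */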
        i915_ppgtt_clear_range(ppgtt,
                               obj->gtt_space->start >> PAGE_SHIFT,
                               obj->base.size >> PAGE_SHIFT);
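
/* XXX kill agp_type! */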
static unsigned int cache_level_to_agp_type(struct drm_device *dev,
                                            enum i915_cache_level cache_level)
{
        switch (cache_level) {
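
/* do_idling(): on chipsets where GTT updates require an idle GPU
 * (do_idle_maps), force uninterruptible waits and idle the GPU first. */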
        bool ret = dev_priv->mm.interruptible;
        if (unlikely(dev_priv->mm.gtt->do_idle_maps)) {
                dev_priv->mm.interruptible = false;
                if (i915_gpu_idle(dev_priv->dev))
                        DRM_ERROR("Couldn't idle GPU\n");

static void undo_idling(struct drm_i915_private *dev_priv,
                        bool interruptible)
{
        if (unlikely(dev_priv->mm.gtt->do_idle_maps))
                dev_priv->mm.interruptible = interruptible;
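
/* i915_gem_restore_gtt_mappings(): first fill our portion of the GTT
 * with scratch pages. */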
        intel_gtt_clear_range(dev_priv->mm.gtt_start / PAGE_SIZE,
                              (dev_priv->mm.gtt_end - dev_priv->mm.gtt_start) /
                              PAGE_SIZE);
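
/* i915_gem_gtt_bind_object(): resolve the AGP type used for the global
 * GTT binding. */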
        unsigned int agp_type = cache_level_to_agp_type(dev, cache_level);
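
/* i915_gem_gtt_unbind_object(): bracket the GTT update with the idle
 * dance from do_idling()/undo_idling(). */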
        interruptible = do_idling(dev_priv);

        undo_idling(dev_priv, interruptible);
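
/* Keep a one-page guard between neighbouring nodes of different cache
 * colors (snooped vs. unsnooped). */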
static void i915_gtt_color_adjust(struct drm_mm_node *node,
                                  unsigned long color,
                                  unsigned long *start,
                                  unsigned long *end)
{
        if (node->color != color)
                *start += 4096;

void i915_gem_init_global_gtt(struct drm_device *dev,
                              unsigned long start,
                              unsigned long mappable_end,
                              unsigned long end)
{
        if (!HAS_LLC(dev))
                dev_priv->mm.gtt_space.color_adjust = i915_gtt_color_adjust;
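
        /* Record the layout of our portion of the GTT. */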
        dev_priv->mm.gtt_start = start;
        dev_priv->mm.gtt_mappable_end = mappable_end;
        dev_priv->mm.gtt_end = end;
        dev_priv->mm.gtt_total = end - start;
        dev_priv->mm.mappable_gtt_total = min(end, mappable_end) - start;