Lines Matching defs:dirty
70 } dirty;
170 * vmw_fb_dirty_flush - flush dirty regions to the kms framebuffer
174 * This function flushes the dirty regions of the vmalloc framebuffer to the
178 * off during hibernation using the par->dirty.active bool.
195 if (!READ_ONCE(par->dirty.active))
209 spin_lock_irqsave(&par->dirty.lock, irq_flags);
210 if (!par->dirty.active) {
211 spin_unlock_irqrestore(&par->dirty.lock, irq_flags);
217 * Clip dirty area to framebuffer.
223 dst_x1 = par->dirty.x1 - par->fb_x;
224 dst_y1 = par->dirty.y1 - par->fb_y;
228 dst_x2 = par->dirty.x2 - par->fb_x;
229 dst_y2 = par->dirty.y2 - par->fb_y;
237 par->dirty.x1 = par->dirty.x2 = 0;
238 par->dirty.y1 = par->dirty.y2 = 0;
239 spin_unlock_irqrestore(&par->dirty.lock, irq_flags);
264 WARN_ON_ONCE(par->set_fb->funcs->dirty(cur_fb, NULL, 0, 0,
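
The hits from 170 through 264 outline the flush path: bail out early when dirty tracking is inactive, then, under par->dirty.lock, clip the accumulated rectangle to the framebuffer, snapshot it, mark it empty, drop the lock, and finally hand the clip to the KMS framebuffer's dirty callback. Below is a minimal sketch of that pattern; the vmw_fb_par layout is reconstructed from the matches above, and the copy from the shadow buffer to the bound buffer object as well as the fb_x/fb_y clipping are omitted, so treat it as an illustration rather than the driver's actual function.

    #include <linux/bug.h>
    #include <linux/spinlock.h>
    #include <drm/drm_framebuffer.h>

    /* Assumed shape of the driver-private state the matches refer to; only
     * the fields visible in the hits are listed, the rest of the real
     * struct is omitted. */
    struct vmw_fb_par {
            struct drm_framebuffer *set_fb;
            struct {
                    unsigned int x1, y1, x2, y2;
                    bool active;
                    spinlock_t lock;
            } dirty;
    };

    /* Sketch of the flush pattern seen at lines 195-264. */
    static void example_dirty_flush(struct vmw_fb_par *par)
    {
            struct drm_clip_rect clip;
            unsigned long irq_flags;

            /* Cheap unlocked check, re-checked under the lock (195, 210). */
            if (!READ_ONCE(par->dirty.active))
                    return;

            spin_lock_irqsave(&par->dirty.lock, irq_flags);
            if (!par->dirty.active) {
                    spin_unlock_irqrestore(&par->dirty.lock, irq_flags);
                    return;
            }

            /* Snapshot the accumulated rectangle, then mark it empty
             * (lines 237-238). */
            clip.x1 = par->dirty.x1;
            clip.y1 = par->dirty.y1;
            clip.x2 = par->dirty.x2;
            clip.y2 = par->dirty.y2;
            par->dirty.x1 = par->dirty.x2 = 0;
            par->dirty.y1 = par->dirty.y2 = 0;
            spin_unlock_irqrestore(&par->dirty.lock, irq_flags);

            /* Push the region to the KMS framebuffer, as at line 264. */
            WARN_ON_ONCE(par->set_fb->funcs->dirty(par->set_fb, NULL, 0, 0,
                                                   &clip, 1));
    }
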
280 spin_lock_irqsave(&par->dirty.lock, flags);
281 if (par->dirty.x1 == par->dirty.x2) {
282 par->dirty.x1 = x1;
283 par->dirty.y1 = y1;
284 par->dirty.x2 = x2;
285 par->dirty.y2 = y2;
286 /* if we are active start the dirty work
288 if (par->dirty.active)
292 if (x1 < par->dirty.x1)
293 par->dirty.x1 = x1;
294 if (y1 < par->dirty.y1)
295 par->dirty.y1 = y1;
296 if (x2 > par->dirty.x2)
297 par->dirty.x2 = x2;
298 if (y2 > par->dirty.y2)
299 par->dirty.y2 = y2;
301 spin_unlock_irqrestore(&par->dirty.lock, flags);
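
The matches at 280-301 are the marking side: still under par->dirty.lock, an empty rectangle (x1 == x2) simply adopts the new extents and, if tracking is active, the flush work is kicked; a non-empty rectangle is grown to the bounding box of old and new. A hedged sketch follows, reusing the struct assumed above; the work item argument and the VMWFB_DIRTY_DELAY value stand in for whatever is really scheduled at line 288.

    #include <linux/workqueue.h>

    #define VMWFB_DIRTY_DELAY (HZ / 30)   /* assumed delay, not from the hits */

    static void example_dirty_mark(struct vmw_fb_par *par,
                                   struct delayed_work *work,
                                   unsigned int x1, unsigned int y1,
                                   unsigned int x2, unsigned int y2)
    {
            unsigned long flags;

            spin_lock_irqsave(&par->dirty.lock, flags);
            if (par->dirty.x1 == par->dirty.x2) {
                    /* Rectangle was empty: adopt the new region outright. */
                    par->dirty.x1 = x1;
                    par->dirty.y1 = y1;
                    par->dirty.x2 = x2;
                    par->dirty.y2 = y2;
                    /* if we are active, start the dirty work (lines 286-288) */
                    if (par->dirty.active)
                            schedule_delayed_work(work, VMWFB_DIRTY_DELAY);
            } else {
                    /* Grow the rectangle to cover both old and new regions. */
                    if (x1 < par->dirty.x1)
                            par->dirty.x1 = x1;
                    if (y1 < par->dirty.y1)
                            par->dirty.y1 = y1;
                    if (x2 > par->dirty.x2)
                            par->dirty.x2 = x2;
                    if (y2 > par->dirty.y2)
                            par->dirty.y2 = y2;
            }
            spin_unlock_irqrestore(&par->dirty.lock, flags);
    }
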
348 spin_lock_irqsave(&par->dirty.lock, flags);
349 par->dirty.x1 = 0;
350 par->dirty.y1 = y1;
351 par->dirty.x2 = info->var.xres;
352 par->dirty.y2 = y2;
353 spin_unlock_irqrestore(&par->dirty.lock, flags);
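
The hits at 348-353 come from a path that only knows which scanlines changed (deferred I/O style), so it dirties the full display width, 0 to info->var.xres, between y1 and y2. The helper below shows one way such line bounds could be derived from a dirtied byte range of the shadow buffer; the helper name and its use of info->fix.line_length are assumptions for illustration, not something visible in the hits.

    #include <linux/fb.h>

    /* Hypothetical helper: map a dirtied byte range of the shadow buffer to
     * full-width scanline bounds like the ones stored at lines 349-352. */
    static void example_range_to_lines(struct fb_info *info,
                                       unsigned long start, unsigned long end,
                                       unsigned int *y1, unsigned int *y2)
    {
            *y1 = start / info->fix.line_length;
            *y2 = end / info->fix.line_length + 1;
            if (*y2 > info->var.yres)
                    *y2 = info->var.yres;
    }
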
616 /* If there already was stuff dirty we won't
748 * Dirty & Deferred IO
750 par->dirty.x1 = par->dirty.x2 = 0;
751 par->dirty.y1 = par->dirty.y2 = 0;
752 par->dirty.active = true;
753 spin_lock_init(&par->dirty.lock);
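
The hits at 748-753 are the setup under the "Dirty & Deferred IO" comment: an empty rectangle, tracking enabled, and the spinlock initialised before any marking can happen. The sketch below also shows a plausible deferred-I/O hookup, since that is what the comment suggests; the fb_deferred_io callback signature (the older pagelist form), the delay value and the function names are assumptions rather than things shown in the hits.

    #include <linux/fb.h>

    /* Assumed deferred-io callback; in the real driver this would mark the
     * touched scanlines dirty, roughly as around lines 348-353. */
    static void example_deferred_io(struct fb_info *info,
                                    struct list_head *pagelist)
    {
            /* ... */
    }

    static struct fb_deferred_io example_defio = {
            .delay       = HZ / 30,             /* assumed */
            .deferred_io = example_deferred_io,
    };

    static void example_dirty_init(struct fb_info *info, struct vmw_fb_par *par)
    {
            /* Empty rectangle, tracking on, lock ready (lines 750-753). */
            par->dirty.x1 = par->dirty.x2 = 0;
            par->dirty.y1 = par->dirty.y2 = 0;
            par->dirty.active = true;
            spin_lock_init(&par->dirty.lock);

            info->fbdefio = &example_defio;
            fb_deferred_io_init(info);
    }
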
816 spin_lock_irqsave(&par->dirty.lock, flags);
817 par->dirty.active = false;
818 spin_unlock_irqrestore(&par->dirty.lock, flags);
838 spin_lock_irqsave(&par->dirty.lock, flags);
839 par->dirty.active = true;
840 spin_unlock_irqrestore(&par->dirty.lock, flags);
843 * Need to reschedule a dirty update, because otherwise that's
845 * dirty region was empty.
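
Finally, 816-818 and 838-845 show how the mechanism is paused and resumed (e.g. across hibernation, per the comment at line 178): par->dirty.active is flipped under the lock, and on resume a flush is rescheduled because a mark may have arrived while the rectangle was empty and tracking was off, in which case nothing else would ever schedule it. A sketch, again reusing the assumed struct and work item:

    static void example_dirty_suspend(struct vmw_fb_par *par)
    {
            unsigned long flags;

            spin_lock_irqsave(&par->dirty.lock, flags);
            par->dirty.active = false;          /* lines 816-818 */
            spin_unlock_irqrestore(&par->dirty.lock, flags);
    }

    static void example_dirty_resume(struct vmw_fb_par *par,
                                     struct delayed_work *work)
    {
            unsigned long flags;

            spin_lock_irqsave(&par->dirty.lock, flags);
            par->dirty.active = true;           /* lines 838-840 */
            spin_unlock_irqrestore(&par->dirty.lock, flags);

            /*
             * Reschedule a dirty update: otherwise nothing runs it if the
             * only marking happened while tracking was off and the dirty
             * region was empty (lines 843-845).
             */
            schedule_delayed_work(work, 0);
    }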