/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by [email protected]
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <[email protected]>
 *    Keith Whitwell <[email protected]>
 *
 * Rewritten by:
 *    Gareth Hughes <[email protected]>
 */

#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

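/* Emit the given cliprect as hardware clipping registers
 * (CXBNDRY/YTOP/YBOT), with the Y bounds scaled by the front buffer pitch.
 */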
static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
                               struct drm_clip_rect *box)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        unsigned int pitch = dev_priv->front_pitch;
        DMA_LOCALS;

        BEGIN_DMA(2);

        /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
         */
        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000,
                          MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000);
        }
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

        ADVANCE_DMA();
}

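/* Load the G200 drawing context (destination, plane mask, draw control,
 * alpha, fog color and depth origin) from the shared SAREA state.
 */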
static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_FCOL, ctx->fcol,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

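/* G400 version of the context upload; additionally programs the dual
 * texture stage and stencil registers.
 */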
static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
                  MGA_TDUALSTAGE0, ctx->tdualstage0,
                  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

        DMA_BLOCK(MGA_STENCIL, ctx->stencil,
                  MGA_STENCILCTL, ctx->stencilctl,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

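/* Upload the texture unit 0 state for the G200. */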
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

        DMA_BLOCK(MGA_WR34, tex->texheight,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

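/* Upload the texture unit 0 state for the G400, including the WARP
 * registers that shadow the texture dimensions.
 */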
static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(6);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
                  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
                  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

        ADVANCE_DMA();
}

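/* Upload the texture unit 1 state for the G400 (multitexturing). */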
static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
                                MGA_MAP1_ENABLE |
                                MGA_G400_TC2_MAGIC),
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000,
                  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff,
                  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

        ADVANCE_DMA();
}

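/* Select and restart the G200 WARP microcode pipe named in the SAREA. */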
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
                  MGA_WVRTXSZ, 0x00000007,
                  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

        DMA_BLOCK(MGA_WR25, 0x00000100,
                  MGA_WR34, 0x00000000,
                  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

        /* Padding required due to hardware bug.
         */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
                               MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

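/* Select and restart the G400 WARP microcode pipe, flushing the previous
 * pipe when switching away from a dual-texture (T2) configuration.
 */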
static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

/*      printk("mga_g400_emit_pipe %x\n", pipe); */

        BEGIN_DMA(10);

        DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        if (pipe & MGA_T2) {
                DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x1e000000);
        } else {
                if (dev_priv->warp_pipe & MGA_T2) {
                        /* Flush the WARP pipe */
                        DMA_BLOCK(MGA_YDST, 0x00000000,
                                  MGA_FXLEFT, 0x00000000,
                                  MGA_FXRIGHT, 0x00000001,
                                  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

                        DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
                                  MGA_DWGSYNC, 0x00007000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_LEN + MGA_EXEC, 0x00000000);

                        DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
                                                MGA_G400_TC2_MAGIC),
                                  MGA_LEN + MGA_EXEC, 0x00000000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_DMAPAD, 0x00000000);
                }

                DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x18000000);
        }

        DMA_BLOCK(MGA_WFLAG, 0x00000000,
                  MGA_WFLAG1, 0x00000000,
                  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WR49, 0x00000000,         /* tex0 */
                  MGA_WR57, 0x00000000,         /* tex0 */
                  MGA_WR53, 0x00000000,         /* tex1 */
                  MGA_WR61, 0x00000000);        /* tex1 */

        DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,  /* tex0 width */
                  MGA_WR62, MGA_G400_WR_MAGIC,  /* tex0 height */
                  MGA_WR52, MGA_G400_WR_MAGIC,  /* tex1 width */
                  MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height */

        /* Padding required due to hardware bug */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
                                MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

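/* Emit whatever G200 state the SAREA dirty flags say has changed. */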
static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g200_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g200_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g200_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }
}

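/* Emit whatever G400 state the SAREA dirty flags say has changed; the
 * second texture unit is only uploaded when the T2 pipe is active.
 */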
static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int multitex = sarea_priv->warp_pipe & MGA_T2;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g400_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g400_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }

        if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
                mga_g400_emit_tex1(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
        }
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        if (ctx->dstorg != dev_priv->front_offset &&
            ctx->dstorg != dev_priv->back_offset) {
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);
                ctx->dstorg = 0;
                return -EINVAL;
        }

        return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
        unsigned int org;

        org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

        if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
                tex->texorg = 0;
                return -EINVAL;
        }

        return 0;
}

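/* Validate the dirty SAREA state before it is emitted; returns nonzero
 * (true) only when all checks pass.
 */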
static int mga_verify_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int ret = 0;

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                if (dirty & MGA_UPLOAD_TEX1)
                        ret |= mga_verify_tex(dev_priv, 1);

                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
        } else {
                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
        }

        return (ret == 0);
}

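/* Reject image loads that fall outside the texture heap or whose length
 * is not a multiple of the required transfer alignment.
 */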
static int mga_verify_iload(drm_mga_private_t *dev_priv,
                            unsigned int dstorg, unsigned int length)
{
        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset +
                               dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
                return -EINVAL;
        }

        if (length & MGA_ILOAD_MASK) {
                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
                return -EINVAL;
        }

        return 0;
}

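/* Reject blits whose source or destination lives in PCI system memory. */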
static int mga_verify_blit(drm_mga_private_t *dev_priv,
                           unsigned int srcorg, unsigned int dstorg)
{
        if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
            (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
                return -EINVAL;
        }
        return 0;
}

/* ================================================================
 *
 */

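/* Clear the selected front/back/depth buffers within each cliprect. */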
static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(1);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                if (clear->flags & MGA_FRONT) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->front_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_BACK) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->back_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_DEPTH) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->depth_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_depth,
                                  MGA_DSTORG, dev_priv->depth_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }
        }

        BEGIN_DMA(1);

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();
}

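/* Copy the back buffer to the front buffer for each cliprect, recording
 * the current primary DMA head in sarea_priv->last_frame first.
 */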
static void mga_dma_dispatch_swap(struct drm_device *dev)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        sarea_priv->last_frame.head = dev_priv->prim.tail;
        sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_SRCORG, dev_priv->back_offset,
                  MGA_AR5, dev_priv->front_pitch);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;
                u32 start = box->y1 * dev_priv->front_pitch;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
                          MGA_AR3, start + box->x1,
                          MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                          MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
        }

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();

        DRM_DEBUG("... done.\n");
}

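/* Dispatch a vertex buffer, re-emitting any dirty state and replaying the
 * buffer once per cliprect.
 */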
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        u32 length = (u32) buf->used;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        if (buf->used) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SECADDRESS, (address |
                                                   MGA_DMA_VERTEX),
                                  MGA_SECEND, ((address + length) |
                                               dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

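/* Dispatch the [start, end) region of a setup/index buffer, once per
 * cliprect.
 */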
static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
                                     unsigned int start, unsigned int end)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

        if (start != end) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SETUPADDRESS, address + start,
                                  MGA_SETUPEND, ((address + end) |
                                                 dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

/* This copies a 64-byte-aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
                                   unsigned int dstorg, unsigned int length)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
        u32 srcorg =
            buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
        u32 y2;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        y2 = length / 64;

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dstorg,
                  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

        DMA_BLOCK(MGA_PITCH, 64,
                  MGA_PLNWT, 0xffffffff,
                  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

        DMA_BLOCK(MGA_AR0, 63,
                  MGA_AR3, 0,
                  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

        DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        AGE_BUFFER(buf_priv);

        buf->pending = 0;
        buf->used = 0;
        buf_priv->dispatched = 0;

        mga_freelist_put(dev, buf);

        FLUSH_DMA();
}

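/* Copy between screen regions with a standard blit, one pass per cliprect. */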
static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        u32 scandir = 0, i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
                  MGA_PLNWT, blit->planemask,
                  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

        DMA_BLOCK(MGA_SGN, scandir,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_AR5, blit->ydir * blit->src_pitch,
                  MGA_PITCH, blit->dst_pitch);

        for (i = 0; i < nbox; i++) {
                int srcx = pbox[i].x1 + blit->delta_sx;
                int srcy = pbox[i].y1 + blit->delta_sy;
                int dstx = pbox[i].x1 + blit->delta_dx;
                int dsty = pbox[i].y1 + blit->delta_dy;
                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;
                int start;

                if (blit->ydir == -1)
                        srcy = blit->height - srcy - 1;

                start = srcy * blit->src_pitch + srcx;

                DMA_BLOCK(MGA_AR0, start + w,
                          MGA_AR3, start,
                          MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
                          MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
        }

        /* Do something to flush AGP?
         */

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();
}

/* ================================================================
 *
 */

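/* Ioctl handlers. Each one requires the caller to hold the hardware lock
 * (LOCK_TEST_WITH_RETURN), validates or clamps the client-supplied state,
 * and then dispatches the command through the primary DMA stream.
 */
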
static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_clear_t *clear = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_clear(dev, clear);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_swap(dev);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_vertex_t *vertex = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (vertex->idx < 0 || vertex->idx >= dma->buf_count)
                return -EINVAL;
        buf = dma->buflist[vertex->idx];
        buf_priv = buf->dev_private;

        buf->used = vertex->used;
        buf_priv->discard = vertex->discard;

        if (!mga_verify_state(dev_priv)) {
                if (vertex->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_vertex(dev, buf);

        return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_indices_t *indices = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (indices->idx < 0 || indices->idx >= dma->buf_count)
                return -EINVAL;
        buf = dma->buflist[indices->idx];
        buf_priv = buf->dev_private;

        buf_priv->discard = indices->discard;

        if (!mga_verify_state(dev_priv)) {
                if (indices->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

        return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        struct drm_device_dma *dma = dev->dma;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_buf *buf;
        drm_mga_iload_t *iload = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
        if (mga_do_wait_for_idle(dev_priv) < 0) {
                if (MGA_DMA_DEBUG)
                        DRM_INFO("-EBUSY\n");
                return -EBUSY;
        }
#endif
        if (iload->idx < 0 || iload->idx >= dma->buf_count)
                return -EINVAL;

        buf = dma->buflist[iload->idx];

        if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
                mga_freelist_put(dev, buf);
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_blit_t *blit = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
                return -EINVAL;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_blit(dev, blit);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

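/* Return driver parameters (IRQ number, card type) to user space. */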
int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_getparam_t *param = data;
        struct pci_dev *pdev = to_pci_dev(dev->dev);
        int value;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", task_pid_nr(current));

        switch (param->param) {
        case MGA_PARAM_IRQ_NR:
                value = pdev->irq;
                break;
        case MGA_PARAM_CARD_TYPE:
                value = dev_priv->chipset;
                break;
        default:
                return -EINVAL;
        }

        if (copy_to_user(param->value, &value, sizeof(int))) {
                DRM_ERROR("copy_to_user\n");
                return -EFAULT;
        }

        return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;
        DMA_LOCALS;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", task_pid_nr(current));

        /* I would normally do this assignment in the declaration of fence,
         * but dev_priv may be NULL.
         */
        *fence = dev_priv->next_fence_to_post;
        dev_priv->next_fence_to_post++;

        BEGIN_DMA(1);
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
        ADVANCE_DMA();

        return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data,
                          struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", task_pid_nr(current));

        mga_driver_fence_wait(dev, fence);

        return 0;
}

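/* Ioctl dispatch table. Every entry requires DRM authentication; the DMA
 * init and bootstrap ioctls are additionally restricted to the master and
 * root.
 */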
const struct drm_ioctl_desc mga_ioctls[] = {
        DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);