Subversion Repositories — KolibriOS

Diff of the vmwgfx driver's command-submission (execbuf) validation code, Rev 4111 → Rev 4569, reconstructed below as a unified diff. Lines prefixed "-" exist only in Rev 4111, lines prefixed "+" only in Rev 4569; hunk markers @@ -OLD +NEW @@ give the starting line number in each revision.
@@ -52 +52 @@
  * @head: List head for the software context's resource list.
  * @hash: Hash entry for quick resouce to val_node lookup.
  * @res: Ref-counted pointer to the resource.
  * @switch_backup: Boolean whether to switch backup buffer on unreserve.
  * @new_backup: Refcounted pointer to the new backup buffer.
+ * @staged_bindings: If @res is a context, tracks bindings set up during
+ * the command batch. Otherwise NULL.
  * @new_backup_offset: New backup buffer offset if @new_backup is non-NUll.
  * @first_usage: Set to true the first time the resource is referenced in
  * the command stream.
  * @no_buffer_needed: Resources do not need to allocate buffer backup on
  * reservation. The command stream will provide one.
@@ -63 +65 @@
 struct vmw_resource_val_node {
     struct list_head head;
     struct drm_hash_item hash;
     struct vmw_resource *res;
     struct vmw_dma_buffer *new_backup;
+    struct vmw_ctx_binding_state *staged_bindings;
     unsigned long new_backup_offset;
     bool first_usage;
     bool no_buffer_needed;
 };
 
+/**
+ * struct vmw_cmd_entry - Describe a command for the verifier
+ *
+ * @user_allow: Whether allowed from the execbuf ioctl.
+ * @gb_disable: Whether disabled if guest-backed objects are available.
+ * @gb_enable: Whether enabled iff guest-backed objects are available.
+ */
+struct vmw_cmd_entry {
+    int (*func) (struct vmw_private *, struct vmw_sw_context *,
+             SVGA3dCmdHeader *);
+    bool user_allow;
+    bool gb_disable;
+    bool gb_enable;
+};
+
+#define VMW_CMD_DEF(_cmd, _func, _user_allow, _gb_disable, _gb_enable)	\
+    [(_cmd) - SVGA_3D_CMD_BASE] = {(_func), (_user_allow),\
+                       (_gb_disable), (_gb_enable)}
+
 /**
  * vmw_resource_unreserve - unreserve resources previously reserved for
  * command submission.
  *
  * @list_head: list of resources to unreserve.
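The new VMW_CMD_DEF macro uses C designated array initializers so that each table slot is keyed by its SVGA3D command id, giving O(1) dispatch plus per-command policy flags. Below is a minimal, self-contained sketch of that pattern; all names here (cmd_entry, CMD_DEF, cmd_table, CMD_BASE, cmd_a, cmd_b) are invented for illustration and are not the driver's.

    #include <stdbool.h>
    #include <stdio.h>

    enum { CMD_BASE = 1000, CMD_A = 1000, CMD_B = 1001, CMD_MAX = 1002 };

    struct cmd_entry {
        int (*func)(int arg);   /* validator callback */
        bool user_allow;        /* callable from user space? */
    };

    static int cmd_a(int arg) { printf("A(%d)\n", arg); return 0; }
    static int cmd_b(int arg) { printf("B(%d)\n", arg); return 0; }

    /* Designated initializers index the table by (id - CMD_BASE). */
    #define CMD_DEF(_cmd, _func, _user_allow) \
        [(_cmd) - CMD_BASE] = { (_func), (_user_allow) }

    static const struct cmd_entry cmd_table[CMD_MAX - CMD_BASE] = {
        CMD_DEF(CMD_A, cmd_a, true),
        CMD_DEF(CMD_B, cmd_b, false),
    };

    static int dispatch(int id, bool from_kernel, int arg)
    {
        if (id < CMD_BASE || id >= CMD_MAX)
            return -1;          /* unknown command id */
        const struct cmd_entry *e = &cmd_table[id - CMD_BASE];
        if (!e->func || (!e->user_allow && !from_kernel))
            return -1;          /* unimplemented or forbidden */
        return e->func(arg);
    }

Unlisted slots are zero-initialized, so a missing entry naturally dispatches as "unimplemented" rather than jumping through a garbage pointer.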
@@ -85 +107 @@
     list_for_each_entry(val, list, head) {
         struct vmw_resource *res = val->res;
         struct vmw_dma_buffer *new_backup =
             backoff ? NULL : val->new_backup;
 
+        /*
+         * Transfer staged context bindings to the
+         * persistent context binding tracker.
+         */
+        if (unlikely(val->staged_bindings)) {
+            vmw_context_binding_state_transfer
+                (val->res, val->staged_bindings);
+            kfree(val->staged_bindings);
+            val->staged_bindings = NULL;
+        }
         vmw_resource_unreserve(res, new_backup,
             val->new_backup_offset);
         vmw_dmabuf_unreference(&val->new_backup);
     }
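The unreserve path above applies a stage-then-commit discipline: bindings made during a command batch accumulate in a per-batch staging structure and are only merged into the context's persistent tracker when the batch is unreserved. A minimal sketch of the idea, with invented names (binding_state, stage_binding, finish_batch):

    #include <stdlib.h>
    #include <string.h>

    struct binding_state { int slots[8]; };

    struct context {
        struct binding_state persistent;  /* survives across batches */
        struct binding_state *staged;     /* NULL unless a batch is open */
    };

    static int stage_binding(struct context *ctx, int slot, int res_id)
    {
        if (!ctx->staged) {
            ctx->staged = calloc(1, sizeof(*ctx->staged));
            if (!ctx->staged)
                return -1;
        }
        ctx->staged->slots[slot] = res_id;  /* record, don't apply yet */
        return 0;
    }

    /* At batch end: fold the staged state into the persistent tracker. */
    static void finish_batch(struct context *ctx)
    {
        if (!ctx->staged)
            return;
        memcpy(&ctx->persistent, ctx->staged, sizeof(*ctx->staged));
        free(ctx->staged);
        ctx->staged = NULL;
    }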
@@ -222 +254 @@
 /**
  * vmw_bo_to_validate_list - add a bo to a validate list
  *
  * @sw_context: The software context used for this command submission batch.
  * @bo: The buffer object to add.
+ * @validate_as_mob: Validate this buffer as a MOB.
  * @p_val_node: If non-NULL Will be updated with the validate node number
  * on return.
  *
  * Returns -EINVAL if the limit of number of buffer objects per command
  * submission is reached.
  */
 static int vmw_bo_to_validate_list(struct vmw_sw_context *sw_context,
                    struct ttm_buffer_object *bo,
+                   bool validate_as_mob,
                    uint32_t *p_val_node)
 {
     uint32_t val_node;
     struct vmw_validate_buffer *vval_buf;
     struct ttm_validate_buffer *val_buf;
@@ -242 +276 @@
 
     if (likely(drm_ht_find_item(&sw_context->res_ht, (unsigned long) bo,
                     &hash) == 0)) {
         vval_buf = container_of(hash, struct vmw_validate_buffer,
                     hash);
+        if (unlikely(vval_buf->validate_as_mob != validate_as_mob)) {
+            DRM_ERROR("Inconsistent buffer usage.\n");
+            return -EINVAL;
+        }
         val_buf = &vval_buf->base;
         val_node = vval_buf - sw_context->val_bufs;
     } else {
         val_node = sw_context->cur_val_buf;
@@ -264 +302 @@
         ++sw_context->cur_val_buf;
         val_buf = &vval_buf->base;
         val_buf->bo = ttm_bo_reference(bo);
         val_buf->reserved = false;
         list_add_tail(&val_buf->head, &sw_context->validate_nodes);
+        vval_buf->validate_as_mob = validate_as_mob;
     }
 
     sw_context->fence_flags |= DRM_VMW_FENCE_FLAG_EXEC;
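A buffer object may be referenced many times in one batch; vmw_bo_to_validate_list() deduplicates entries by hashing the BO pointer, records validate_as_mob on first insertion, and rejects a later hit with conflicting usage. A compact sketch of that dedup-on-insert shape (val_entry, add_to_validate_list and the linear lookup are made up for illustration; the driver uses a real hash table):

    #include <stdbool.h>
    #include <stddef.h>

    #define MAX_BUFS 64

    struct val_entry {
        void *bo;               /* key: buffer object pointer */
        bool validate_as_mob;   /* usage recorded on first insertion */
    };

    static struct val_entry list[MAX_BUFS];
    static size_t count;

    static int add_to_validate_list(void *bo, bool as_mob)
    {
        for (size_t i = 0; i < count; i++) {
            if (list[i].bo != bo)
                continue;
            /* Same BO seen again with conflicting usage: reject. */
            if (list[i].validate_as_mob != as_mob)
                return -1;
            return (int)i;          /* existing node index */
        }
        if (count == MAX_BUFS)
            return -1;              /* per-submission limit reached */
        list[count].bo = bo;
        list[count].validate_as_mob = as_mob;
        return (int)count++;
    }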
@@ -300 +339 @@
 
         if (res->backup) {
             struct ttm_buffer_object *bo = &res->backup->base;
 
             ret = vmw_bo_to_validate_list
-                (sw_context, bo, NULL);
+                (sw_context, bo,
+                 vmw_resource_needs_backup(res), NULL);
 
             if (unlikely(ret != 0))
@@ -360 +400 @@
         &sw_context->res_cache[res_type];
     struct vmw_resource *res;
     struct vmw_resource_val_node *node;
     int ret;
 
-    if (*id == SVGA3D_INVALID_ID)
-        return 0;
+    if (*id == SVGA3D_INVALID_ID) {
+        if (p_val)
+            *p_val = NULL;
+        if (res_type == vmw_res_context) {
+            DRM_ERROR("Illegal context invalid id.\n");
+            return -EINVAL;
+        }
+        return 0;
+    }
 
     /*
      * Fastpath in case of repeated commands referencing the same
@@ -409 +456 @@
         goto out_no_reloc;
 
     rcache->node = node;
     if (p_val)
         *p_val = node;
+
+    if (node->first_usage && res_type == vmw_res_context) {
+        node->staged_bindings =
+            kzalloc(sizeof(*node->staged_bindings), GFP_KERNEL);
+        if (node->staged_bindings == NULL) {
+            DRM_ERROR("Failed to allocate context binding "
+                  "information.\n");
+            goto out_no_reloc;
+        }
+        INIT_LIST_HEAD(&node->staged_bindings->list);
+    }
+
     vmw_resource_unreference(&res);
     return 0;
 
@@ -451 +510 @@
 {
     struct vmw_sid_cmd {
         SVGA3dCmdHeader header;
         SVGA3dCmdSetRenderTarget body;
     } *cmd;
+    struct vmw_resource_val_node *ctx_node;
+    struct vmw_resource_val_node *res_node;
     int ret;
 
-    ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
-    if (unlikely(ret != 0))
-        return ret;
-
     cmd = container_of(header, struct vmw_sid_cmd, header);
+
+    ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+                user_context_converter, &cmd->body.cid,
+                &ctx_node);
+    if (unlikely(ret != 0))
+        return ret;
+
     ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
                 user_surface_converter,
-                &cmd->body.target.sid, NULL);
-    return ret;
+                &cmd->body.target.sid, &res_node);
+    if (unlikely(ret != 0))
+    return ret;
+
+    if (dev_priv->has_mob) {
+        struct vmw_ctx_bindinfo bi;
+
+        bi.ctx = ctx_node->res;
+        bi.res = res_node ? res_node->res : NULL;
+        bi.bt = vmw_ctx_binding_rt;
+        bi.i1.rt_type = cmd->body.type;
+        return vmw_context_binding_add(ctx_node->staged_bindings, &bi);
+    }
+
+    return 0;
 }
 
@@ -517 +594 @@
         SVGA3dCmdBlitSurfaceToScreen body;
     } *cmd;
 
     cmd = container_of(header, struct vmw_sid_cmd, header);
-
-    if (unlikely(!sw_context->kernel)) {
-        DRM_ERROR("Kernel only SVGA3d command: %u.\n", cmd->header.id);
-        return -EPERM;
-    }
 
     return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
                  user_surface_converter,
@@ -539 +611 @@
     } *cmd;
 
 
     cmd = container_of(header, struct vmw_sid_cmd, header);
 
-    if (unlikely(!sw_context->kernel)) {
-        DRM_ERROR("Kernel only SVGA3d command: %u.\n", cmd->header.id);
-        return -EPERM;
-    }
-
     return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
@@ -584 +651 @@
 
         if (unlikely(sw_context->cur_query_bo != NULL)) {
             sw_context->needs_post_query_barrier = true;
             ret = vmw_bo_to_validate_list(sw_context,
                               sw_context->cur_query_bo,
-                              NULL);
+                              dev_priv->has_mob, NULL);
             if (unlikely(ret != 0))
                 return ret;
         }
         sw_context->cur_query_bo = new_query_bo;
 
         ret = vmw_bo_to_validate_list(sw_context,
                           dev_priv->dummy_query_bo,
-                          NULL);
+                          dev_priv->has_mob, NULL);
         if (unlikely(ret != 0))
@@ -670 +737 @@
         }
     }
 }
 
+/**
+ * vmw_translate_mob_pointer - Prepare to translate a user-space buffer
+ * handle to a MOB id.
+ *
+ * @dev_priv: Pointer to a device private structure.
+ * @sw_context: The software context used for this command batch validation.
+ * @id: Pointer to the user-space handle to be translated.
+ * @vmw_bo_p: Points to a location that, on successful return will carry
+ * a reference-counted pointer to the DMA buffer identified by the
+ * user-space handle in @id.
+ *
+ * This function saves information needed to translate a user-space buffer
+ * handle to a MOB id. The translation does not take place immediately, but
+ * during a call to vmw_apply_relocations(). This function builds a relocation
+ * list and a list of buffers to validate. The former needs to be freed using
+ * either vmw_apply_relocations() or vmw_free_relocations(). The latter
+ * needs to be freed using vmw_clear_validations.
+ */
+static int vmw_translate_mob_ptr(struct vmw_private *dev_priv,
+                 struct vmw_sw_context *sw_context,
+                 SVGAMobId *id,
+                 struct vmw_dma_buffer **vmw_bo_p)
+{
+    struct vmw_dma_buffer *vmw_bo = NULL;
+    struct ttm_buffer_object *bo;
+    uint32_t handle = *id;
+    struct vmw_relocation *reloc;
+    int ret;
+
+    ret = vmw_user_dmabuf_lookup(sw_context->tfile, handle, &vmw_bo);
+    if (unlikely(ret != 0)) {
+        DRM_ERROR("Could not find or use MOB buffer.\n");
+        return -EINVAL;
+    }
+    bo = &vmw_bo->base;
+
+    if (unlikely(sw_context->cur_reloc >= VMWGFX_MAX_RELOCATIONS)) {
+        DRM_ERROR("Max number relocations per submission"
+              " exceeded\n");
+        ret = -EINVAL;
+        goto out_no_reloc;
+    }
+
+    reloc = &sw_context->relocs[sw_context->cur_reloc++];
+    reloc->mob_loc = id;
+    reloc->location = NULL;
+
+    ret = vmw_bo_to_validate_list(sw_context, bo, true, &reloc->index);
+    if (unlikely(ret != 0))
+        goto out_no_reloc;
+
+    *vmw_bo_p = vmw_bo;
+    return 0;
+
+out_no_reloc:
+    vmw_dmabuf_unreference(&vmw_bo);
+    vmw_bo_p = NULL;
+    return ret;
+}
+
 /**
  * vmw_translate_guest_pointer - Prepare to translate a user-space buffer
  * handle to a valid SVGAGuestPtr
  *
  * @dev_priv: Pointer to a device private structure.
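As the kerneldoc above says, the handle-to-id translation is deferred: at parse time the function only records where in the command stream the value must be written, and vmw_apply_relocations() patches it once the buffer has been validated and placed. A minimal model of that two-phase pattern, with invented names (reloc, defer_patch, apply_relocations):

    #include <stdint.h>
    #include <stddef.h>

    struct buffer { uint32_t final_id; };  /* known only after validation */

    struct reloc {
        uint32_t *loc;      /* where in the command stream to patch */
        struct buffer *buf; /* which buffer the value comes from */
    };

    static struct reloc relocs[16];
    static size_t n_relocs;

    /* Parse time: remember the slot, don't translate yet. */
    static int defer_patch(uint32_t *loc, struct buffer *buf)
    {
        if (n_relocs == 16)
            return -1;      /* relocation limit exceeded */
        relocs[n_relocs].loc = loc;
        relocs[n_relocs].buf = buf;
        n_relocs++;
        return 0;
    }

    /* After validation: every buffer has a final id; patch the stream. */
    static void apply_relocations(void)
    {
        for (size_t i = 0; i < n_relocs; i++)
            *relocs[i].loc = relocs[i].buf->final_id;
        n_relocs = 0;
    }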
@@ -716 +843 @@
     }
 
     reloc = &sw_context->relocs[sw_context->cur_reloc++];
     reloc->location = ptr;
 
-    ret = vmw_bo_to_validate_list(sw_context, bo, &reloc->index);
+    ret = vmw_bo_to_validate_list(sw_context, bo, false, &reloc->index);
     if (unlikely(ret != 0))
         goto out_no_reloc;
@@ -730 +857 @@
     vmw_bo_p = NULL;
     return ret;
 }
 
+/**
+ * vmw_cmd_begin_gb_query - validate a  SVGA_3D_CMD_BEGIN_GB_QUERY command.
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context used for this command submission.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_begin_gb_query(struct vmw_private *dev_priv,
+                  struct vmw_sw_context *sw_context,
+                  SVGA3dCmdHeader *header)
+{
+    struct vmw_begin_gb_query_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdBeginGBQuery q;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_begin_gb_query_cmd,
+               header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+                 user_context_converter, &cmd->q.cid,
+                 NULL);
+}
+
 /**
  * vmw_cmd_begin_query - validate a  SVGA_3D_CMD_BEGIN_QUERY command.
  *
  * @dev_priv: Pointer to a device private struct.
  * @sw_context: The software context used for this command submission.
@@ -748 +899 @@
     } *cmd;
 
     cmd = container_of(header, struct vmw_begin_query_cmd,
                header);
 
+    if (unlikely(dev_priv->has_mob)) {
+        struct {
+            SVGA3dCmdHeader header;
+            SVGA3dCmdBeginGBQuery q;
+        } gb_cmd;
+
+        BUG_ON(sizeof(gb_cmd) != sizeof(*cmd));
+
+        gb_cmd.header.id = SVGA_3D_CMD_BEGIN_GB_QUERY;
+        gb_cmd.header.size = cmd->header.size;
+        gb_cmd.q.cid = cmd->q.cid;
+        gb_cmd.q.type = cmd->q.type;
+
+        memcpy(cmd, &gb_cmd, sizeof(*cmd));
+        return vmw_cmd_begin_gb_query(dev_priv, sw_context, header);
+    }
 
     return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
                  user_context_converter, &cmd->q.cid,
                  NULL);
 }
 
+/**
+ * vmw_cmd_end_gb_query - validate a  SVGA_3D_CMD_END_GB_QUERY command.
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context used for this command submission.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_end_gb_query(struct vmw_private *dev_priv,
+                struct vmw_sw_context *sw_context,
+                SVGA3dCmdHeader *header)
+{
+    struct vmw_dma_buffer *vmw_bo;
+    struct vmw_query_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdEndGBQuery q;
+    } *cmd;
+    int ret;
+
+    cmd = container_of(header, struct vmw_query_cmd, header);
+    ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
+    if (unlikely(ret != 0))
+        return ret;
+
+    ret = vmw_translate_mob_ptr(dev_priv, sw_context,
+                    &cmd->q.mobid,
+                    &vmw_bo);
+    if (unlikely(ret != 0))
+        return ret;
+
+    ret = vmw_query_bo_switch_prepare(dev_priv, &vmw_bo->base, sw_context);
+
+    vmw_dmabuf_unreference(&vmw_bo);
+    return ret;
+}
+
 /**
  * vmw_cmd_end_query - validate a  SVGA_3D_CMD_END_QUERY command.
  *
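On guest-backed hardware the legacy query commands are rewritten in place into their GB equivalents before revalidation. The memcpy overwrite of the command stream is only safe because both encodings occupy exactly the same number of bytes, which the BUG_ON asserts. A simplified sketch of the trick (struct layouts and ids are stand-ins, not the SVGA definitions):

    #include <string.h>
    #include <stdint.h>

    struct hdr { uint32_t id, size; };
    struct legacy_cmd { struct hdr header; uint32_t cid, type; };
    struct gb_cmd     { struct hdr header; uint32_t cid, type; };

    enum { CMD_GB = 2 };

    static void rewrite_to_gb(struct legacy_cmd *cmd)
    {
        struct gb_cmd gb;

        /* Only legal because both encodings occupy the same bytes. */
        _Static_assert(sizeof(struct gb_cmd) == sizeof(struct legacy_cmd),
                       "sizes must match");

        gb.header.id = CMD_GB;
        gb.header.size = cmd->header.size;  /* body size is unchanged */
        gb.cid = cmd->cid;
        gb.type = cmd->type;

        memcpy(cmd, &gb, sizeof(*cmd));     /* patch the stream in place */
    }

After the rewrite, validation is simply re-entered through the GB handler, so the legacy paths stay thin shims.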
@@ -772 +975 @@
         SVGA3dCmdEndQuery q;
     } *cmd;
     int ret;
 
     cmd = container_of(header, struct vmw_query_cmd, header);
+    if (dev_priv->has_mob) {
+        struct {
+            SVGA3dCmdHeader header;
+            SVGA3dCmdEndGBQuery q;
+        } gb_cmd;
+
+        BUG_ON(sizeof(gb_cmd) != sizeof(*cmd));
+
+        gb_cmd.header.id = SVGA_3D_CMD_END_GB_QUERY;
+        gb_cmd.header.size = cmd->header.size;
+        gb_cmd.q.cid = cmd->q.cid;
+        gb_cmd.q.type = cmd->q.type;
+        gb_cmd.q.mobid = cmd->q.guestResult.gmrId;
+        gb_cmd.q.offset = cmd->q.guestResult.offset;
+
+        memcpy(cmd, &gb_cmd, sizeof(*cmd));
+        return vmw_cmd_end_gb_query(dev_priv, sw_context, header);
+    }
+
     ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
     if (unlikely(ret != 0))
         return ret;
@@ -788 +1010 @@
 
     vmw_dmabuf_unreference(&vmw_bo);
     return ret;
 }
 
+/**
+ * vmw_cmd_wait_gb_query - validate a  SVGA_3D_CMD_WAIT_GB_QUERY command.
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context used for this command submission.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_wait_gb_query(struct vmw_private *dev_priv,
+                 struct vmw_sw_context *sw_context,
+                 SVGA3dCmdHeader *header)
+{
+    struct vmw_dma_buffer *vmw_bo;
+    struct vmw_query_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdWaitForGBQuery q;
+    } *cmd;
+    int ret;
+
+    cmd = container_of(header, struct vmw_query_cmd, header);
+    ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
+    if (unlikely(ret != 0))
+        return ret;
+
+    ret = vmw_translate_mob_ptr(dev_priv, sw_context,
+                    &cmd->q.mobid,
+                    &vmw_bo);
+    if (unlikely(ret != 0))
+        return ret;
+
+    vmw_dmabuf_unreference(&vmw_bo);
+    return 0;
+}
+
-/*
+/**
  * vmw_cmd_wait_query - validate a  SVGA_3D_CMD_WAIT_QUERY command.
  *
  * @dev_priv: Pointer to a device private struct.
@@ -807 +1062 @@
         SVGA3dCmdWaitForQuery q;
     } *cmd;
     int ret;
 
     cmd = container_of(header, struct vmw_query_cmd, header);
+    if (dev_priv->has_mob) {
+        struct {
+            SVGA3dCmdHeader header;
+            SVGA3dCmdWaitForGBQuery q;
+        } gb_cmd;
+
+        BUG_ON(sizeof(gb_cmd) != sizeof(*cmd));
+
+        gb_cmd.header.id = SVGA_3D_CMD_WAIT_FOR_GB_QUERY;
+        gb_cmd.header.size = cmd->header.size;
+        gb_cmd.q.cid = cmd->q.cid;
+        gb_cmd.q.type = cmd->q.type;
+        gb_cmd.q.mobid = cmd->q.guestResult.gmrId;
+        gb_cmd.q.offset = cmd->q.guestResult.offset;
+
+        memcpy(cmd, &gb_cmd, sizeof(*cmd));
+        return vmw_cmd_wait_gb_query(dev_priv, sw_context, header);
+    }
+
     ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
     if (unlikely(ret != 0))
         return ret;
@@ -919 +1193 @@
                  SVGA3dCmdHeader *header)
 {
     struct vmw_tex_state_cmd {
         SVGA3dCmdHeader header;
         SVGA3dCmdSetTextureState state;
-    };
+    } *cmd;
 
     SVGA3dTextureState *last_state = (SVGA3dTextureState *)
       ((unsigned long) header + header->size + sizeof(header));
     SVGA3dTextureState *cur_state = (SVGA3dTextureState *)
         ((unsigned long) header + sizeof(struct vmw_tex_state_cmd));
+    struct vmw_resource_val_node *ctx_node;
+    struct vmw_resource_val_node *res_node;
     int ret;
 
-    ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
+    cmd = container_of(header, struct vmw_tex_state_cmd,
+               header);
+
+    ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+                user_context_converter, &cmd->state.cid,
+                &ctx_node);
     if (unlikely(ret != 0))
         return ret;
 
     for (; cur_state < last_state; ++cur_state) {
         if (likely(cur_state->name != SVGA3D_TS_BIND_TEXTURE))
             continue;
 
         ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
                     user_surface_converter,
-                    &cur_state->value, NULL);
-        if (unlikely(ret != 0))
+                    &cur_state->value, &res_node);
+        if (unlikely(ret != 0))
+            return ret;
+
+        if (dev_priv->has_mob) {
+            struct vmw_ctx_bindinfo bi;
+
+            bi.ctx = ctx_node->res;
+            bi.res = res_node ? res_node->res : NULL;
+            bi.bt = vmw_ctx_binding_tex;
+            bi.i1.texture_stage = cur_state->stage;
+            vmw_context_binding_add(ctx_node->staged_bindings,
+                        &bi);
@@ -969 +1261 @@
 
     return ret;
 }
+
+/**
+ * vmw_cmd_switch_backup - Utility function to handle backup buffer switching
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @res_type: The resource type.
+ * @converter: Information about user-space binding for this resource type.
+ * @res_id: Pointer to the user-space resource handle in the command stream.
+ * @buf_id: Pointer to the user-space backup buffer handle in the command
+ * stream.
+ * @backup_offset: Offset of backup into MOB.
+ *
+ * This function prepares for registering a switch of backup buffers
+ * in the resource metadata just prior to unreserving.
+ */
+static int vmw_cmd_switch_backup(struct vmw_private *dev_priv,
+                 struct vmw_sw_context *sw_context,
+                 enum vmw_res_type res_type,
+                 const struct vmw_user_resource_conv
+                 *converter,
+                 uint32_t *res_id,
+                 uint32_t *buf_id,
+                 unsigned long backup_offset)
+{
+    int ret;
+    struct vmw_dma_buffer *dma_buf;
+    struct vmw_resource_val_node *val_node;
+
+    ret = vmw_cmd_res_check(dev_priv, sw_context, res_type,
+                converter, res_id, &val_node);
+    if (unlikely(ret != 0))
+        return ret;
+
+    ret = vmw_translate_mob_ptr(dev_priv, sw_context, buf_id, &dma_buf);
+    if (unlikely(ret != 0))
+        return ret;
+
+    if (val_node->first_usage)
+        val_node->no_buffer_needed = true;
+
+    vmw_dmabuf_unreference(&val_node->new_backup);
+    val_node->new_backup = dma_buf;
+    val_node->new_backup_offset = backup_offset;
+
+    return 0;
+}
+
+/**
+ * vmw_cmd_bind_gb_surface - Validate an SVGA_3D_CMD_BIND_GB_SURFACE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_bind_gb_surface(struct vmw_private *dev_priv,
+                   struct vmw_sw_context *sw_context,
+                   SVGA3dCmdHeader *header)
+{
+    struct vmw_bind_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdBindGBSurface body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_bind_gb_surface_cmd, header);
+
+    return vmw_cmd_switch_backup(dev_priv, sw_context, vmw_res_surface,
+                     user_surface_converter,
+                     &cmd->body.sid, &cmd->body.mobid,
+                     0);
+}
+
+/**
+ * vmw_cmd_update_gb_image - Validate an SVGA_3D_CMD_UPDATE_GB_IMAGE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_update_gb_image(struct vmw_private *dev_priv,
+                   struct vmw_sw_context *sw_context,
+                   SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdUpdateGBImage body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.image.sid, NULL);
+}
+
+/**
+ * vmw_cmd_update_gb_surface - Validate an SVGA_3D_CMD_UPDATE_GB_SURFACE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_update_gb_surface(struct vmw_private *dev_priv,
+                     struct vmw_sw_context *sw_context,
+                     SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdUpdateGBSurface body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.sid, NULL);
+}
+
+/**
+ * vmw_cmd_readback_gb_image - Validate an SVGA_3D_CMD_READBACK_GB_IMAGE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_readback_gb_image(struct vmw_private *dev_priv,
+                     struct vmw_sw_context *sw_context,
+                     SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdReadbackGBImage body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.image.sid, NULL);
+}
+
+/**
+ * vmw_cmd_readback_gb_surface - Validate an SVGA_3D_CMD_READBACK_GB_SURFACE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_readback_gb_surface(struct vmw_private *dev_priv,
+                       struct vmw_sw_context *sw_context,
+                       SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdReadbackGBSurface body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.sid, NULL);
+}
+
+/**
+ * vmw_cmd_invalidate_gb_image - Validate an SVGA_3D_CMD_INVALIDATE_GB_IMAGE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_invalidate_gb_image(struct vmw_private *dev_priv,
+                       struct vmw_sw_context *sw_context,
+                       SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdInvalidateGBImage body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.image.sid, NULL);
+}
+
+/**
+ * vmw_cmd_invalidate_gb_surface - Validate an
+ * SVGA_3D_CMD_INVALIDATE_GB_SURFACE command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_invalidate_gb_surface(struct vmw_private *dev_priv,
+                     struct vmw_sw_context *sw_context,
+                     SVGA3dCmdHeader *header)
+{
+    struct vmw_gb_surface_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdInvalidateGBSurface body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_gb_surface_cmd, header);
+
+    return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+                 user_surface_converter,
+                 &cmd->body.sid, NULL);
+}
 
 /**
  * vmw_cmd_set_shader - Validate an SVGA_3D_CMD_SET_SHADER
  * command
  *
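Note that vmw_cmd_switch_backup() above does not rebind anything immediately: it resolves both user-space handles, then only parks the buffer in the resource's validate node. The swap itself is applied later, at unreserve time. A minimal model of that record-now/apply-later shape, with invented names (res_node here is not the driver's vmw_resource_val_node):

    #include <stddef.h>

    struct buf;

    struct res_node {
        struct buf *backup;          /* current backup buffer */
        struct buf *new_backup;      /* pending switch, NULL if none */
        unsigned long new_backup_offset;
        int first_usage;
        int no_buffer_needed;
    };

    /* Command-parse time: just remember what to switch to. */
    static void record_backup_switch(struct res_node *node,
                                     struct buf *dma_buf,
                                     unsigned long offset)
    {
        if (node->first_usage)
            node->no_buffer_needed = 1;  /* stream supplies the buffer */
        node->new_backup = dma_buf;
        node->new_backup_offset = offset;
    }

    /* Unreserve time: commit (or drop, on backoff) the pending switch. */
    static void apply_backup_switch(struct res_node *node, int backoff)
    {
        if (!backoff && node->new_backup)
            node->backup = node->new_backup;
        node->new_backup = NULL;
    }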
@@ -984 +1492 @@
 {
     struct vmw_set_shader_cmd {
         SVGA3dCmdHeader header;
         SVGA3dCmdSetShader body;
     } *cmd;
+    struct vmw_resource_val_node *ctx_node;
     int ret;
 
     cmd = container_of(header, struct vmw_set_shader_cmd,
                header);
 
-    ret = vmw_cmd_cid_check(dev_priv, sw_context, header);
+    ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+                user_context_converter, &cmd->body.cid,
+                &ctx_node);
     if (unlikely(ret != 0))
         return ret;
 
+    if (dev_priv->has_mob) {
+        struct vmw_ctx_bindinfo bi;
+        struct vmw_resource_val_node *res_node;
+
+        ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_shader,
+                    user_shader_converter,
+                    &cmd->body.shid, &res_node);
+    if (unlikely(ret != 0))
+        return ret;
+
+        bi.ctx = ctx_node->res;
+        bi.res = res_node ? res_node->res : NULL;
+        bi.bt = vmw_ctx_binding_shader;
+        bi.i1.shader_type = cmd->body.type;
+        return vmw_context_binding_add(ctx_node->staged_bindings, &bi);
+    }
+
     return 0;
 }
 
+/**
+ * vmw_cmd_bind_gb_shader - Validate an SVGA_3D_CMD_BIND_GB_SHADER
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_bind_gb_shader(struct vmw_private *dev_priv,
+                  struct vmw_sw_context *sw_context,
+                  SVGA3dCmdHeader *header)
+{
+    struct vmw_bind_gb_shader_cmd {
+        SVGA3dCmdHeader header;
+        SVGA3dCmdBindGBShader body;
+    } *cmd;
+
+    cmd = container_of(header, struct vmw_bind_gb_shader_cmd,
+               header);
+
+    return vmw_cmd_switch_backup(dev_priv, sw_context, vmw_res_shader,
+                     user_shader_converter,
+                     &cmd->body.shid, &cmd->body.mobid,
+                     cmd->body.offsetInBytes);
+}
+
 static int vmw_cmd_check_not_3d(struct vmw_private *dev_priv,
@@ -1039 +1593 @@
         return vmw_cmd_check_define_gmrfb(dev_priv, sw_context, buf);
 
     return 0;
 }
 
-typedef int (*vmw_cmd_func) (struct vmw_private *,
-                 struct vmw_sw_context *,
-                 SVGA3dCmdHeader *);
-
-#define VMW_CMD_DEF(cmd, func) \
-    [cmd - SVGA_3D_CMD_BASE] = func
-
-static vmw_cmd_func vmw_cmd_funcs[SVGA_3D_CMD_MAX] = {
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DESTROY, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_COPY, &vmw_cmd_surface_copy_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_STRETCHBLT, &vmw_cmd_stretch_blt_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DMA, &vmw_cmd_dma),
-    VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DEFINE, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DESTROY, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETTRANSFORM, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETZRANGE, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERSTATE, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERTARGET,
-            &vmw_cmd_set_render_target_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETTEXTURESTATE, &vmw_cmd_tex_state),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETMATERIAL, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTDATA, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTENABLED, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETVIEWPORT, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETCLIPPLANE, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_CLEAR, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_PRESENT, &vmw_cmd_present_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DEFINE, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DESTROY, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER, &vmw_cmd_set_shader),
-    VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER_CONST, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_DRAW_PRIMITIVES, &vmw_cmd_draw),
-    VMW_CMD_DEF(SVGA_3D_CMD_SETSCISSORRECT, &vmw_cmd_cid_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_BEGIN_QUERY, &vmw_cmd_begin_query),
-    VMW_CMD_DEF(SVGA_3D_CMD_END_QUERY, &vmw_cmd_end_query),
-    VMW_CMD_DEF(SVGA_3D_CMD_WAIT_FOR_QUERY, &vmw_cmd_wait_query),
-    VMW_CMD_DEF(SVGA_3D_CMD_PRESENT_READBACK, &vmw_cmd_ok),
-    VMW_CMD_DEF(SVGA_3D_CMD_BLIT_SURFACE_TO_SCREEN,
-            &vmw_cmd_blt_surf_screen_check),
-    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE_V2, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_GENERATE_MIPMAPS, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_ACTIVATE_SURFACE, &vmw_cmd_invalid),
-    VMW_CMD_DEF(SVGA_3D_CMD_DEACTIVATE_SURFACE, &vmw_cmd_invalid),
-};
+static const struct vmw_cmd_entry const vmw_cmd_entries[SVGA_3D_CMD_MAX] = {
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DESTROY, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_COPY, &vmw_cmd_surface_copy_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_STRETCHBLT, &vmw_cmd_stretch_blt_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DMA, &vmw_cmd_dma,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DEFINE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DESTROY, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETTRANSFORM, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETZRANGE, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERSTATE, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERTARGET,
+            &vmw_cmd_set_render_target_check, true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETTEXTURESTATE, &vmw_cmd_tex_state,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETMATERIAL, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTDATA, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTENABLED, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETVIEWPORT, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETCLIPPLANE, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_CLEAR, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_PRESENT, &vmw_cmd_present_check,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DEFINE, &vmw_cmd_cid_check,
+            true, true, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DESTROY, &vmw_cmd_cid_check,
+            true, true, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER, &vmw_cmd_set_shader,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER_CONST, &vmw_cmd_cid_check,
+            true, true, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_DRAW_PRIMITIVES, &vmw_cmd_draw,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SETSCISSORRECT, &vmw_cmd_cid_check,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_BEGIN_QUERY, &vmw_cmd_begin_query,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_END_QUERY, &vmw_cmd_end_query,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_WAIT_FOR_QUERY, &vmw_cmd_wait_query,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_PRESENT_READBACK, &vmw_cmd_ok,
+            true, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_BLIT_SURFACE_TO_SCREEN,
+            &vmw_cmd_blt_surf_screen_check, false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE_V2, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_GENERATE_MIPMAPS, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_ACTIVATE_SURFACE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEACTIVATE_SURFACE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SCREEN_DMA, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_UNITY_SURFACE_COOKIE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_OPEN_CONTEXT_SURFACE, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_BITBLT, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_TRANSBLT, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_STRETCHBLT, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_COLORFILL, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_ALPHABLEND, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_LOGICOPS_CLEARTYPEBLEND, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_OTABLE_BASE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_READBACK_OTABLE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_MOB, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_MOB, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_REDEFINE_GB_MOB, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_MOB_MAPPING, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SURFACE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SURFACE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SURFACE, &vmw_cmd_bind_gb_surface,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_COND_BIND_GB_SURFACE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_IMAGE, &vmw_cmd_update_gb_image,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_SURFACE,
+            &vmw_cmd_update_gb_surface, true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_IMAGE,
+            &vmw_cmd_readback_gb_image, true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_SURFACE,
+            &vmw_cmd_readback_gb_surface, true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_IMAGE,
+            &vmw_cmd_invalidate_gb_image, true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_SURFACE,
+            &vmw_cmd_invalidate_gb_surface, true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_CONTEXT, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_CONTEXT, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_CONTEXT, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_CONTEXT, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_CONTEXT, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SHADER, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SHADER, &vmw_cmd_bind_gb_shader,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SHADER, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_OTABLE_BASE64, &vmw_cmd_invalid,
+            false, false, false),
+    VMW_CMD_DEF(SVGA_3D_CMD_BEGIN_GB_QUERY, &vmw_cmd_begin_gb_query,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_END_GB_QUERY, &vmw_cmd_end_gb_query,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_WAIT_FOR_GB_QUERY, &vmw_cmd_wait_gb_query,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_NOP, &vmw_cmd_ok,
+            true, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_ENABLE_GART, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DISABLE_GART, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_MAP_MOB_INTO_GART, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_UNMAP_GART_RANGE, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SCREENTARGET, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SCREENTARGET, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SCREENTARGET, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_SCREENTARGET, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_IMAGE_PARTIAL, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_IMAGE_PARTIAL, &vmw_cmd_invalid,
+            false, false, true),
+    VMW_CMD_DEF(SVGA_3D_CMD_SET_GB_SHADERCONSTS_INLINE, &vmw_cmd_cid_check,
+            true, false, true)
+};
 
 static int vmw_cmd_check(struct vmw_private *dev_priv,
              struct vmw_sw_context *sw_context,
              void *buf, uint32_t *size)
 {
     uint32_t cmd_id;
     uint32_t size_remaining = *size;
     SVGA3dCmdHeader *header = (SVGA3dCmdHeader *) buf;
     int ret;
+    const struct vmw_cmd_entry *entry;
+    bool gb = dev_priv->capabilities & SVGA_CAP_GBOBJECTS;
 
@@ -1105 +1784 @@
     cmd_id = le32_to_cpu(header->id);
     *size = le32_to_cpu(header->size) + sizeof(SVGA3dCmdHeader);
 
     cmd_id -= SVGA_3D_CMD_BASE;
     if (unlikely(*size > size_remaining))
-        goto out_err;
+        goto out_invalid;
 
     if (unlikely(cmd_id >= SVGA_3D_CMD_MAX - SVGA_3D_CMD_BASE))
-        goto out_err;
+        goto out_invalid;
+
+    entry = &vmw_cmd_entries[cmd_id];
+    if (unlikely(!entry->user_allow && !sw_context->kernel))
+        goto out_privileged;
+
+    if (unlikely(entry->gb_disable && gb))
+        goto out_old;
+
+    if (unlikely(entry->gb_enable && !gb))
+        goto out_new;
 
-    ret = vmw_cmd_funcs[cmd_id](dev_priv, sw_context, header);
+    ret = entry->func(dev_priv, sw_context, header);
     if (unlikely(ret != 0))
-        goto out_err;
+        goto out_invalid;
 
     return 0;
-out_err:
-    DRM_ERROR("Illegal / Invalid SVGA3D command: %d\n",
+out_invalid:
+    DRM_ERROR("Invalid SVGA3D command: %d\n",
+          cmd_id + SVGA_3D_CMD_BASE);
+    return -EINVAL;
+out_privileged:
+    DRM_ERROR("Privileged SVGA3D command: %d\n",
+          cmd_id + SVGA_3D_CMD_BASE);
+    return -EPERM;
+out_old:
+    DRM_ERROR("Deprecated (disallowed) SVGA3D command: %d\n",
+          cmd_id + SVGA_3D_CMD_BASE);
+    return -EINVAL;
+out_new:
+    DRM_ERROR("SVGA3D command: %d not supported by virtual hardware.\n",
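The rewritten vmw_cmd_check() thus separates three distinct rejection classes before ever dispatching: privileged commands from user space, commands deprecated when guest-backed objects are present, and GB-only commands on hardware without them. A condensed sketch of that gating order (entry mirrors vmw_cmd_entry; the -1/-22 returns stand in for -EPERM/-EINVAL):

    #include <stdbool.h>

    struct entry {
        int (*func)(void *ctx);
        bool user_allow;   /* callable from the execbuf ioctl */
        bool gb_disable;   /* disabled when guest-backed objects exist */
        bool gb_enable;    /* requires guest-backed objects */
    };

    static int check_one(const struct entry *e, bool from_kernel, bool gb,
                         void *ctx)
    {
        if (!e->user_allow && !from_kernel)
            return -1;     /* EPERM: privileged command */
        if (e->gb_disable && gb)
            return -22;    /* EINVAL: deprecated on this hardware */
        if (e->gb_enable && !gb)
            return -22;    /* EINVAL: needs GB objects we don't have */
        return e->func(ctx);
    }

Checking user_allow first means a privileged command is reported as such even when it would also fail the hardware-capability tests.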
@@ -1172 +1873 @@
             reloc->location->gmrId = SVGA_GMR_FRAMEBUFFER;
             break;
         case VMW_PL_GMR:
             reloc->location->gmrId = bo->mem.start;
             break;
+        case VMW_PL_MOB:
+            *reloc->mob_loc = bo->mem.start;
+            break;
         default:
             BUG();
         }
     }
     vmw_free_relocations(sw_context);
@@ -1196 +1900 @@
      */
 
     list_for_each_entry_safe(val, val_next, list, head) {
         list_del_init(&val->head);
         vmw_resource_unreference(&val->res);
+        if (unlikely(val->staged_bindings))
+            kfree(val->staged_bindings);
         kfree(val);
     }
 }
@@ -1222 +1928 @@
     list_for_each_entry(val, &sw_context->resource_list, head)
         (void) drm_ht_remove_item(&sw_context->res_ht, &val->hash);
 }
 
 static int vmw_validate_single_buffer(struct vmw_private *dev_priv,
-                      struct ttm_buffer_object *bo)
+                      struct ttm_buffer_object *bo,
+                      bool validate_as_mob)
 {
     int ret;
@@ -1236 +1943 @@
     if (bo == dev_priv->pinned_bo ||
         (bo == dev_priv->dummy_query_bo &&
          dev_priv->dummy_query_bo_pinned))
         return 0;
 
+    if (validate_as_mob)
+        return ttm_bo_validate(bo, &vmw_mob_placement, true, false);
+
     /**
      * Put BO in VRAM if there is space, otherwise as a GMR.
      * If there is no space in VRAM and GMR ids are all used up,
      * start evicting GMRs to make room. If the DMA buffer can't be
@@ -1257 +1967 @@
     DRM_INFO("Falling through to VRAM.\n");
     ret = ttm_bo_validate(bo, &vmw_vram_placement, true, false);
     return ret;
 }
-
 
 static int vmw_validate_buffers(struct vmw_private *dev_priv,
                 struct vmw_sw_context *sw_context)
 {
     struct vmw_validate_buffer *entry;
     int ret;
 
     list_for_each_entry(entry, &sw_context->validate_nodes, base.head) {
-        ret = vmw_validate_single_buffer(dev_priv, entry->base.bo);
+        ret = vmw_validate_single_buffer(dev_priv, entry->base.bo,
+                         entry->validate_as_mob);
         if (unlikely(ret != 0))
             return ret;
     }
@@ -1290 +2000 @@
     }
 
     if (sw_context->cmd_bounce != NULL)
         vfree(sw_context->cmd_bounce);
 
-    sw_context->cmd_bounce = KernelAlloc(sw_context->cmd_bounce_size);
+    sw_context->cmd_bounce = vmalloc(sw_context->cmd_bounce_size);
 
     if (sw_context->cmd_bounce == NULL) {
@@ -1508 +2218 @@
 
         if (unlikely(ret != 0))
             goto out_err;
     }
 
+    ret = mutex_lock_interruptible(&dev_priv->binding_mutex);
+    if (unlikely(ret != 0)) {
+        ret = -ERESTARTSYS;
+        goto out_err;
+    }
+
     cmd = vmw_fifo_reserve(dev_priv, command_size);
     if (unlikely(cmd == NULL)) {
         DRM_ERROR("Failed reserving fifo space for commands.\n");
         ret = -ENOMEM;
-        goto out_err;
+        goto out_unlock_binding;
     }
@@ -1537 +2253 @@
 
     if (ret != 0)
         DRM_ERROR("Fence submission error. Syncing.\n");
 
     vmw_resource_list_unreserve(&sw_context->resource_list, false);
+    mutex_unlock(&dev_priv->binding_mutex);
 
     ttm_eu_fence_buffer_objects(&ticket, &sw_context->validate_nodes,
                     (void *) fence);
@@ -1567 +2285 @@
      */
     vmw_resource_list_unreference(&resource_list);
 
     return 0;
 
+out_unlock_binding:
+    mutex_unlock(&dev_priv->binding_mutex);
 out_err:
     vmw_resource_relocations_free(&sw_context->res_relocations);
     vmw_free_relocations(sw_context);