zjs_project 1 year ago
parent
commit
5ef12394f8
2 changed files with 196 additions and 577 deletions
  1. +1 -1
      src/pages/webgl_rxdz_vr/webgl_rxdz_vr.html
  2. +195 -576
      src/pages/webgl_rxdz_vr/webgl_rxdz_vr.vue

+ 1 - 1
src/pages/webgl_rxdz_vr/webgl_rxdz_vr.html

@@ -25,5 +25,5 @@
 	  :spaceObj="curSpaceObj" ></viewMask> -->
 	<!-- Canvas used to generate the screenshot image -->
 	<canvas id="canvas" type="2d" :style="{'height':canvasHeight+'px','top':'100vh'}"></canvas>
-	<!-- <video id="video" style="display:none" autoplay playsinline src="https://dm.static.elab-plus.com/miniProgram/green.mp4"></video> -->
+	<!-- <video id="myvideo" loop muted autoplay width="304" height="540" webkit-playsinline="webkit-playsinline" playsinline="playsinline" src="https://dm.static.elab-plus.com/miniProgram/001.mp4"></video> -->
 </div>

+ 195 - 576
src/pages/webgl_rxdz_vr/webgl_rxdz_vr.vue

@@ -7,36 +7,22 @@
 	import {
 		OrbitControls
 	} from 'three/addons/controls/OrbitControls.js';
-	import {
-		GLTFLoader
-	} from 'three/addons/loaders/GLTFLoader.js';
-
-	import TWEEN from 'three/addons/libs/tween.module.js';
+	import { DragControls } from 'three/addons/controls/DragControls.js';
 	import {
 		getStorage
 	} from '@/utils/localStorage';
 	var requestId = "";
 	const util = require('@/utils/util.js').default;
-	// import util from '@/utils/util.js';
-	// const config = require('@/services/urlConfig.js');
-	// import requestConfig from '@/static/lib/requestConfig.js';
 	import viewShell from '@/components/newBottomCom/viewShell/viewShell.vue';
 	import viewMask from '@/components/newBottomCom/viewMask/viewMask.vue';
 
-	// import { RGBELoader } from '@/webgl/jsm/loaders/RGBELoader.js';
-	import screenshot from '@/mixins/screenshot.js';
-	import floorMethod from '@/mixins/floorMethod.js';
-	import loadModel from '@/mixins/loadModel.js';
-	import wallMethod from '@/mixins/wallMethod.js';
 	import commonPageMethod from '@/mixins/commonPageMethod.js';
 	// import commonPageMethod from '@/common/commonPageMethod.js';
-	// const app = getApp(); //获取应用实例
 	export default {
 		components: {
-			viewShell,
 			viewMask
 		},
-		mixins: [screenshot, loadModel, floorMethod, wallMethod, commonPageMethod],
+		mixins: [commonPageMethod],
 		/**
 		 * Initial data for the page
 		 */
@@ -63,18 +49,8 @@
 					pageName: this.pvCurPageName,
 				},
 				id: '', // floor-plan ID
-				spaceList: [], // list of spaces
-				gltfSpaces: [], // floor models in the scene
-				gltfSpaceRoofs: [],
-				curSpaceObj: null, // currently selected space
-				// curSpaceIndex:-1, // index of the currently selected space
-				spaceId: null,
-				wallIds: [], // wall ids of this space
-				// wallList:[], // wall data
-				gltfWalls: [], // wall models in the scene
 				loader: null,
 				scene: null,
-				// sky:null,
 				camera: null,
 				controlStarPosition: {
 					x: 0,
@@ -83,33 +59,15 @@
 				}, // initial position of the controls target
 				cameraStarPosition: {
 					x: 0,
-					y: 20,
-					z: 0
+					y: 0,
+					z: 5
 				}, // initial camera position
 				cameraLastPosition: null, // last position the camera moved to
 				controlLastPosition: null, // last position the view target moved to
 				canvasHeight: 408, // height of the canvas view (computed)
-				chooseMesh: null, // mesh picked by the pointer
-				shottingImg: [],
-				progress: 1, // progress bar
 				myLoadingStatus: false,
-				// textGeoList:[],
 				repeatFlag: false, // guard against repeated taps
-				// skyPlan: null, // skybox
-				instancedMeshList: [],
-				screenshotResolve: null,
-				actors: [],
-				showDownView: true, // show the download button by default
-				currentActor: null,
-				circleGroup: null, // circular ground marker
 				isIOS: false,
-				defaulIndex: null, // index of the default viewpoint
-				// aiImagesList:[
-				// 	// "https://dm.static.elab-plus.com/miniProgram/plus_IM01.png",
-				// 	// "https://dm.static.elab-plus.com/miniProgram/plus_IM02.png",
-				// 	// "https://dm.static.elab-plus.com/miniProgram/plus_IM03.png",
-				// 	// "https://dm.static.elab-plus.com/miniProgram/plus_IM04.png",
-				// ]
 			}
 		},
 		beforeDestroy() {
@@ -144,16 +102,11 @@
 				this.renderer.context = null;
 				this.renderer.domElement = null;
 				this.renderer = null;
-				this.clearHandle()
 			}
-			TWEEN && TWEEN.removeAll(); // remove all tweens
-			console.warn("***beforeDestroy-webgl_rxdz_roam***");
+			console.warn("***beforeDestroy-webgl_rxdz_vr***");
 		},
 		mounted(options) {
 			var that = this;
-			// alert("JavaScript 堆大小限制: "+performance.memory.jsHeapSizeLimit
-			// +"\n已使用的 JavaScript 堆大小: "+performance.memory.usedJSHeapSize
-			// +"\nJavaScript 堆的总大小: "+performance.memory.totalJSHeapSize);
 			console.warn("***webgl_rxdz_roam-options***", this.$route.query)
 			this.isIOS = !!navigator.userAgent.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/);
 
@@ -177,55 +130,37 @@
 			let camera = null,
 				renderer = null;
 			let needRender = false; // whether a render is needed: false = no, true = yes
-			let loader = this.loader = new GLTFLoader();
 			let scene = this.scene = new THREE.Scene();
-			let raycaster = null;
-			let mouse = new THREE.Vector2();
-			let chooseMesh = this.chooseMesh; // mesh picked by the pointer
-			let isUserContorl = true; // whether roaming mode is active (default: yes)
-			// variables used while roaming
-			let onPointerDownMouseX = 0,
-				onPointerDownMouseY = 0,
-				lon = 0;
-			let fingerCount = 0; // number of fingers while touching
-			let startTime = 0; // movement variable when not roaming
-			let tweenCameraAnma = false; // whether a camera tween animation is in progress
 			let controls = null,
 				boundary = null;
 			let stats;
+			let controls2 = null;
+			let videoMesh = null;
 			init();
-			// this.$refs.myLoading.showLoading("加载中...1%")
-			// this.myLoadingStatus = true;
-			this.clearEvent = clearEvent;
-			this.attendEvent = attendEvent;
-			this.tweenCameraAnmaChange = tweenCameraAnmaChange;
+			// this.clearEvent = clearEvent;
+			// this.attendEvent = attendEvent;
 			this.starRender = starRender; // expose the method that starts rendering
 			this.stopRender = stopRender; // expose the method that stops rendering
 
 			function init() {
-
-				// scene.background = new THREE.Color("#FFFFFF");
-				// scene.environment = new THREE.Color("#F2F2F2");
-				// create an HDR texture loader
-				// const rgbeloader = new RGBELoader();
-				// // load the HDR texture
-				// rgbeloader.load('https://dm.static.elab-plus.com/miniProgram/environment.hdr', (texture) => {
-				// 	// use the HDR texture as the scene's environment map
-				// 	texture.mapping = THREE.EquirectangularReflectionMapping;
-				// 	scene.environment = texture;
-				// })
-
 				// create the camera and set its position
-				camera = new THREE.PerspectiveCamera(90, screenWidth / that.canvasHeight, 0.1, 10000);
+				camera = new THREE.PerspectiveCamera(120, screenWidth / that.canvasHeight, 0.1, 10000);
 				// camera.up.set(0, 1, 0); // top-down view: set the camera's up vector {x:0,y:1,z:0}
 				camera.position.set(that.cameraStarPosition.x, that.cameraStarPosition.y, that.cameraStarPosition.z);
 				scene.add(camera);
 				that.camera = camera;
 
 				// ambient light illuminates all objects in the scene evenly
-				const ambientLight = new THREE.AmbientLight(0xFFEFE0, 3.5);
-				scene.add(ambientLight);
-
+				// const ambientLight = new THREE.AmbientLight(0xFFEFE0, 3.5);
+				// scene.add(ambientLight);
+				// helper axes and grid
+				const axesHelper = new THREE.AxesHelper( 50 );
+				scene.add( axesHelper );
+				const gridHelper = new THREE.GridHelper(50, 10, 0xcccccc, 0xcccccc);
+				gridHelper.position.y = 0;
+				gridHelper.position.x = 0;
+				scene.add(gridHelper);
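+				// The axes and grid helpers are only visual debugging aids; the scene works without them.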
+				// load the 720° environment panorama texture
 				const loader = new THREE.TextureLoader();
 				const texture = loader.load(
 					'https://dm.static.elab-plus.com/miniProgram/tears_of_steel_bridge_2k.jpg', () => {
@@ -237,19 +172,7 @@
 				renderer = that.renderer = new THREE.WebGLRenderer({
 					canvas: canvas3d,
 					alpha: true,
-					antialias: true,
-					preserveDrawingBuffer: true,
 				});
-				if (!that.isIOS) {
-					renderer.shadowMap.enabled = true; // enable shadows
-					renderer.shadowMap.type = THREE.PCFSoftShadowMap; // shadow type
-				}
-				renderer.outputEncoding = THREE.sRGBEncoding;
-				renderer.outputColorSpace = THREE.SRGBColorSpace;
-				// renderer.toneMappingExposure = 0.1; // tone-mapping exposure level, default 1
-				renderer.toneMapping = THREE.NoToneMapping; // tone mapping
-				renderer.physicallyCorrectLights = true; // key setting: simulate physically correct lighting, must be true
-
 				renderer.setPixelRatio(window.devicePixelRatio);
 				renderer.setSize(screenWidth, that.canvasHeight);
 				container.appendChild(renderer.domElement);
@@ -257,17 +180,13 @@
 				controls = new OrbitControls(camera, renderer.domElement);
 				controls.enableDamping = true; // enable damping
 				controls.minDistance = 0.0001;
-				controls.maxDistance = 10;
+				controls.maxDistance = 30;
 				controls.minPolarAngle = 0; // default 0
 				controls.maxPolarAngle = Math.PI / 2; // default Math.PI, i.e. how far the view can rotate downward
-				// controls.target.set(  that.controlStarPosition.x, that.controlStarPosition.y, that.controlStarPosition.z);
 				controls.enableZoom = true; // enable camera zoom
 				controls.enablePan = true; // enable camera panning
 				controls.enableRotate = true; // enable horizontal/vertical camera rotation
-				// controls.zoomToCursor = true;
 
-				// controls.target.copy(camera.position);
-				// controls.update();
 				// listen for camera movement and restrict it to the current space
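 				// Box3.clampPoint() writes the clamped point into its second argument, so the camera gets pulled back inside the boundary box whenever it drifts outside.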
 				controls.addEventListener('change', () => {
 					// check whether the camera position has moved outside the bounding box
@@ -276,17 +195,10 @@
 						boundary.clampPoint(camera.position, clampedPosition);
 						if (clampedPosition) {
 							camera.position.copy(clampedPosition);
-							// controls.target.copy(clampedPosition);
 						}
 					}
 				});
-				// controls.target = new THREE.Vector3( );;
 				camera.lookAt(that.controlStarPosition.x, that.controlStarPosition.y, that.controlStarPosition.z);
-				raycaster = new THREE.Raycaster();
-				// stats = new Stats();
-				// container.appendChild(stats.dom);
-				// stats.domElement.style.top = '100px';
-				attendEvent(); // register event listeners
 				starRender(); // start rendering
 				setTimeout(() => {
 					videoHandle()
@@ -294,404 +206,200 @@
 			}
 
 			function videoHandle() {
-				// let video = document.getElementById('video');
-
-				// let texture = new THREE.pointTexture(video);
-				// texture.colorSpace = THREE.SRGBColorSpace;
-
 				// create the video element
 				var video = document.createElement('video');
-				video.src = 'https://dm.static.elab-plus.com/miniProgram/001.mp4'; // path to the video file
-				// video.crossOrigin = "anonymous";this.crossOrigin = 'anonymous';
+				video.src = 'https://dm.static.elab-plus.com/miniProgram/005.mp4'; // path to the video file
 				video.setAttribute("crossOrigin", "Anonymous");
 				video.loop = true;
 				video.muted = true;
-				// video.load();
 				video.play();
+				
+				console.warn("***video***",video,video.width);
 				// create the video texture
-				var texture = new THREE.VideoTexture(video);
-				// texture.colorSpace = THREE.SRGBColorSpace;
-				// texture.minFilter = THREE.LinearFilter;
-				// texture.magFilter = THREE.LinearFilter;
-				// texture.format = THREE.RGBFormat;
-
+				var videoTexture = new THREE.VideoTexture(video);
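+				// THREE.VideoTexture keeps the texture in sync with the playing <video> element each frame; the muted flag avoids most browsers' autoplay restrictions on play().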
+				// videoTexture.minFilter = THREE.LinearFilter;
+				// videoTexture.format = THREE.RGBFormat;
+
+				var keyColorObject = new THREE.Color("#00ff05");
+				var myuniforms = {
+				    pointTexture: {
+				        type: "t",
+				        value: videoTexture
+				    },
+				    color: {
+				        type: "c",
+				        value: keyColorObject
+				    },
+				    videowidth: {
+				        type: "f",
+				        value: 501.0
+				    },
+				    videoheight: {
+				        type: "f",
+				        value: 1024.0
+				    },
+				    filterType: {
+				        type: "i",
+				        value: 0
+				    },
+				    lightLevel: {
+				        type: "f",
+				        value: 0.2
+				    },
+				    gridSize: {
+				        type: "f",
+				        value: 0.8
+				    }
+				};
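+				// These uniforms drive the chroma-key fragment shader below; videowidth/videoheight are assumed to match the source clip's frame size and are only read by filterType 2 and 6, which stay inactive while filterType is 0.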
 				// create the green-screen (chroma-key) shader
 				let greenScreenShader = {
-					uniforms: {
-						pointTexture: {
-							value: texture
-						},
-					},
+					uniforms: myuniforms,
+					transparent: true,
 					vertexShader: `    
 						varying vec2 vUv;    
-						void main() {      
-							vUv = uv;      
-							gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);    
+						void main() {
+							vUv = uv;
+							vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
+							gl_Position = projectionMatrix * mvPosition;
 						}  `,
-					// fragmentShader: `
-					// 	uniform sampler2D pointTexture;
-					// 		uniform float threshold;
-					// 		varying vec2 vUv;
-					// 		void main() {
-					// 			vec4 color = texture2D(pointTexture, vUv);
-					// 			float greenScreen = color.g - max(color.r, color.b);
-					// 			float alpha = 1.0 - smoothstep(threshold - 0.05, threshold + 0.05, greenScreen);
-					// 			gl_FragColor = vec4(color.rgb, alpha);
-					// 		}
-					// `,
-					// fragmentShader: `
-					// 	precision mediump float;
-					// 	uniform sampler2D pointTexture;
-					// 	uniform sampler2D colorlut;
-					// 	uniform vec3 color;
-					// 	uniform float videowidth;
-					// 	uniform float videoheight;
-					// 	uniform int filterType;
-					// 	uniform float gridSize;
-					// 	uniform float lightLevel;
-						
-					// 	varying vec2 vUv;
-					// 	float alter=3.0;
-					// 	float u_mode=0.0;
-					// 	float u_threshold=1.0;
-					// 	float u_clipBlack=0.5;
-					// 	float u_clipWhite=1.0;
-						
-					// 	float rgb2cb(float r, float g, float b){
-					// 		return 0.5 + -0.168736*r - 0.331264*g + 0.5*b;
-					// 	}
-					// 	float rgb2cr(float r, float g, float b){
-					// 		return 0.5 + 0.5*r - 0.418688*g - 0.081312*b;
-					// 	}
-					// 	float smoothclip(float low, float high, float x){
-					// 		if (x <= low){
-					// 			return 0.0;
-					// 		}
-					// 		if(x >= high){
-					// 			return 1.0;
-					// 		}
-					// 		return (x-low)/(high-low);
-					// 	}
-					// 	vec4 greenscreen(vec4 colora, float Cb_key,float Cr_key, float tola,float tolb, float clipBlack, float clipWhite){
-					// 		float cb = rgb2cb(colora.r,colora.g,colora.b);
-					// 		float cr = rgb2cr(colora.r,colora.g,colora.b);
-					// 		float alpha = distance(vec2(cb, cr), vec2(Cb_key, Cr_key));
-					// 		alpha = smoothclip(tola, tolb, alpha);
-					// 		float r = max(gl_FragColor.r - (1.0-alpha)*color.r, 0.0);
-					// 		float g = max(gl_FragColor.g - (1.0-alpha)*color.g, 0.0);
-					// 		float b = max(gl_FragColor.b - (1.0-alpha)*color.b, 0.0);
-					// 		if(alpha < clipBlack){
-					// 			alpha = r = g = b = 0.0;
-					// 		}
-					// 		if(alpha > clipWhite){
-					// 			alpha = 1.0;
-					// 		}
-					// 		if(clipWhite < 1.0){
-					// 			alpha = alpha/max(clipWhite, 0.9);
-					// 		}
-					// 		return vec4(r,g,b, alpha);
-					// 	}
-						
-					// 	void main()
-					// 	{
-					// 		gl_FragColor = texture2D( pointTexture, vUv );
-					// 		float tola = 0.0;
-					// 		float tolb = u_threshold/2.0;
-					// 		float cb_key = rgb2cb(color.r, color.g, color.b);
-					// 		float cr_key = rgb2cr(color.r, color.g, color.b);
-					// 		gl_FragColor = greenscreen(gl_FragColor, cb_key, cr_key, tola, tolb, u_clipBlack, u_clipWhite);
-					// 		if(u_mode > 0.5 && u_mode < 1.5){
-					// 				gl_FragColor = mix(vec4(1.0, 1.0, 1.0, 1.0), gl_FragColor, gl_FragColor.a);
-					// 				gl_FragColor.a = 1.0;
-					// 		}
-					// 		if(u_mode > 1.5 && u_mode < 2.5){
-					// 				gl_FragColor = vec4(gl_FragColor.a, gl_FragColor.a, gl_FragColor.a, 1.0);
-					// 		}
-					// 		if(filterType==1){
-					// 			float gray = 0.2989*gl_FragColor.r+0.5870*gl_FragColor.g+0.1140*gl_FragColor.b;
-					// 			gl_FragColor = vec4(gray,gray,gray , gl_FragColor.a);
-					// 		}else if(filterType==2){
-					// 			vec3 tColor2 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]) ).rgb;
-					// 			vec3 tColor3 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]) ).rgb;
-					// 			vec3 tColor4 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]+(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor5 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]-(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor6 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]-(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor7 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]+(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor8 = texture2D( pointTexture, vec2(vUv[0] , vUv[1]+(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor9 = texture2D( pointTexture, vec2(vUv[0] , vUv[1]+(1.0/videoheight)) ).rgb;
-					// 			vec3 tColor10 = texture2D( pointTexture, vec2(vUv[0]+(2.0/videowidth) , vUv[1]) ).rgb;
-					// 			vec3 tColor11 = texture2D( pointTexture, vec2(vUv[0]+(2.0/videowidth) , vUv[1]) ).rgb;
-					// 			gl_FragColor = vec4( (gl_FragColor.r+tColor2[0]+tColor3[0]+tColor4[0]+tColor5[0]+tColor6[0]+tColor7[0]+tColor8[0]+tColor9[0]+tColor10[0]+tColor11[0])/11.0,
-					// 			(gl_FragColor.g+tColor2[1]+tColor3[1]+tColor4[1]+tColor5[1]+tColor6[1]+tColor7[1]+tColor8[1]+tColor9[1]+tColor10[1]+tColor11[1])/11.0,
-					// 			(gl_FragColor.b+tColor2[2]+tColor3[2]+tColor4[2]+tColor5[2]+tColor6[2]+tColor7[2]+tColor8[2]+tColor9[2]+tColor10[2]+tColor11[2])/11.0,
-					// 			gl_FragColor.a);
-					// 		}else if(filterType==3){
-					// 			float brightr=gl_FragColor.r+lightLevel;
-					// 			float brightg=gl_FragColor.g+lightLevel;
-					// 			float brightb=gl_FragColor.b+lightLevel;
-					// 			gl_FragColor = vec4(brightr,brightg,brightb , gl_FragColor.a);
-					// 		}else if(filterType==4){
-					// 			float reverser=1.0 - gl_FragColor.r;
-					// 			float reverseg=1.0 - gl_FragColor.g;
-					// 			float reverseb=1.0 - gl_FragColor.b;
-					// 			gl_FragColor = vec4(reverser,reverseg,reverseb,gl_FragColor.a);
-					// 		}else if(filterType==5){
-								
-					// 			float dx = fract(sin(dot(vUv ,vec2(12.9898,78.233))) * 43758.5453);
-					// 			vec3 cResult = gl_FragColor.rgb + gl_FragColor.rgb * clamp( 0.1 + dx, 0.0, 1.0 );
-					// 			vec2 sc = vec2( sin( vUv.y * 4096.0 ), cos( vUv.y * 4096.0 ) );
-					// 			cResult += gl_FragColor.rgb * vec3( sc.x, sc.y, sc.x ) * 0.025;
-					// 			cResult = gl_FragColor.rgb + clamp( 0.35, 0.0,1.0 ) * ( cResult - gl_FragColor.rgb );
-					// 			if( false ) {
-					// 				cResult = vec3( cResult.r * 0.3 + cResult.g * 0.59 + cResult.b * 0.11 );
-					// 			}
-					// 			float oldr=0.393*cResult[0]+0.769*cResult[1]+0.189*cResult[2];
-					// 			float oldg=0.349*cResult[0]+0.686*cResult[1]+0.168*cResult[2];
-					// 			float oldb=0.272*cResult[0]+0.534*cResult[1]+0.131*cResult[2];
-					// 			gl_FragColor =  vec4( oldr,oldg,oldb , gl_FragColor.a);
-					// 		}else if(filterType==6){
-					// 			float average = ( gl_FragColor.r + gl_FragColor.g + gl_FragColor.b ) / 2.0;
-					// 			float s = sin( 0.5 ), c = cos( 0.5 );
-					// 			vec2 tex = vUv * vec2(videowidth,videoheight) - vec2(0,0);
-					// 			vec2 point = vec2( c * tex.x - s * tex.y, s * tex.x + c * tex.y ) * gridSize;
-					// 			float pattern =  ( sin( point.x ) * sin( point.y ) ) * 4.0;
-					// 			float seed = average * 10.0 - 5.0 + pattern ;
-					// 			gl_FragColor = vec4(  seed*0.3+gl_FragColor.r*0.7,seed*0.3+gl_FragColor.g*0.7 ,seed*0.3+gl_FragColor.b*0.7, gl_FragColor.a );
-					// 		}
-					// 	}
-					// `,
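 					// Chroma-key pass: each texel is converted to Cb/Cr (BT.601), its distance from the key color becomes the alpha, near-key pixels whose alpha falls below u_clipBlack are made fully transparent; filterType 1-6 add optional grayscale/blur/brighten/invert/film-grain/halftone effects (skipped here since filterType is 0).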
 					fragmentShader: `
+						precision mediump float;
 						uniform sampler2D pointTexture;
+						uniform sampler2D colorlut;
+						uniform vec3 color;
+						uniform float videowidth;
+						uniform float videoheight;
+						uniform int filterType;
+						uniform float gridSize;
+						uniform float lightLevel;
+						
 						varying vec2 vUv;
-						void main() {
-						    vec4 color = texture2D(pointTexture, vUv);
-						    float threshold = 0.5;
-						    if (color.g > threshold) {
-								discard; 
-						    } else {
-								gl_FragColor = color;
-						    }
+						float alter=3.0;
+						float u_mode=0.0;
+						float u_threshold=1.0;
+						float u_clipBlack=0.9;
+						float u_clipWhite=1.0;
+						
+						float rgb2cb(float r, float g, float b){
+							return 0.5 + -0.168736*r - 0.331264*g + 0.5*b;
+						}
+						float rgb2cr(float r, float g, float b){
+							return 0.5 + 0.5*r - 0.418688*g - 0.081312*b;
+						}
+						float smoothclip(float low, float high, float x){
+							if (x <= low){
+								return 0.0;
+							}
+							if(x >= high){
+								return 1.0;
+							}
+							return (x-low)/(high-low);
+						}
+						vec4 greenscreen(vec4 colora, float Cb_key,float Cr_key, float tola,float tolb, float clipBlack, float clipWhite){
+							float cb = rgb2cb(colora.r,colora.g,colora.b);
+							float cr = rgb2cr(colora.r,colora.g,colora.b);
+							float alpha = distance(vec2(cb, cr), vec2(Cb_key, Cr_key));
+							alpha = smoothclip(tola, tolb, alpha);
+							float r = max(gl_FragColor.r - (1.0-alpha)*color.r, 0.0);
+							float g = max(gl_FragColor.g - (1.0-alpha)*color.g, 0.0);
+							float b = max(gl_FragColor.b - (1.0-alpha)*color.b, 0.0);
+							if(alpha < clipBlack){
+								alpha = r = g = b = 0.0;
+							}
+							if(alpha > clipWhite){
+								alpha = 1.0;
+							}
+							if(clipWhite < 1.0){
+								alpha = alpha/max(clipWhite, 0.9);
+							}
+							return vec4(r,g,b, alpha);
+						}
+						
+						void main()
+						{
+							gl_FragColor = texture2D( pointTexture, vUv );
+							float tola = 0.0;
+							float tolb = u_threshold/2.0;
+							float cb_key = rgb2cb(color.r, color.g, color.b);
+							float cr_key = rgb2cr(color.r, color.g, color.b);
+							gl_FragColor = greenscreen(gl_FragColor, cb_key, cr_key, tola, tolb, u_clipBlack, u_clipWhite);
+							if(u_mode > 0.5 && u_mode < 1.5){
+								gl_FragColor = mix(vec4(1.0, 1.0, 1.0, 1.0), gl_FragColor, gl_FragColor.a);
+								gl_FragColor.a = 1.0;
+							}
+							if(u_mode > 1.5 && u_mode < 2.5){
+								gl_FragColor = vec4(gl_FragColor.a, gl_FragColor.a, gl_FragColor.a, 1.0);
+							}
+							if(filterType==1){
+								float gray = 0.2989*gl_FragColor.r+0.5870*gl_FragColor.g+0.1140*gl_FragColor.b;
+								gl_FragColor = vec4(gray,gray,gray , gl_FragColor.a);
+							}else if(filterType==2){
+								vec3 tColor2 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]) ).rgb;
+								vec3 tColor3 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]) ).rgb;
+								vec3 tColor4 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]+(1.0/videoheight)) ).rgb;
+								vec3 tColor5 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]-(1.0/videoheight)) ).rgb;
+								vec3 tColor6 = texture2D( pointTexture, vec2(vUv[0]+(1.0/videowidth) , vUv[1]-(1.0/videoheight)) ).rgb;
+								vec3 tColor7 = texture2D( pointTexture, vec2(vUv[0]-(1.0/videowidth) , vUv[1]+(1.0/videoheight)) ).rgb;
+								vec3 tColor8 = texture2D( pointTexture, vec2(vUv[0] , vUv[1]+(1.0/videoheight)) ).rgb;
+								vec3 tColor9 = texture2D( pointTexture, vec2(vUv[0] , vUv[1]+(1.0/videoheight)) ).rgb;
+								vec3 tColor10 = texture2D( pointTexture, vec2(vUv[0]+(2.0/videowidth) , vUv[1]) ).rgb;
+								vec3 tColor11 = texture2D( pointTexture, vec2(vUv[0]+(2.0/videowidth) , vUv[1]) ).rgb;
+								gl_FragColor = vec4( (gl_FragColor.r+tColor2[0]+tColor3[0]+tColor4[0]+tColor5[0]+tColor6[0]+tColor7[0]+tColor8[0]+tColor9[0]+tColor10[0]+tColor11[0])/11.0,
+								(gl_FragColor.g+tColor2[1]+tColor3[1]+tColor4[1]+tColor5[1]+tColor6[1]+tColor7[1]+tColor8[1]+tColor9[1]+tColor10[1]+tColor11[1])/11.0,
+								(gl_FragColor.b+tColor2[2]+tColor3[2]+tColor4[2]+tColor5[2]+tColor6[2]+tColor7[2]+tColor8[2]+tColor9[2]+tColor10[2]+tColor11[2])/11.0,
+								gl_FragColor.a);
+							}else if(filterType==3){
+								float brightr=gl_FragColor.r+lightLevel;
+								float brightg=gl_FragColor.g+lightLevel;
+								float brightb=gl_FragColor.b+lightLevel;
+								gl_FragColor = vec4(brightr,brightg,brightb , gl_FragColor.a);
+							}else if(filterType==4){
+								float reverser=1.0 - gl_FragColor.r;
+								float reverseg=1.0 - gl_FragColor.g;
+								float reverseb=1.0 - gl_FragColor.b;
+								gl_FragColor = vec4(reverser,reverseg,reverseb,gl_FragColor.a);
+							}else if(filterType==5){
+								float dx = fract(sin(dot(vUv ,vec2(12.9898,78.233))) * 43758.5453);
+								vec3 cResult = gl_FragColor.rgb + gl_FragColor.rgb * clamp( 0.1 + dx, 0.0, 1.0 );
+								vec2 sc = vec2( sin( vUv.y * 4096.0 ), cos( vUv.y * 4096.0 ) );
+								cResult += gl_FragColor.rgb * vec3( sc.x, sc.y, sc.x ) * 0.025;
+								cResult = gl_FragColor.rgb + clamp( 0.35, 0.0,1.0 ) * ( cResult - gl_FragColor.rgb );
+								if( false ) {
+									cResult = vec3( cResult.r * 0.3 + cResult.g * 0.59 + cResult.b * 0.11 );
+								}
+								float oldr=0.393*cResult[0]+0.769*cResult[1]+0.189*cResult[2];
+								float oldg=0.349*cResult[0]+0.686*cResult[1]+0.168*cResult[2];
+								float oldb=0.272*cResult[0]+0.534*cResult[1]+0.131*cResult[2];
+								gl_FragColor =  vec4( oldr,oldg,oldb , gl_FragColor.a);
+							}else if(filterType==6){
+								float average = ( gl_FragColor.r + gl_FragColor.g + gl_FragColor.b ) / 2.0;
+								float s = sin( 0.5 ), c = cos( 0.5 );
+								vec2 tex = vUv * vec2(videowidth,videoheight) - vec2(0,0);
+								vec2 point = vec2( c * tex.x - s * tex.y, s * tex.x + c * tex.y ) * gridSize;
+								float pattern =  ( sin( point.x ) * sin( point.y ) ) * 4.0;
+								float seed = average * 10.0 - 5.0 + pattern ;
+								gl_FragColor = vec4(  seed*0.3+gl_FragColor.r*0.7,seed*0.3+gl_FragColor.g*0.7 ,seed*0.3+gl_FragColor.b*0.7, gl_FragColor.a );
+							}
 						}
 					`,
-
-							// fragmentShader: `    
-							// 	uniform sampler2D pointTexture;
-							// 	uniform vec3 backColor;
-							// 	varying vec2 vUv;
-							// 	uniform float u_threshold;
-							// 	float u_clipBlack = 0.2;
-							// 	float u_clipWhite = 4.0;
-
-							// 	float rgb2cb(float r, float g, float b){ return 0.5 + -0.168736*r - 0.331264*g + 0.5*b; } 
-							// 	float rgb2cr(float r, float g, float b){ return 0.5 + 0.5*r - 0.418688*g - 0.081312*b; } 
-							// 	float smoothclip(float low, float high, float x){ if (x <= low){ return 0.0; } if(x >= high){ return 1.0; } return (x-low)/(high-low); }
-							// 	vec4 greenscreen(vec4 colora, float Cb_key,float Cr_key, float tola,float tolb, float clipBlack, float clipWhite)
-							// 	{ 
-							// 		float cb = rgb2cb(colora.r,colora.g,colora.b); 
-							// 		float cr = rgb2cr(colora.r,colora.g,colora.b); 
-							// 		float alpha = distance(vec2(cb, cr), vec2(Cb_key, Cr_key)); 
-							// 		alpha = smoothclip(tola, tolb, alpha); 
-							// 		float r = max(gl_FragColor.r - (1.0-alpha)*backColor.r, 0.0); 
-							// 		float g = max(gl_FragColor.g - (1.0-alpha)*backColor.g, 0.0); 
-							// 		float b = max(gl_FragColor.b - (1.0-alpha)*backColor.b, 0.0); 
-							// 		if(alpha < clipBlack){ alpha = r = g = b = 0.0; } 
-							// 		if(alpha > clipWhite){ alpha = 1.0; } 
-							// 		if(clipWhite < 1.0){ alpha = alpha/max(clipWhite, 0.9); } 
-							// 		return vec4(r,g,b, alpha); 
-							// 	}
-							// 	void main( void ) {
-
-							// 		gl_FragColor = vec4(texture2D(pointTexture, vUv).rgb, 1);
-
-							// 		float tola = 0.0; 
-							// 		float tolb = u_threshold/2.0; 
-							// 		float cb_key = rgb2cb(backColor.r, backColor.g, backColor.b); 
-							// 		float cr_key = rgb2cr(backColor.r, backColor.g, backColor.b); 
-							// 		gl_FragColor = greenscreen(gl_FragColor, cb_key, cr_key, tola, tolb, u_clipBlack, u_clipWhite);
-
-							// 	} 
-							// 	`,
-
 				};
 				// create the green-screen material
 				let greenScreenMaterial = new THREE.ShaderMaterial(greenScreenShader);
-
-				let geometry = new THREE.PlaneGeometry(16, 9);
-				// var geometry = new THREE.BoxGeometry(9, 9, 9);
-				// geometry.scale(0.5, 0.5, 0.5);
+				greenScreenMaterial.side = THREE.DoubleSide;
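+				// transparent: true together with the alpha written by the shader lets the keyed-out green background reveal the panorama behind the plane; DoubleSide keeps the video visible when viewed from behind.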
+				let geometry = new THREE.PlaneGeometry(9, 16);
 				// apply the green-screen material to the mesh
 				let mesh = new THREE.Mesh(geometry, greenScreenMaterial);
-				mesh.position.set(0, 0, 0);
-				// mesh.rotation.x =  -Math.PI / 2 ;  // rotate 180 degrees
-				// scene.add(object);
-				// const mesh = new THREE.Mesh(geometry, material);
-				// mesh.position.setFromSphericalCoords(radius, phi, theta);
-				mesh.lookAt(camera.position);
+				mesh.position.set(0, 0, -10); // set the video plane's position
+				// mesh.lookAt(camera.position);
 				scene.add(mesh);
-			}
-
-			function tweenCameraAnmaChange(value) {
-				tweenCameraAnma = value
-			}
-
-			function attendEvent() {
-				renderer.domElement.addEventListener('touchstart', onPointerStart, false);
-				renderer.domElement.addEventListener('touchmove', onPointerMove, false);
-				renderer.domElement.addEventListener('touchend', onPointerUp, false);
-			}
-			// remove the event listeners to avoid firing duplicate events on re-entry
-			function clearEvent() {
-				console.warn("**clearEvent****")
-				renderer && renderer.domElement && renderer.domElement.removeEventListener('touchstart', onPointerStart);
-				renderer && renderer.domElement && renderer.domElement.removeEventListener('touchmove', onPointerMove);
-				renderer && renderer.domElement && renderer.domElement.removeEventListener('touchend', onPointerUp);
-			}
-			// touch start
-			function onPointerStart(event) {
-				startTime = (new Date()).getTime();
-				fingerCount = event.touches.length; // number of fingers
-				console.log('开始触摸事件:', lon, fingerCount, camera.position.y)
-				if (fingerCount === 1) {
-					// with a single finger, record the touch point as the pan start point
-					onPointerDownMouseX = event.changedTouches[0].clientX;
-					onPointerDownMouseY = event.changedTouches[0].clientY;
-				}
-
-			}
-			// touch move
-			function onPointerMove(event) {
-				fingerCount = event.touches.length; // number of fingers
-			}
-			// touch end
-			function onPointerUp(event) {
-				fingerCount = event.touches.length; // number of fingers
-				console.warn("***触摸结束***", fingerCount, startTime)
-				if (fingerCount == 0) {
-					let now = new Date().getTime()
-					if (Math.abs(event.changedTouches[0].clientX - onPointerDownMouseX) < 10 &&
-						Math.abs(event.changedTouches[0].clientY - onPointerDownMouseY) < 10 &&
-						(now - startTime) < 300) {
-						checkIntersection(event);
-					}
-				}
-			}
-			// raycast hit-test handler
-			function checkIntersection(event) {
-				let x = (event.changedTouches[0].clientX / screenWidth) * 2 - 1;
-				let y = -(event.changedTouches[0].clientY / that.canvasHeight) * 2 + 1;
-				mouse.x = x;
-				mouse.y = y;
-				// update the raycaster
-				raycaster.setFromCamera(mouse, camera);
-				let intersects = raycaster.intersectObjects(scene.children, true);
-				console.warn("***checkIntersection***", intersects.length)
-				if (intersects.length > 0) {
-					// find the nearest mesh object
-					let mesh = intersects.find((it) => {
-						if (it.object && it.object.isMesh == true &&
-							it.object.parent && it.object.parent.name == 'actor' &&
-							it.object.parent.visible == true) {
-							return true;
-						}
-					});
-
-					// a viewpoint was picked, so stop picking further
-					if (mesh) {
-						moveActor(mesh.object.parent);
-						return false;
-					}
-					mesh = intersects.find((it) => {
-						if (it.object && it.object.isInstancedMesh &&
-							(it.object.name == '地板' || it.object.name == '花园') && it.object.visible == true) {
-							return true;
-						}
-					});
-					// the floor was picked
-					if (mesh) {
-						let floor = mesh.object;
-						let index = mesh.instanceId; // instance index hit by the ray
-						let spaceId = that.gltfSpaces[index].spaceId; // spaceId of the selected instance
-						if (floor.name == "花园") { // garden
-							return false;
-							let selectMesh = that.gltfSpaces.find(it => {
-								return it.spaceType == 14 && it.instancedMeshIndexList[0].instancedAtIndex == index
-							})
-							spaceId = selectMesh.spaceId;
-						} else { // indoor
-							// floor.name = "地板";
-							let selectMesh = that.gltfSpaces.find(it => {
-								return it.spaceType != 14 && it.instancedMeshIndexList[0].instancedAtIndex == index
-							})
-							spaceId = selectMesh.spaceId;
-						}
-						// let spaceId = that.gltfSpaces[index].spaceId; // spaceId of the selected instance
-						console.warn("***checkIntersection-地板***", floor, index, spaceId, that.spaceId)
-						// the pick hit this space's floor, which means the user tapped the floor
-						if (floor && spaceId == that.spaceId) {
-							moveCarmer(mesh.point);
-							return false;
-						}
-					}
-				}
-			}
-
-			function tweenCamera(oldP, oldT, newP, newT, oldUp, newUp, time = 1000) {
-				if (JSON.stringify(oldP) == JSON.stringify(newP) && JSON.stringify(oldT) == JSON.stringify(newT)) {
-					that.repeatFlag = false; // release the lock so clicking is allowed again
-					return false;
-				}
-				if (!chooseMesh) {
-					that.repeatFlag = false; // release the lock so clicking is allowed again
-					return false;
-				}
-				tweenCameraAnma = true;
-				var tween = new TWEEN.Tween({
-						x1: oldP.x, // camera x
-						y1: oldP.y, // camera y
-						z1: oldP.z, // camera z
-						x2: oldT.x, // control target center x
-						y2: oldT.y, // control target center y
-						z2: oldT.z, // control target center z
-						x3: oldUp.x, // up vector x
-						y3: oldUp.y, // up vector y
-						z3: oldUp.z // up vector z
-					})
-					.to({
-						x1: newP.x,
-						y1: newP.y,
-						z1: newP.z,
-						x2: newT.x,
-						y2: newT.y,
-						z2: newT.z,
-						x3: newUp.x, // up vector x
-						y3: newUp.y, // up vector y
-						z3: newUp.z // up vector z
-					}, time)
-					.easing(TWEEN.Easing.Quadratic.InOut)
-					.onUpdate((object) => {
-						camera.position.x = object.x1;
-						camera.position.y = object.y1;
-						camera.position.z = object.z1;
-						// let newTarget = new THREE.Vector3(object.x3,object.y3,object.z3);
-						// camera.up.copy(newTarget);
-						camera.lookAt(object.x2, object.y2, object.z2);
-						// controls.target.x = object.x2;
-						// controls.target.y = object.y2;
-						// controls.target.z = object.z2;
-						// controls.update();
-					}).onComplete(() => {
-						controls.target.x = newT.x;
-						controls.target.y = newT.y;
-						controls.target.z = newT.z;
-						// correct the final view
-						// let up = new THREE.Vector3(newUp.x,newUp.y,newUp.z);
-						// camera.up.copy(up);
-						camera.lookAt(newT.x, newT.y, newT.z);
-						tweenCameraAnma = false;
-						that.repeatFlag = false; // release the lock so clicking is allowed again
-					})
-				// start the animation
-				tween.start();
+				videoMesh = mesh;
+				controls2 = new DragControls([mesh], camera, renderer.domElement );
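+				// Disable OrbitControls while the video plane is being dragged so the two controls do not fight over the same pointer events.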
+				controls2.addEventListener('dragstart', function ( event ) {
+					controls.enabled = false;
+				});
+				controls2.addEventListener('dragend', function ( event ) {
+					controls.enabled = true;
+				});
 			}
 
 			function stopRender() {
@@ -710,26 +418,15 @@
 				if (needRender == false) {
 					return false;
 				}
-				TWEEN && TWEEN.update();
 				// stats.update();
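+				// Billboard: keep the video plane facing the camera on every frame.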
+				if(videoMesh){
+					videoMesh.lookAt(camera.position);
+				}
 				renderer.render(scene, camera); // single render pass
 				requestId = requestAnimationFrame(render, canvas3d);
-				if (that.screenshotResolve) {
-					stopRender();
-					that.screenshotResolve()
-					that.screenshotResolve = null; // release the Promise
-				}
 			}
 
 		},
-		// computed: {
-		// 	curHouseObj() {
-		// 		return this.$store.state.curHouseObj;
-		// 	},
-		// 	wallList() {
-		// 		return this.$store.state.wallList;
-		// 	},
-		// },
 		methods: {
 			navbarBackClk() {
 
@@ -737,84 +434,6 @@
 			clearHandle() {
 				this.clearEvent();
 			},
-			save() {
-				this.$refs.viewMask.save(); // download
-			},
-			/**
-			 * Set the floor-plan detail info
-			 * @param {Object} data floor-plan details
-			 */
-			setHouseDetail(data) {
-				if (data) {
-					this.id = data.id;
-					this.spaceId = this.$route.query.spaceId ? this.$route.query.spaceId : '';
-					console.warn("***curHouseObj***", data)
-					// load the floor plan
-					this.loadSpace();
-				} else {
-					this.curHouseObj = {}
-				}
-			},
-			// draw the spaces, i.e. the floors
-			async loadSpace() {
-				this.spaceList = [];
-				this.wallIds = [];
-				if (!this.curHouseObj || !this.spaceId) { // avoid duplicate requests
-					alert("数据错误")
-					console.warn("***数据错误***")
-					return false
-				}
-				if (this.curHouseObj) {
-					const spaceDetail = this.curHouseObj;
-					const spaceList = JSON.parse(spaceDetail.houseJson);
-					// swap centerX and centerY; already handled on the previous page, not needed here
-					for (let index = 0; index < spaceList.length; index++) {
-						var element = spaceList[index];
-						// const centerX = JSON.parse(JSON.stringify(element.centerX))
-						if (!element.actors || element.actors.length == 0) {
-							element.actors = [{
-								actorLocation: element.actorLocation,
-								actorTransform: element.actorTransform,
-								isSelected: true,
-							}]
-						}
-						element.actors.forEach(actor => {
-							let _actorLoaction = actor.actorLocation.split(','); // x y z
-							let X_C = parseInt(_actorLoaction[0]); // X-axis offset (because of UE)
-							let Y_C = -parseInt(_actorLoaction[1]); // Y-axis offset, negated: UE's Y axis is opposite to Three.js
-							let _x = element.centerX + X_C;
-							let _z = -element.centerY + Y_C; // centerY must be negated because UE's axis is reversed
-
-							// let _x = parseInt(_actorLoaction[1]) || element.centerX; // viewpoint X coordinate
-							// let _z = parseInt(_actorLoaction[0]) || element.centerY; // viewpoint Z coordinate
-							let _presentX = (_x - element.centerX) / ((element.spaceWidth / 2) -
-								10); // 10 is the wall thickness in cm
-							let _presentY = (_z + element.centerY) / ((element.spaceHeight / 2) - 10);
-							// if the initial value exceeds the space size, clamp it to the space boundary
-							actor.presentX = Math.abs(_presentX) > 1 ? (_presentX > 1 ? 1 : -1) :
-								_presentX; // ratio of the viewpoint's distance from the space center
-							actor.presentY = Math.abs(_presentY) > 1 ? (_presentY > 1 ? 1 : -1) : _presentY;
-						})
-						element.wallMoveValue = "[0,0,0,0]"
-						this.spaceList.push(element);
-						this.wallIds.push(element.wallId);
-
-						if (element.spaceId == this.spaceId) { // space selected by default
-							this.curSpaceObj = element;
-						}
-					}
-					if (!this.curSpaceObj && this.spaceList.length > 0) {
-						this.curSpaceObj = this.spaceList[0];
-					}
-				}
-				let curSpaceArea = parseFloat((this.curSpaceObj.spaceWidth * this.curSpaceObj.spaceHeight) / 10000)
-					.toFixed(1);
-				this.navbar.title = this.curSpaceObj.spaceName + "  " + curSpaceArea + "㎡"
-				console.log("该户型空间数据:", this.spaceList);
-				console.log("当前选中的空间:", this.curSpaceObj);
-				// fetch the wall data and draw the walls
-			},
-
 		}
 	}
 </script>