初次提交

This commit is contained in:
2022-09-19 18:05:01 +08:00
commit 57051fc44b
5401 changed files with 325410 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Sample15_9</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@@ -0,0 +1,12 @@
#Wed Jan 05 21:56:15 CST 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

View File

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
android:versionCode="1"
android:versionName="1.0" package="com.bn.Sample15_9">
<application android:icon="@drawable/icon" android:label="@string/app_name">
<activity android:name=".Sample15_9_Activity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:targetSdkVersion="8"></uses-sdk>
</manifest>

View File

@@ -0,0 +1,48 @@
precision mediump float;
uniform vec3 uColor; //vertex color
uniform vec3 uPosition;//vertex position (already transformed)
uniform vec3 uNormal;//normal vector (already transformed)
uniform vec3 uLightLocation;//light source position
uniform vec3 uCamera; //camera position
uniform int isShadow; //shadow-drawing flag
//positional-light lighting computation
void pointLight
(
inout vec4 ambient,//ambient light component
inout vec4 diffuse,//diffuse light component
inout vec4 specular,//specular light component
in vec4 lightAmbient,//light's ambient intensity
in vec4 lightDiffuse,//light's diffuse intensity
in vec4 lightSpecular//light's specular intensity
)
{
ambient=lightAmbient; //final ambient intensity is taken directly from the light
vec3 newNormal=normalize(uNormal);
//vector from the surface point to the camera
vec3 eye= normalize(uCamera-uPosition);
//vector from the surface point to the light source
vec3 vp = normalize(uLightLocation-uPosition);
vec3 halfVector=normalize(vp+eye); //half vector between view and light directions
float shininess=50.0; //shininess; smaller means rougher surface
float nDotViewPosition=max(0.0,dot(newNormal,vp)); //dot(normal, vp) clamped at 0
diffuse=lightDiffuse*nDotViewPosition; //final diffuse intensity
float nDotViewHalfVector=dot(newNormal,halfVector); //dot of normal and half vector
float powerFactor=max(0.0,pow(nDotViewHalfVector,shininess)); //specular power factor
specular=lightSpecular*powerFactor; //final specular intensity
}
void main()
{
//base color
vec4 baseColor = vec4(uColor,1.0);
if(isShadow == 0){
vec4 ambient, diffuse, specular;
pointLight(ambient,diffuse,specular,
vec4(0.15,0.15,0.15,1.0),vec4(0.9,0.9,0.9,1.0),vec4(0.7,0.7,0.7,1.0));
//final fragment color
gl_FragColor=baseColor*ambient + baseColor*diffuse + baseColor*specular;
} else {//if the point is in shadow apply only ambient light
gl_FragColor=baseColor*vec4(0.15,0.15,0.15,1.0);
}
}

View File

@@ -0,0 +1,46 @@
precision mediump float;
uniform vec3 uLightLocation;//light source position
uniform mat4 uMMatrix; //model transform matrix
uniform vec3 uCamera; //camera position
uniform vec3 uColor; //vertex color
varying vec3 vPosition;//vertex position received from the vertex shader
varying vec3 vNormal;//vertex normal received from the vertex shader
//positional-light lighting computation
void pointLight( //positional-light lighting computation
in vec3 normal, //normal vector
inout vec4 ambient, //final ambient intensity
inout vec4 diffuse, //final diffuse intensity
inout vec4 specular, //final specular intensity
in vec3 lightLocation, //light source position
in vec4 lightAmbient, //ambient light intensity
in vec4 lightDiffuse, //diffuse light intensity
in vec4 lightSpecular //specular light intensity
){
ambient=lightAmbient; //final ambient intensity is taken directly from the light
vec3 normalTarget=vPosition+normal; //compute the transformed normal
vec3 newNormal=(uMMatrix*vec4(normalTarget,1)).xyz-(uMMatrix*vec4(vPosition,1)).xyz;
newNormal=normalize(newNormal); //normalize the normal vector
//vector from the surface point to the camera
vec3 eye= normalize(uCamera-(uMMatrix*vec4(vPosition,1)).xyz);
//vector vp from the surface point to the light source
vec3 vp= normalize(lightLocation-(uMMatrix*vec4(vPosition,1)).xyz);
vec3 halfVector=normalize(vp+eye); //half vector between view and light directions
float shininess=50.0; //shininess; smaller means rougher surface
float nDotViewPosition=max(0.0,dot(newNormal,vp)); //dot(normal, vp) clamped at 0
diffuse=lightDiffuse*nDotViewPosition; //final diffuse intensity
float nDotViewHalfVector=dot(newNormal,halfVector); //dot of normal and half vector
float powerFactor=max(0.0,pow(nDotViewHalfVector,shininess)); //specular power factor
specular=lightSpecular*powerFactor; //final specular intensity
}
void main()
{
//base color
vec4 baseColor=vec4(uColor,1.0);
vec4 ambient, diffuse, specular;
pointLight(normalize(vNormal),ambient,diffuse,specular,uLightLocation,
vec4(0.15,0.15,0.15,1.0),vec4(0.9,0.9,0.9,1.0),vec4(0.7,0.7,0.7,1.0));
//final fragment color
gl_FragColor=baseColor*ambient + baseColor*diffuse + baseColor*specular;
}

View File

@@ -0,0 +1,50 @@
precision mediump float;
uniform highp vec3 uLightLocation;//light source position
uniform highp mat4 uMMatrix; //model transform matrix
uniform vec3 uCamera; //camera position
uniform vec3 uColor; //vertex color
uniform highp int isShadow; //shadow-drawing flag
varying vec3 vPosition;//vertex position received from the vertex shader
varying vec3 vNormal;//vertex normal received from the vertex shader
//positional-light lighting computation
void pointLight( //positional-light lighting computation
in vec3 normal, //normal vector
inout vec4 ambient, //final ambient intensity
inout vec4 diffuse, //final diffuse intensity
inout vec4 specular, //final specular intensity
in vec3 lightLocation, //light source position
in vec4 lightAmbient, //ambient light intensity
in vec4 lightDiffuse, //diffuse light intensity
in vec4 lightSpecular //specular light intensity
){
ambient=lightAmbient; //final ambient intensity is taken directly from the light
vec3 normalTarget=vPosition+normal; //compute the transformed normal
vec3 newNormal=(uMMatrix*vec4(normalTarget,1)).xyz-(uMMatrix*vec4(vPosition,1)).xyz;
newNormal=normalize(newNormal); //normalize the normal vector
//vector from the surface point to the camera
vec3 eye= normalize(uCamera-(uMMatrix*vec4(vPosition,1)).xyz);
//vector vp from the surface point to the light source
vec3 vp= normalize(lightLocation-(uMMatrix*vec4(vPosition,1)).xyz);
vec3 halfVector=normalize(vp+eye); //half vector between view and light directions
float shininess=50.0; //shininess; smaller means rougher surface
float nDotViewPosition=max(0.0,dot(newNormal,vp)); //dot(normal, vp) clamped at 0
diffuse=lightDiffuse*nDotViewPosition; //final diffuse intensity
float nDotViewHalfVector=dot(newNormal,halfVector); //dot of normal and half vector
float powerFactor=max(0.0,pow(nDotViewHalfVector,shininess)); //specular power factor
specular=lightSpecular*powerFactor; //final specular intensity
}
void main()
{
if(isShadow==0) {//not a shadow fragment
vec4 baseColor=vec4(uColor,1.0);//base color
vec4 ambient, diffuse, specular;
pointLight(normalize(vNormal),ambient,diffuse,specular,uLightLocation,
vec4(0.15,0.15,0.15,1.0),vec4(0.9,0.9,0.9,1.0),vec4(0.7,0.7,0.7,1.0));
gl_FragColor = baseColor*diffuse + baseColor*specular + baseColor*ambient;
} else {//shadow fragment
vec4 baseColor = vec4(0.2,0.8,0.2,1.0);//base color (plane color)
gl_FragColor = baseColor*vec4(0.15,0.15,0.15,1.0);//final fragment color is the shadow color
}
}

View File

@@ -0,0 +1,6 @@
uniform mat4 uMVPMatrix; //total (model-view-projection) transform matrix
attribute vec3 aPosition; //vertex position
void main()
{
gl_Position = uMVPMatrix * vec4(aPosition,1.0); //compute this vertex's position from the total transform matrix
}

View File

@@ -0,0 +1,16 @@
uniform mat4 uMVPMatrix; //total (model-view-projection) transform matrix
attribute vec3 aPosition; //vertex position
attribute vec3 aNormal; //normal vector
varying vec3 vPosition;//vertex position passed to the fragment shader
varying vec3 vNormal;//vertex normal passed to the fragment shader
void main()
{
//compute this vertex's position from the total transform matrix
gl_Position = uMVPMatrix * vec4(aPosition,1);
//pass the vertex position to the fragment shader
vPosition = aPosition;
//pass the vertex normal to the fragment shader
vNormal = aNormal;
}

View File

@@ -0,0 +1,32 @@
uniform mat4 uMVPMatrix; //total (model-view-projection) transform matrix
uniform int isShadow;//shadow-drawing flag
uniform vec3 uLightLocation; //light source position
uniform mat4 uMMatrix; //model transform matrix
uniform mat4 uMProjCameraMatrix; //combined projection * camera matrix
attribute vec3 aPosition; //vertex position
attribute vec3 aNormal; //normal vector
varying vec3 vPosition;//vertex position passed to the fragment shader
varying vec3 vNormal;//vertex normal passed to the fragment shader
void main()
{
if(isShadow==1)
{//drawing the shadow: project the vertex onto the ground plane
vec3 A=vec3(0.0,0.0,0.0);//any point on the projection plane
vec3 n=vec3(0.0,1.0,0.0);//normal of the projection plane
vec3 S=uLightLocation; //light source position
vec3 V=(uMMatrix*vec4(aPosition,1)).xyz; //vertex position after translation/rotation
vec3 VL=S+(V-S)*(dot(n,(A-S))/dot(n,(V-S)));//projected point on the plane
gl_Position = uMProjCameraMatrix*vec4(VL,1); //position of the projected vertex
}
else
{
gl_Position = uMVPMatrix * vec4(aPosition,1); //compute this vertex's position from the total transform matrix
}
//pass the vertex position to the fragment shader
vPosition = aPosition;
//pass the vertex normal to the fragment shader
vNormal = aNormal;
}

View File

@@ -0,0 +1,13 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Indicates whether an apk should be generated for each density.
split.density=false
# Project target.
target=android-8

View File

@@ -0,0 +1,19 @@
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package com.bn.Sample15_9;
public final class R {
public static final class attr {
}
public static final class drawable {
public static final int icon=0x7f020000;
}
public static final class string {
public static final int app_name=0x7f030000;
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Sample15_9</string>
</resources>

View File

@@ -0,0 +1,89 @@
package com.bn.Sample15_9;
/*
 * Unit sphere centered at the origin (radius 1).
 * Note: normals remain correct even after the sphere is transformed into an
 * ellipsoid, because the normal is taken from the pre-transform (generic)
 * surface point.
 */
public class Ball extends HitObject {
    public Ball(Camera cam, Color3f color) {
        this.cam = cam;
        this.color = color;
    }

    /**
     * Quadratic coefficients for intersecting the generic (inverse-transformed)
     * ray with the unit sphere: A = d.d, B = s.d, C = s.s - 1.
     *
     * @param genRay ray already mapped into the sphere's generic space
     * @return {A, B, discrim} where discrim = B*B - A*C
     */
    private double[] sphereQuadratic(Ray genRay) {
        double A = Vector3.dot(genRay.dir, genRay.dir);
        double B = Vector3.dot(genRay.start, genRay.dir);
        double C = Vector3.dot(genRay.start, genRay.start) - 1.0f;
        return new double[]{A, B, B * B - A * C};
    }

    @Override
    public boolean hit(Ray r, Intersection inter) {
        /*
         * Intersecting the ray S+ct with a transformed object takes three steps:
         * 1. compute the inverse-transformed ray S'+c't;
         * 2. intersect that generic ray with the generic object to get the hit time t;
         * 3. substitute t into the ORIGINAL ray S+ct to get the actual hit point.
         * genRay is therefore used only to solve for t; hit points are computed
         * from the untransformed ray r.
         */
        Ray genRay = new Ray(); // inverse-transformed ray
        xfrmRay(genRay, getInvertMatrix(), r); // map r into generic space via the inverse matrix
        double[] q = sphereQuadratic(genRay);
        double A = q[0], B = q[1], discrim = q[2];
        if (discrim < 0.0) { // no intersection
            return false;
        }
        int num = 0; // number of hits found so far
        // BUGFIX(review): the original cast Math.sqrt() to float before storing
        // into a double, needlessly discarding precision; the cast is removed.
        double discRoot = Math.sqrt(discrim);
        double t1 = (-B - discRoot) / A; // first hit time
        if (t1 > 0.00001) {
            inter.hit[0].hitTime = t1;
            inter.hit[0].hitObject = this;
            inter.hit[0].isEntering = true;
            inter.hit[0].surface = 0;
            Point3 P = rayPos(r, t1); // hit point (computed with the untransformed ray)
            inter.hit[0].hitPoint.set(P); // transformed hit position
            Point3 preP = xfrmPtoPreP(P); // map the transformed point back to generic space
            inter.hit[0].hitNormal.set(preP); // for a unit sphere the generic point IS the normal
            num = 1; // one hit so far
        }
        double t2 = (-B + discRoot) / A; // second hit time
        if (t2 > 0.00001) {
            inter.hit[num].hitTime = t2;
            inter.hit[num].hitObject = this;
            // NOTE(review): isEntering is set true for the far hit too, matching
            // the original code; looks suspicious for an exit point but preserved.
            inter.hit[num].isEntering = true;
            inter.hit[num].surface = 0;
            Point3 P = rayPos(r, t2); // hit point (computed with the untransformed ray)
            inter.hit[num].hitPoint.set(P);
            Point3 preP = xfrmPtoPreP(P); // map the transformed point back to generic space
            inter.hit[num].hitNormal.set(preP); // generic point doubles as the normal
            num++; // another valid hit
        }
        inter.numHits = num;
        return (num > 0);
    }

    @Override
    public boolean hit(Ray r) {
        // Shadow-feeler variant: only decides WHETHER the sphere blocks the ray.
        Ray genRay = new Ray(); // inverse-transformed ray
        xfrmRay(genRay, getInvertMatrix(), r);
        double[] q = sphereQuadratic(genRay);
        double A = q[0], B = q[1], discrim = q[2];
        if (discrim < 0.0) { // no intersection
            return false;
        }
        double discRoot = Math.sqrt(discrim); // BUGFIX(review): float cast removed, as above
        double t1 = (-B - discRoot) / A; // first hit time
        // Only hits with t in (0,1) count: beyond t=1 the occluder is on the far
        // side of the light source and casts no shadow here.
        // NOTE(review): only the near root t1 is tested, as in the original.
        if (t1 < 0 || t1 > 1) {
            return false;
        }
        return true;
    }
}

View File

@@ -0,0 +1,85 @@
package com.bn.Sample15_9;
import static com.bn.Sample15_9.Constant.*;
import android.opengl.Matrix;
//Ray-tracing camera: holds eye/look/up and the derived u,v,n basis, and
//drives the per-block ray-tracing loop over the viewport.
public class Camera {
Point3 eye,look,up;
Vector3 u,v,n;
Light light;
public Camera(Light light){
this.light = light;
eye=new Point3(); look=new Point3(); up=new Point3();
u=new Vector3(); v=new Vector3(); n=new Vector3();
}
private float[] vMatrix = new float[16];//camera position/orientation (look-at) matrix
//setting the camera is done mainly to compute the u, v, n basis vectors
public void setMyCamera
(
float cx, float cy, float cz, //camera position
float tx, float ty, float tz, //camera target point
float upx, float upy, float upz //camera UP vector
){
Matrix.setLookAtM
(
vMatrix,
0,
cx, cy, cz,
tx, ty, tz,
upx, upy, upz
);
eye.x=cx; eye.y=cy; eye.z=cz;
look.x=tx; look.y=ty; look.z=tz;
up.x=upx; up.y=upy; up.z=upz;
//read u, v, n out of the matrix; note OpenGL stores the vectors column-major
u.x=vMatrix[0]; u.y=vMatrix[4]; u.z=vMatrix[8];
v.x=vMatrix[1]; v.y=vMatrix[5]; v.z=vMatrix[9];
n.x=vMatrix[2]; n.y=vMatrix[6]; n.z=vMatrix[10];
}
//ray-tracing render method: shoots one ray per blockSize-sized pixel block
public void raytrace(Scene scn, ColorRect rect) {
//print start time and start marker
System.out.println("start...");
long start = System.currentTimeMillis();
Ray theRay = new Ray();
theRay.setStart(eye);//rays start at the eye
//start ray tracing
for (int col = 0; col < nCols; col += blockSize) {
for (int row = 0; row < nRows; row += blockSize) {
//compute the ray direction from the row/column position
//(nCols/nRows are float constants, so 2*col/nCols is float division)
Vector3 dir1 = n.multiConst(-N_3D);
Vector3 dir2 = u.multiConst(W_3D*(2*col/nCols-1));
Vector3 dir3 = v.multiConst(H_3D*(2*row/nRows-1));
Vector3 dir = dir1.add(dir2).add(dir3);
//set the ray direction
theRay.setDir(dir);
Color3f clr = new Color3f();
Point3 vertexPos = new Point3();
Vector3 normal = new Vector3();
//compute color/position/normal/shadow for this ray
int isShadowFlag = scn.shade(theRay, clr, vertexPos, normal);
//no intersection means background: skip drawing, continue with the next ray
if(isShadowFlag == -1){
continue;
}
rect.setColor(clr.red, clr.green, clr.blue);//color
rect.setPos3D(vertexPos.x, vertexPos.y, vertexPos.z);//transformed vertex position
rect.setNormal3D(normal.x, normal.y, normal.z);//transformed normal at the vertex
rect.setLightPos3D(light.pos.x, light.pos.y, light.pos.z);//light position
rect.setCameraPos3D(eye.x, eye.y, eye.z);//camera position
rect.setShadow(isShadowFlag);//whether the point is in shadow
rect.setColRow(col, row);//which row/column the block occupies
rect.drawSelf();//draw the basic block
}
}
//print end time and finish marker
long end = System.currentTimeMillis();
System.out.println("time="+(end-start)/1000.0+"s");
System.out.println("finish...");
}
}

View File

@@ -0,0 +1,29 @@
package com.bn.Sample15_9;
// Simple RGB color value with float components.
public class Color3f {
    float red;
    float green;
    float blue;

    // Default color: all components zero.
    public Color3f(){}

    // Construct from explicit components.
    public Color3f(float red, float green, float blue) {
        super();
        this.red = red;
        this.green = green;
        this.blue = blue;
    }

    // Construct from a {r, g, b} array (delegates to the 3-arg constructor).
    public Color3f(float[] clr) {
        this(clr[0], clr[1], clr[2]);
    }

    // Copy the components of another color into this one.
    public void set(Color3f clr){
        red = clr.red;
        green = clr.green;
        blue = clr.blue;
    }

    @Override
    public String toString() {
        return "Color3:("+this.red+","+this.green+","+this.blue+")";
    }
}

View File

@@ -0,0 +1,174 @@
package com.bn.Sample15_9;
import static com.bn.Sample15_9.Constant.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import android.opengl.GLES20;
//A screen-space rectangle ("pixel block"): the CPU ray tracer computes
//color/position/normal/shadow values per block and this class pushes them
//into the shaders and draws the block at its viewport position.
public class ColorRect
{
int mProgram;//id of the custom shader program
int muMVPMatrixHandle;//handle of the total transform (MVP) matrix uniform
int maPositionHandle; //handle of the vertex position attribute
//quantities describing the point in the 3D (ray-traced) world
int muColorHandle; //handle of the fragment color uniform
int mu3DPosHandle; //handle of the 3D-world vertex position uniform
int muNormalHandle; //handle of the vertex normal uniform
int muLightLocationHandle;//handle of the light position uniform
int muCameraHandle; //handle of the camera position uniform
int muIsShadow;//handle of the shadow-flag uniform
String mVertexShader;//vertex shader source
String mFragmentShader;//fragment shader source
FloatBuffer mVertexBuffer;//vertex coordinate data buffer
int vCount=0;
float[] color3 = new float[3];//vertex color in the 3D world
float[] vertexPos3D = new float[3];//vertex position in the 3D world
float[] normal3D = new float[3];//vertex normal in the 3D world
float[] lightPos3D = new float[3];//light position in the 3D world
float[] cameraPos3D = new float[3];//camera position in the 3D world
int isShadow;//flag: is this point in shadow
float u;//position of this block on the viewport
float v;
public ColorRect(MySurfaceView mv)
{
//initialize the vertex coordinate data
initVertexData();
//initialize the shaders
intShader(mv);
}
//method that initializes the vertex coordinate data
public void initVertexData()
{
//vertex coordinate data initialization================begin============================
vCount=6;
float vertices[]=new float[]
{
0,0,0,//0
Constant.blockSize,0,0,//1
Constant.blockSize,Constant.blockSize,0,//2
0,0,0,//0
Constant.blockSize,Constant.blockSize,0,//2
0,Constant.blockSize,0//3
};
//create the vertex coordinate data buffer
//vertices.length*4 because each float takes four bytes
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length*4);
vbb.order(ByteOrder.nativeOrder());//set the byte order
mVertexBuffer = vbb.asFloatBuffer();//convert to a FloatBuffer
mVertexBuffer.put(vertices);//put the vertex coordinates into the buffer
mVertexBuffer.position(0);//set the buffer start position
//NOTE: byte order differs between platforms, so any non-byte data must go
//through a ByteBuffer with nativeOrder() set, otherwise it may misbehave
//vertex coordinate data initialization================end============================
}
//initialize the shaders
public void intShader(MySurfaceView mv)
{
//load the vertex shader source
mVertexShader=ShaderUtil.loadFromAssetsFile("vertex.sh", mv.getResources());
//load the fragment shader source
mFragmentShader=ShaderUtil.loadFromAssetsFile("frag.sh", mv.getResources());
//create the program from the vertex and fragment shaders
mProgram = ShaderUtil.createProgram(mVertexShader, mFragmentShader);
//get the handle of the vertex position attribute
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
//get the handle of the total transform matrix uniform
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
//quantities from the 3D world
//get the handle of the 3D-world vertex color uniform
muColorHandle = GLES20.glGetUniformLocation(mProgram, "uColor");
//get the handle of the 3D-world vertex position uniform
mu3DPosHandle = GLES20.glGetUniformLocation(mProgram, "uPosition");
//get the handle of the vertex normal uniform
muNormalHandle = GLES20.glGetUniformLocation(mProgram, "uNormal");
//get the handle of the light position uniform
muLightLocationHandle = GLES20.glGetUniformLocation(mProgram, "uLightLocation");
//get the handle of the camera position uniform
muCameraHandle = GLES20.glGetUniformLocation(mProgram, "uCamera");
//get the handle of the shadow-flag uniform
muIsShadow=GLES20.glGetUniformLocation(mProgram, "isShadow");
}
//draw this block at its current (u, v) viewport position
public void drawSelf() {
MatrixState.pushMatrix();
MatrixState.translate(u, v, 0);
// select the shader program
GLES20.glUseProgram(mProgram);
// pass the final transform matrix to the shader program
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false,
MatrixState.getFinalMatrix(), 0);
// quantities from the 3D world
// pass the 3D-world vertex color
GLES20.glUniform3fv(muColorHandle, 1, color3, 0);
// pass the 3D-world vertex position
GLES20.glUniform3fv(mu3DPosHandle, 1, vertexPos3D, 0);
// pass the 3D-world vertex normal
GLES20.glUniform3fv(muNormalHandle, 1, normal3D, 0);
// pass the 3D-world light position
GLES20.glUniform3fv(muLightLocationHandle, 1, lightPos3D, 0);
// pass the 3D-world camera position
GLES20.glUniform3fv(muCameraHandle, 1, cameraPos3D, 0);
// pass the shadow flag
GLES20.glUniform1i(muIsShadow, isShadow);
// pass the vertex position data into the pipeline
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT,
false, 3 * 4, mVertexBuffer);
// enable the vertex position array
GLES20.glEnableVertexAttribArray(maPositionHandle);
// draw the rectangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vCount);
MatrixState.popMatrix();
}
//set the 3D-world color of this block
public void setColor(float r,float g,float b){
this.color3[0] = r;
this.color3[1] = g;
this.color3[2] = b;
}
//set the 3D-world vertex position of this block
public void setPos3D(float x,float y,float z){
this.vertexPos3D[0] = x;
this.vertexPos3D[1] = y;
this.vertexPos3D[2] = z;
}
//set the 3D-world normal of this block
public void setNormal3D(float x,float y,float z){
this.normal3D[0] = x;
this.normal3D[1] = y;
this.normal3D[2] = z;
}
//set the 3D-world light position
public void setLightPos3D(float x,float y,float z){
this.lightPos3D[0] = x;
this.lightPos3D[1] = y;
this.lightPos3D[2] = z;
}
//set the 3D-world camera position
public void setCameraPos3D(float x,float y,float z){
this.cameraPos3D[0] = x;
this.cameraPos3D[1] = y;
this.cameraPos3D[2] = z;
}
//set the shadow flag (0 = lit, 1 = in shadow)
public void setShadow(int isShadow){
this.isShadow = isShadow;
}
//set the block's viewport position directly
public void setPos(float u,float v){
this.u=u;
this.v=v;
}
public void setColRow(int col,int row){
//compute the block's screen position from its row/column indices
//(nCols/nRows are float constants, so 2*col/nCols is float division)
float u=-W+W*(2*col/nCols);
float v=-H+H*(2*row/nRows);
this.setPos(u, v);//set the block position
}
}

View File

@@ -0,0 +1,38 @@
package com.bn.Sample15_9;
// Constants shared across the sample: screen, viewport, near plane,
// scene objects, camera and light parameters.
public class Constant {
    // screen quantities
    public static final float SCREEN_WIDTH = 800;// screen width in pixels
    public static final float SCREEN_HEIGHT = 480;// screen height in pixels
    // rendering quantities
    public static final float blockSize = 8f;// edge length of one basic block, in pixels
    public static final float W = SCREEN_WIDTH / 2.0f;// half viewport width/height
    public static final float H = SCREEN_HEIGHT / 2.0f;
    public static final float ratio = W / H;// viewport aspect ratio
    public static final float nRows = SCREEN_HEIGHT;// total pixel rows/columns
    public static final float nCols = SCREEN_WIDTH;
    // near-plane quantities in world space
    public static final float N_3D = 24;// distance from the camera to the near plane
    public static final float W_3D = ratio;// near-plane half width
    public static final float H_3D = 1.0f;// near-plane half height
    // scene object quantities
    public static final float R = 0.6f;// sphere radius
    public static final float CENTER_DIS = 0.7f;// distance from each sphere to the scene center
    public static final float PLANE_WIDTH = 3.5f;// plane width
    public static final float PLANE_HEIGHT = 4f;// plane height
    // NOTE(review): these arrays are public and mutable; callers are trusted
    // not to modify them (changing to accessors would break existing callers).
    public static final float[] BALL1_COLOR = {0.8f,0.2f,0.2f};// color of ball 1
    public static final float[] BALL2_COLOR = {0.2f,0.2f,0.8f};// color of ball 2
    public static final float[] PLANE_COLOR = {0.2f,0.8f,0.2f};// plane color
    // camera parameters
    public static final float CAM_X = 15;
    public static final float CAM_Y = 7;
    public static final float CAM_Z = 32;
    // light source parameters
    public static final float LIGHT_X = 100;
    public static final float LIGHT_Y = 80;
    public static final float LIGHT_Z = 0;
    // tiny positive epsilon used in shadow computations
    // (name kept as-is for existing callers, though "MINIMUM" was clearly meant)
    public static final float MNIMUM = 0.00001f;

    // FIX(review): constants class — prevent instantiation.
    private Constant() {
    }
}

View File

@@ -0,0 +1,32 @@
package com.bn.Sample15_9;
//Information about a single ray/object intersection.
public class HitInfo{
double hitTime;//hit time along the ray
HitObject hitObject;//the object that was hit
boolean isEntering;//whether the ray is entering or leaving the object
int surface;//which surface was hit
Point3 hitPoint;//hit point coordinates (after the object's transform)
Vector3 hitNormal;//normal at the hit point (before the transform)
public HitInfo(){
hitPoint = new Point3();
hitNormal = new Vector3();
}
/*
 * NOTE(review): the original author flagged this copy method as possibly
 * wrong. As written it deep-copies every value field and deliberately shares
 * hitObject by reference (objects are not duplicated), which looks correct.
 */
public void set(HitInfo hit){
this.hitTime=hit.hitTime;
this.hitObject=hit.hitObject;//the referenced object is intentionally not copied
this.isEntering=hit.isEntering;
this.surface=hit.surface;
this.hitPoint.set(hit.hitPoint);
this.hitNormal.set(hit.hitNormal);
}
@Override
public String toString() {
return "hitTime"+hitTime+",hitPoint"+hitPoint;
}
}

View File

@@ -0,0 +1,82 @@
package com.bn.Sample15_9;
import android.opengl.Matrix;
//Base class for every ray-traceable object: holds the object's color and
//model transform matrix, plus helpers for mapping rays, normals and points
//between world space and the object's generic (untransformed) space.
public abstract class HitObject {
Color3f color;
private float[] myMatrix;//this object's model transform matrix
Camera cam;
public abstract boolean hit(Ray ray,Intersection inter); //compute full intersection information
public abstract boolean hit(Ray ray); //only test whether the ray intersects at all
public Color3f getColor() {
return color;
}
//position along the ray at time t (uses the UNtransformed ray)
public Point3 rayPos(Ray r,double t){
return cam.eye.addVec(r.dir.multiConst((float)t)); //eye + dir*t gives the current point
}
//transform ray r into genRay (left-multiply by the inverse of the model matrix)
public void xfrmRay(Ray genRay, float[] invTransf, Ray r){
//transform the start point
float[] genStart = new float[4];
Matrix.multiplyMV(genStart, 0, invTransf, 0, r.start.toQici4(), 0);
genRay.start.set(genStart);//take the first three homogeneous components as start
//transform the direction
float[] genDir = new float[4];
Matrix.multiplyMV(genDir, 0, invTransf, 0, r.dir.toQici4(), 0);
genRay.dir.set(genDir);//take the first three homogeneous components as dir
}
//transform a normal (left-multiply by the inverse-transpose of the model matrix)
public void xfrmNormal(Vector3 genNormal, float[] invTranspM, Vector3 normal){
//transform the normal
float[] tmpNormal = new float[4];
Matrix.multiplyMV(tmpNormal, 0, invTranspM, 0, normal.toQici4(), 0);
genNormal.set(tmpNormal);//take the first three homogeneous components as the normal
}
//map a transformed point back to its pre-transform position: multiply by the inverse matrix
public Point3 xfrmPtoPreP(Point3 P){
//apply the inverse transform to recover the original point
float[] inverM = getInvertMatrix();//get the inverse transform matrix
float[] preP = new float[4];
Matrix.multiplyMV(preP, 0, inverM, 0, P.toQici4(), 0);//compute the pre-transform point
return new Point3(preP);//for the generic unit sphere this point doubles as the normal
}
//initialize the transform matrix to the identity
public void initMyMatrix() {
myMatrix = new float[16];
Matrix.setIdentityM(myMatrix, 0);
}
//get this object's transform matrix
public float[] getMatrix(){
return myMatrix;
}
//get the inverse of this object's transform matrix
public float[] getInvertMatrix(){
float[] invM = new float[16];
Matrix.invertM(invM, 0, myMatrix, 0);//invert the matrix
return invM;
}
//get the inverse-transpose of this object's transform matrix
//(transpose-then-invert equals invert-then-transpose, so the order is fine)
public float[] getInvertTransposeMatrix(){
float[] invTranspM = new float[16];
Matrix.transposeM(invTranspM, 0, myMatrix, 0);//transpose first
Matrix.invertM(invTranspM, 0, invTranspM, 0);//then invert; both arguments must be the same array!
return invTranspM;
}
// translate along the x/y/z axes
public void translate(float x, float y, float z) {
Matrix.translateM(myMatrix, 0, x, y, z);
}
// rotate about the given axis
public void rotate(float angle, float x, float y, float z) {
Matrix.rotateM(myMatrix, 0, angle, x, y, z);
}
// scale along the x/y/z axes
public void scale(float x, float y, float z) {
Matrix.scaleM(myMatrix, 0, x, y, z);
}
}

View File

@@ -0,0 +1,20 @@
package com.bn.Sample15_9;
// Records all intersections of one ray with one object.
public class Intersection {
    // Maximum number of stored hits per ray/object pair (may need raising later).
    private static final int MAX_HITS = 8;
    int numHits; // number of intersections with a positive hit time
    HitInfo[] hit = new HitInfo[MAX_HITS]; // hit list
    public Intersection(){
        // Pre-create every entry so set() can copy into them in place.
        // FIX(review): iterate over hit.length instead of repeating the magic 8.
        for(int i = 0; i < hit.length; i++){
            hit[i] = new HitInfo();
        }
    }
    // Deep-copy another intersection record; entries are copied field by field,
    // never shared by reference.
    public void set(Intersection inter){
        for(int i = 0; i < hit.length; i++){
            this.hit[i].set(inter.hit[i]);
        }
        this.numHits = inter.numHits;
    }
}

View File

@@ -0,0 +1,12 @@
package com.bn.Sample15_9;
//Light source (point light).
//NOTE(review): the original comments were mojibake (GBK bytes decoded as
//Latin-1); they read "light class" / "light position" and are restored here.
public class Light {
Point3 pos;//light position in world space
public Light(){
pos = new Point3();
}
public Light(Point3 pos) {
this.pos = pos;
}
}

View File

@@ -0,0 +1,190 @@
package com.bn.Sample15_9;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import android.opengl.Matrix;
//Holds the global matrix state (projection, view and current model matrices)
//used by the renderer; all members are static.
public class MatrixState
{
private static float[] mProjMatrix = new float[16];//4x4 projection matrix
private static float[] mVMatrix = new float[16];//camera position/orientation (look-at) matrix
private static float[] currMatrix;//current model transform matrix
public static float[] lightLocation=new float[]{0,0,0};//positional light location
public static FloatBuffer cameraFB;
public static FloatBuffer lightPositionFB;
//stack used to save/restore the model transform matrix (max depth 10)
static float[][] mStack=new float[10][16];
static int stackTop=-1;
public static void setInitStack()//reset the current matrix to the identity (rotation by 0)
{
currMatrix=new float[16];
Matrix.setRotateM(currMatrix, 0, 0, 1, 0, 0);
}
public static void pushMatrix()//save the current transform matrix
{
stackTop++;
for(int i=0;i<16;i++)
{
mStack[stackTop][i]=currMatrix[i];
}
}
public static void popMatrix()//restore the transform matrix
{
for(int i=0;i<16;i++)
{
currMatrix[i]=mStack[stackTop][i];
}
stackTop--;
}
public static void translate(float x,float y,float z)//translate along the x/y/z axes
{
Matrix.translateM(currMatrix, 0, x, y, z);
}
public static void rotate(float angle,float x,float y,float z)//rotate about the given axis
{
Matrix.rotateM(currMatrix,0,angle,x,y,z);
}
public static void scale(float x,float y,float z)//scale along the x/y/z axes
{
Matrix.scaleM(currMatrix,0, x, y, z);
}
//multiply a caller-supplied matrix into the current matrix
public static void matrix(float[] self)
{
float[] result=new float[16];
Matrix.multiplyMM(result,0,currMatrix,0,self,0);
currMatrix=result;
}
//camera setup
static ByteBuffer llbb= ByteBuffer.allocateDirect(3*4);
static float[] cameraLocation=new float[3];//camera position
public static void setCamera
(
float cx, //camera position x
float cy, //camera position y
float cz, //camera position z
float tx, //camera target x
float ty, //camera target y
float tz, //camera target z
float upx, //camera UP vector x component
float upy, //camera UP vector y component
float upz //camera UP vector z component
)
{
Matrix.setLookAtM
(
mVMatrix,
0,
cx,
cy,
cz,
tx,
ty,
tz,
upx,
upy,
upz
);
cameraLocation[0]=cx;
cameraLocation[1]=cy;
cameraLocation[2]=cz;
llbb.clear();
llbb.order(ByteOrder.nativeOrder());//set the byte order
cameraFB=llbb.asFloatBuffer();
cameraFB.put(cameraLocation);
cameraFB.position(0);
}
//set the perspective (frustum) projection parameters
public static void setProjectFrustum
(
float left, //left edge of the near plane
float right, //right edge of the near plane
float bottom, //bottom edge of the near plane
float top, //top edge of the near plane
float near, //near plane distance
float far //far plane distance
)
{
Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
}
//set the orthographic projection parameters
public static void setProjectOrtho
(
float left, //left edge of the near plane
float right, //right edge of the near plane
float bottom, //bottom edge of the near plane
float top, //top edge of the near plane
float near, //near plane distance
float far //far plane distance
)
{
Matrix.orthoM(mProjMatrix, 0, left, right, bottom, top, near, far);
}
//compute the total (projection * view * model) matrix for the current object
static float[] mMVPMatrix=new float[16];
public static float[] getFinalMatrix()
{
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, currMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
return mMVPMatrix;
}
//get the current model transform matrix
public static float[] getMMatrix()
{
return currMatrix;
}
//get the projection matrix
public static float[] getProjMatrix()
{
return mProjMatrix;
}
//get the camera (view) matrix
public static float[] getCaMatrix()
{
return mVMatrix;
}
//set the light position
static ByteBuffer llbbL = ByteBuffer.allocateDirect(3*4);
public static void setLightLocation(float x,float y,float z)
{
llbbL.clear();
lightLocation[0]=x;
lightLocation[1]=y;
lightLocation[2]=z;
llbbL.order(ByteOrder.nativeOrder());//set the byte order
lightPositionFB=llbbL.asFloatBuffer();
lightPositionFB.put(lightLocation);
lightPositionFB.position(0);
}
//compute the combined projection * view matrix
public static float[] getViewProjMatrix()
{
float[] mMVPMatrix=new float[16];
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
return mMVPMatrix;
}
}

View File

@@ -0,0 +1,78 @@
package com.bn.Sample15_9;
import android.opengl.GLSurfaceView;
import android.opengl.GLES20;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import static com.bn.Sample15_9.Constant.*;
//GLSurfaceView that drives the CPU ray tracer and displays its output.
class MySurfaceView extends GLSurfaceView
{
private SceneRenderer mRenderer;//scene renderer
public MySurfaceView(Context context) {
super(context);
this.setEGLContextClientVersion(2); //use OpenGL ES 2.0
mRenderer = new SceneRenderer(); //create the scene renderer
setRenderer(mRenderer); //install the renderer
//render mode: =========render one frame only=========
//The renderer only renders when the surface is created, or when requestRender() is called
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
private class SceneRenderer implements GLSurfaceView.Renderer
{
Camera cam;
Scene scn;
Light light;
ColorRect rect;
public void onDrawFrame(GL10 gl)
{
//clear the color and depth buffers
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
/*
 * The orthographic projection and camera set here only serve to draw
 * the 3D-world squares directly onto the viewport.
 */
//compute the orthographic projection matrix
MatrixState.setProjectOrtho(-W, W, -H, H, 1, 2);
//camera position used when drawing the rectangles
MatrixState.setCamera(0, 0, 1, 0, 0, 0, 0, 1, 0);
//real camera position used by the ray-tracing algorithm
cam.setMyCamera(CAM_X, CAM_Y, CAM_Z, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//apply the scene objects' transforms
scn.transform();
//start ray-trace rendering of the scene
cam.raytrace(scn, rect);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
//set the viewport size and position
GLES20.glViewport(0, 0, (int)nCols, (int)nRows);
/*
 * The orthographic projection and camera set here only serve to draw
 * the 3D-world squares directly onto the viewport.
 */
//compute the orthographic projection matrix
MatrixState.setProjectOrtho(-W, W, -H, H, 1, 2);
//set the camera position
MatrixState.setCamera(0, 0, 1, 0, 0, 0, 0, 1, 0);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
//set the background clear color (RGBA)
GLES20.glClearColor(0,0,0,1);
//enable back-face culling
GLES20.glEnable(GLES20.GL_CULL_FACE);
//enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
//initialize the transform matrix
MatrixState.setInitStack();
rect = new ColorRect(MySurfaceView.this);
light = new Light(new Point3(LIGHT_X,LIGHT_Y,LIGHT_Z));
cam=new Camera(light);
scn=new Scene(cam, light);
}
}
}

View File

@@ -0,0 +1,64 @@
package com.bn.Sample15_9;
// A point in 3D space with float components.
public class Point3 {
    float x;
    float y;
    float z;

    // Origin point.
    public Point3(){}

    // Construct from explicit coordinates.
    public Point3(float x, float y, float z) {
        super();
        this.x = x;
        this.y = y;
        this.z = z;
    }

    // Construct from an {x, y, z} array (delegates to the 3-arg constructor).
    public Point3(float[] data){
        this(data[0], data[1], data[2]);
    }

    // Copy the coordinates of another point.
    public void set(Point3 p){
        x = p.x;
        y = p.y;
        z = p.z;
    }

    // Copy the components of a vector into this point.
    public void set(Vector3 vec){
        x = vec.x;
        y = vec.y;
        z = vec.z;
    }

    // Copy the first three entries of an array.
    public void set(float p[]){
        x = p[0];
        y = p[1];
        z = p[2];
    }

    // Translate this point by a vector, producing a new point.
    public Point3 addVec(Vector3 vec){
        return new Point3(x + vec.x, y + vec.y, z + vec.z);
    }

    // Point minus point yields the vector from p to this point.
    public Vector3 minus(Point3 p){
        return new Vector3(x - p.x, y - p.y, z - p.z);
    }

    // Point minus vector, component-wise, returned as a vector.
    public Vector3 minus(Vector3 vec){
        return new Vector3(x - vec.x, y - vec.y, z - vec.z);
    }

    @Override
    public String toString() {
        return "Point:("+this.x+","+this.y+","+this.z+")";
    }

    /*
     * Homogeneous coordinates: a fourth component of 1 marks a point,
     * a fourth component of 0 marks a vector.
     */
    // Convert this point to 4-component homogeneous form.
    public float[] toQici4(){
        return new float[]{x, y, z, 1};
    }
}

View File

@@ -0,0 +1,24 @@
package com.bn.Sample15_9;
// A ray: origin point plus direction vector.
public class Ray {
    Point3 start; // ray origin
    Vector3 dir;  // ray direction

    public Ray(){
        this.start = new Point3();
        this.dir = new Vector3();
    }

    // Set the ray origin (copies components; does not alias the argument).
    public void setStart(Point3 start){
        this.start.set(start);
    }

    // Set the ray direction (copies components; does not alias the argument).
    public void setDir(Vector3 dir){
        this.dir.set(dir);
    }
}

View File

@@ -0,0 +1,39 @@
package com.bn.Sample15_9;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Window;
import android.view.WindowManager;
//Launcher activity: runs full-screen in landscape and hosts the GL view.
public class Sample15_9_Activity extends Activity {
private MySurfaceView mGLSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
//switch to full-screen mode (no title bar)
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN ,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
//force landscape orientation
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
//create and install the GLSurfaceView
mGLSurfaceView = new MySurfaceView(this);
setContentView(mGLSurfaceView);
mGLSurfaceView.requestFocus();//grab focus
mGLSurfaceView.setFocusableInTouchMode(true);//make the view touchable
}
@Override
protected void onResume() {
super.onResume();
mGLSurfaceView.onResume();//forward the lifecycle event to the GL view
}
@Override
protected void onPause() {
super.onPause();
mGLSurfaceView.onPause();//forward the lifecycle event to the GL view
}
}

View File

@@ -0,0 +1,127 @@
package com.bn.Sample15_9;
import java.util.ArrayList;
import java.util.List;
import static com.bn.Sample15_9.Constant.*;
//Scene: owns the camera, the light and all hittable objects,
//and answers per-ray shading queries (closest hit + shadow test).
public class Scene {
Camera cam;
Light light;
Ray feeler = new Ray();//shadow feeler ray, reused across shade() calls
List<HitObject> hitObjects;//all objects in the scene
Ball ball1;//ball 1
Ball ball2;//ball 2
Square sqare;//rectangular ground plane
public Scene(Camera cam, Light light){
this.cam=cam;
this.light = light;
hitObjects = new ArrayList<HitObject>();
//create a red ball
ball1 = new Ball(cam, new Color3f(BALL1_COLOR));
//create a blue ball
ball2 = new Ball(cam, new Color3f(BALL2_COLOR));
//create a green plane
sqare = new Square(cam, new Color3f(PLANE_COLOR));
//add the objects to the scene
hitObjects.add(ball1);
hitObjects.add(ball2);
hitObjects.add(sqare);
}
//Applies the modeling transforms to every object in the scene.
public void transform(){
//reset each object's transform matrix before applying this frame's transforms
for(HitObject pObj:hitObjects){
pObj.initMyMatrix();
}
//transform for the ground plane: lay it flat, then scale to PLANE_WIDTH x PLANE_HEIGHT
sqare.rotate(-90, 1, 0, 0);
sqare.scale(PLANE_WIDTH/2.0f, PLANE_HEIGHT/2.0f, 1);
//transform for ball 1: radius R, centered CENTER_DIS left of the origin
ball1.translate(-CENTER_DIS, R, 0);
ball1.scale(R, R, R);
//transform for ball 2: radius R, centered CENTER_DIS right of the origin
ball2.translate(CENTER_DIS, R, 0);
ball2.scale(R, R, R);
}
/*
 * Computes the shading information for the pixel hit by the given ray.
 *
 * Return value:
 *  -1 : the ray hits nothing
 *   0 : there is a hit and the closest hit point is NOT in shadow
 *   1 : there is a hit and the closest hit point IS in shadow
 */
public int shade(
Ray ray, //the ray to trace
Color3f color, //out: surface color at the hit
Point3 vetex, //out: hit position (after the object transform)
Vector3 normal//out: surface normal (after the object transform)
){
Intersection best = new Intersection();//best (closest) hit record found so far
getFirstHit(ray, best);//fill in the closest hit record
if(best.numHits==0){//no object intersects the ray
return -1;
}
//there is a hit: report the data of the closest hit point
color.set(best.hit[0].hitObject.getColor());//object color
vetex.set(best.hit[0].hitPoint);//hit position
//transform the stored normal with the inverse-transpose matrix
float[] inverTranspM = best.hit[0].hitObject.getInvertTransposeMatrix();//inverse-transpose matrix
Vector3 preN = best.hit[0].hitNormal;//normal before the transform
best.hit[0].hitObject.xfrmNormal(normal, inverTranspM, preN);//compute the transformed normal
//test whether the hit point lies in shadow
Point3 hitPoint = best.hit[0].hitPoint;
//feeler origin: the hit point nudged a tiny distance back toward the eye
//(avoids self-intersection with the surface just hit)
feeler.start.set(hitPoint.minus(ray.dir.multiConst(MNIMUM)));
//feeler direction: from the hit point toward the light source
feeler.dir = light.pos.minus(hitPoint);
if(isInShadow(feeler)){
return 1;//hit, and the closest hit point is in shadow
}
return 0;//hit, and the closest hit point is not in shadow
}
//Finds the closest intersection of ray with any scene object and stores it in best.
public void getFirstHit(Ray ray, Intersection best){
Intersection inter = new Intersection();//scratch record, reused for every object
best.numHits=0;//no hit recorded yet
/*
 * The ray is tested against every object. Each object's hit method
 * always stores its own closest intersection in inter.hit[0], so
 * keeping only the record with the smallest hitTime in best.hit[0]
 * yields the closest intersection over all objects.
 */
for(HitObject pObj:hitObjects){//check every object in the scene
if(!pObj.hit(ray, inter)){//does the ray hit pObj? (on success, hit fills inter)
continue;//no hit: try the next object
}
if(best.numHits==0 || //best is still empty, or inter's hit is closer
inter.hit[0].hitTime<best.hit[0].hitTime){
/*
 * Copy the data rather than keeping a reference: inter is
 * reused on the next iteration, so aliasing would corrupt best.
 */
best.set(inter);//copy inter into best
}
}
}
//Returns true if the feeler ray is blocked by any object (point is in shadow).
public boolean isInShadow(Ray feeler){
for(HitObject pObj:hitObjects){
if(pObj.hit(feeler)){//the feeler hits some object: in shadow
return true;
}
}
return false;//nothing blocks the feeler: not in shadow
}
}

View File

@@ -0,0 +1,126 @@
package com.bn.Sample15_9;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import android.content.res.Resources;
import android.opengl.GLES20;
import android.util.Log;
//Utility class for compiling/linking GLSL shaders and loading shader scripts from assets.
public class ShaderUtil
{
	/**
	 * Compiles a shader of the given type from source text.
	 *
	 * @param shaderType GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
	 * @param source     the shader script
	 * @return the shader handle, or 0 on failure
	 */
	public static int loadShader
	(
	int shaderType,
	String source
	)
	{
		//create a new shader object
		int shader = GLES20.glCreateShader(shaderType);
		//if creation succeeded, upload and compile the source
		if (shader != 0)
		{
			GLES20.glShaderSource(shader, source);
			GLES20.glCompileShader(shader);
			//query the compile status
			int[] compiled = new int[1];
			GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
			if (compiled[0] == 0)
			{//compilation failed: log the info log and release the shader
				Log.e("ES20_ERROR", "Could not compile shader " + shaderType + ":");
				Log.e("ES20_ERROR", GLES20.glGetShaderInfoLog(shader));
				GLES20.glDeleteShader(shader);
				shader = 0;
			}
		}
		return shader;
	}

	/**
	 * Compiles both shaders and links them into a program.
	 *
	 * @return the program handle, or 0 on failure
	 */
	public static int createProgram(String vertexSource, String fragmentSource)
	{
		//compile the vertex shader
		int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
		if (vertexShader == 0)
		{
			return 0;
		}
		//compile the fragment shader
		int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
		if (pixelShader == 0)
		{
			//bug fix: release the already-compiled vertex shader instead of leaking it
			GLES20.glDeleteShader(vertexShader);
			return 0;
		}
		//create the program object
		int program = GLES20.glCreateProgram();
		//if creation succeeded, attach both shaders and link
		if (program != 0)
		{
			//attach the vertex shader
			GLES20.glAttachShader(program, vertexShader);
			checkGlError("glAttachShader");
			//attach the fragment shader
			GLES20.glAttachShader(program, pixelShader);
			checkGlError("glAttachShader");
			//link the program
			GLES20.glLinkProgram(program);
			//query the link status
			int[] linkStatus = new int[1];
			GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
			//on link failure, log the info log and delete the program
			if (linkStatus[0] != GLES20.GL_TRUE)
			{
				Log.e("ES20_ERROR", "Could not link program: ");
				Log.e("ES20_ERROR", GLES20.glGetProgramInfoLog(program));
				GLES20.glDeleteProgram(program);
				program = 0;
			}
		}
		return program;
	}

	/**
	 * Throws (after logging) if any GL error is pending.
	 *
	 * @param op name of the GL call being checked, used in the message
	 */
	public static void checkGlError(String op)
	{
		int error;
		while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR)
		{
			Log.e("ES20_ERROR", op + ": glError " + error);
			throw new RuntimeException(op + ": glError " + error);
		}
	}

	/**
	 * Loads a shader script from the app's assets as a UTF-8 string.
	 * CRLF line endings are normalized to LF.
	 *
	 * @param fname asset file name
	 * @param r     application resources
	 * @return the script text, or null if reading failed
	 */
	public static String loadFromAssetsFile(String fname,Resources r)
	{
		String result=null;
		try
		{
			InputStream in=r.getAssets().open(fname);
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			//read in chunks rather than one byte at a time
			byte[] buffer = new byte[4096];
			int len;
			while((len = in.read(buffer)) != -1)
			{
				baos.write(buffer, 0, len);
			}
			in.close();
			result=new String(baos.toByteArray(),"UTF-8");
			result=result.replaceAll("\\r\\n","\n");
		}
		catch(Exception e)
		{
			//best-effort loader: callers check for null
			e.printStackTrace();
		}
		return result;
	}
}

View File

@@ -0,0 +1,84 @@
package com.bn.Sample15_9;
/*
 * The generic square: side length 2, centered at the origin, lying in the xoy plane.
 * Instance transforms (set on HitObject) map it to its world-space placement.
 */
public class Square extends HitObject {
public Square(Camera cam, Color3f color){
this.cam = cam;
this.color = color;
}
//Full intersection test: fills inter with the hit record on success.
@Override
public boolean hit(Ray r,Intersection inter) {
/*
 * Intersecting the ray S+ct with the transformed object takes three steps:
 * 1. compute the inverse-transformed ray S'+c't
 * 2. intersect that ray with the generic (untransformed) object to get t
 * 3. substitute t back into the original S+ct to get the actual hit position
 *
 * genRay is therefore only the inverse-transformed ray used to solve for t;
 * the hit point itself is evaluated on the untransformed ray r.
 */
Ray genRay=new Ray();//holds the inverse-transformed ray
xfrmRay(genRay, getInvertMatrix(), r);//transform r by the inverse matrix into genRay
double denom = genRay.dir.z;//denominator of the plane-intersection formula
if(Math.abs(denom)<0.0001){//ray (nearly) parallel to the plane: no hit
return false;
}
double time=-genRay.start.z/denom;//hit time
if(time<=0.0){//hit point lies behind the eye
return false;
}
double hx=genRay.start.x+genRay.dir.x*time;//x coordinate of the hit
double hy=genRay.start.y+genRay.dir.y*time;//y coordinate of the hit
if (hx > 1.0 || hx < -1.0) {//x outside the generic square
return false;
}
if (hy > 1.0 || hy < -1.0) {//y outside the generic square
return false;
}
inter.numHits=1;//exactly one valid hit
//record the intersection data in inter
inter.hit[0].hitTime=time;
inter.hit[0].hitObject=this;
inter.hit[0].isEntering=true;
inter.hit[0].surface=0;
Point3 P = rayPos(r,time);//hit position (evaluated on the untransformed ray)
inter.hit[0].hitPoint.set(P);//hit position after the object transform
inter.hit[0].hitNormal.set(0,0,1);//normal before the transform (generic square faces +z)
return true;
}
//Shadow-feeler variant: only reports whether the ray is blocked, with t restricted to [0,1].
@Override
public boolean hit(Ray r) {
Ray genRay=new Ray();//holds the inverse-transformed ray
xfrmRay(genRay, getInvertMatrix(), r);//transform r by the inverse matrix into genRay
double denom = genRay.dir.z;//denominator of the plane-intersection formula
if(Math.abs(denom)<0.0001){//ray (nearly) parallel to the plane: no hit
return false;
}
double time=-genRay.start.z/denom;//hit time
//accept only hits with t in [0,1]: an occluder beyond the light casts no shadow
if(time<0.0 ||time>1){//hit outside the feeler segment
return false;
}
double hx=genRay.start.x+genRay.dir.x*time;//x coordinate of the hit
double hy=genRay.start.y+genRay.dir.y*time;//y coordinate of the hit
if (hx > 1.0 || hx < -1.0) {//x outside the generic square
return false;
}
if (hy > 1.0 || hy < -1.0) {//y outside the generic square
return false;
}
return true;
}
}

View File

@@ -0,0 +1,77 @@
package com.bn.Sample15_9;
//A vector in 3D space, stored as three float components.
public class Vector3 {
	float x;
	float y;
	float z;

	//Creates a zero vector.
	public Vector3(){}

	//Creates a vector from explicit components.
	public Vector3(float x, float y, float z) {
		this.x = x;
		this.y = y;
		this.z = z;
	}

	//Copies another vector's components into this one.
	public void set(Vector3 vec){
		x = vec.x;
		y = vec.y;
		z = vec.z;
	}

	//Copies a point's coordinates into this vector.
	public void set(Point3 p){
		x = p.x;
		y = p.y;
		z = p.z;
	}

	//Copies the first three entries of an array into this vector.
	public void set(float vec[]){
		x = vec[0];
		y = vec[1];
		z = vec[2];
	}

	//Sets the three components directly.
	public void set(float x, float y, float z){
		this.x = x;
		this.y = y;
		this.z = z;
	}

	//Returns this vector scaled by a constant.
	public Vector3 multiConst(float constant){
		return new Vector3(x * constant, y * constant, z * constant);
	}

	//Dot product of two vectors.
	public static float dot(Vector3 v1,Vector3 v2){
		return v1.x * v2.x + v1.y * v2.y + v1.z * v2.z;
	}

	//Dot product of a point (treated as a vector from the origin) and a vector.
	public static float dot(Point3 p1,Vector3 v2){
		return p1.x * v2.x + p1.y * v2.y + p1.z * v2.z;
	}

	//Dot product of two points treated as vectors from the origin.
	public static float dot(Point3 p1,Point3 p2){
		return p1.x * p2.x + p1.y * p2.y + p1.z * p2.z;
	}

	//Component-wise vector addition.
	public Vector3 add(Vector3 v){
		return new Vector3(x + v.x, y + v.y, z + v.z);
	}

	@Override
	public String toString(){
		return "vector:[" + x + "," + y + "," + z + "]";
	}

	/*
	 * Homogeneous coordinates:
	 * a fourth component of 1 marks the 4-tuple as a point,
	 * a fourth component of 0 marks it as a vector.
	 */
	//Returns this vector in homogeneous form (w = 0).
	public float[] toQici4(){
		return new float[]{x, y, z, 0};
	}
}