背景のレンダリングと3Dモデルのレンダリングで、別々の射影変換とモデルビュー変換を指定するようにした。
Cameraのインスタンスは毎フレーム生成されるので、Viewer3DにCameraのインスタンスを保持させないようにした。
(Translation: Background rendering and 3D-model rendering now use separate projection and model-view transforms. Since a Camera instance is created every frame, Viewer3D no longer keeps a Camera instance at construction time.)
1 parent 5ceeafc commit 2a2f0d3165518288bcad7b1667e27ce33a27dfdd
n-nittta authored on 19 Dec 2019
Showing 5 changed files
View
117
app/src/main/java/com/google/ar/core/examples/java/common/framework/RWT/RWTRenderer.java
package com.google.ar.core.examples.java.common.framework.RWT;
 
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
 
import com.google.ar.core.Camera;
import com.google.ar.core.Frame;
import com.google.ar.core.Session;
import com.google.ar.core.examples.java.common.framework.model3D.Universe;
import com.google.ar.core.examples.java.common.framework.view3D.Camera3D;
import com.google.ar.core.examples.java.common.framework.view3D.Viewer3D;
import com.google.ar.core.examples.java.common.java3d.GraphicsContext3D;
import com.google.ar.core.examples.java.common.java3d.Light;
import com.google.ar.core.examples.java.common.rendering.BackgroundRenderer;
import com.google.ar.core.examples.java.helloar.HelloArActivity;
import com.google.ar.core.exceptions.CameraNotAvailableException;
 
import java.util.ArrayList;
 
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
 
this.backgroundRenderer = backgroundRenderer;
}
 
public void setSession(Session session) {
this.session = session;
}
this.session = session;
}
 
// public void setBackgroundRenderer(BackgroundRenderer backgroundRenderer) {
// this.backgroundRenderer = backgroundRenderer;
// }
// }
 
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// gc3D = new GraphicsContext3D(gl);
if (viewer == null) {
gc3D = new GraphicsContext3D(gl);
viewer = new Viewer3D(gc3D);
} else {
gc3D.setGL10(gl);
}
// ArrayList<Light> lights = universe.getLights();
// for (int i = 0; i < lights.size(); i++){
// Light l = lights.get(i);
// gc3D.setLight(l,i);
// }
// viewer.setGraphicsContext3D(gc3D);
 
helloArActivity.onSurfaceCreated(gl, config);
}
 
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// GLES20.glViewport(0, 0, width, height);
// if (gc3D == null) {
// gc3D = new GraphicsContext3D(gl);
// viewer.setGraphicsContext3D(gc3D);
// } else {
// viewer.setGraphicsContext3D(gc3D.setGL10(gl));
// }
// viewer.surfaceChanged(width, height);
gl.glViewport(0, 0, width, height);
 
if (viewer == null) {
gc3D = new GraphicsContext3D(gl);
viewer = new Viewer3D(gc3D);
} else {
gc3D.setGL10(gl);
}
viewer.surfaceChanged(width, height);
 
helloArActivity.onSurfaceChanged(gl, width, height);
}
 
public void onDrawFrame(GL10 gl) {
// GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
 
if(session == null) {
return;
}
try {
if(session == null) {
return;
}
try {
// Viewerの更新
if (viewer == null) {
gc3D = new GraphicsContext3D(gl);
viewer = new Viewer3D(gc3D);
} else {
gc3D.setGL10(gl);
}
viewer.onDrawFrame();
 
// Frameの取得
session.setCameraTextureName(backgroundRenderer.getTextureId());
Frame frame = session.update();
 
Frame frame = session.update();
Camera camera = frame.getCamera();
 
if (viewer == null) {
viewer = new Viewer3D(camera);
}
 
if (gc3D == null) {
gc3D = new GraphicsContext3D(gl);
viewer.setGraphicsContext3D(gc3D);
} else {
viewer.setGraphicsContext3D(gc3D.setGL10(gl));
}
viewer.onDrawFrame();
 
// 背景(カメラ映像)の描画
backgroundRenderer.draw(gl, frame);
 
// 3Dモデルのレンダリング
gc3D.pushMatrix();
universe.render(viewer);
gc3D.popMatrix();
// 3Dモデルのレンダリング
gc3D.pushMatrix();
Camera camera = frame.getCamera();
viewer.updateCamera(camera);
universe.render(viewer);
gc3D.popMatrix();
helloArActivity.onDrawFrame(gl, frame, camera);
} catch (CameraNotAvailableException e) {
e.printStackTrace();
}
} catch (CameraNotAvailableException e) {
e.printStackTrace();
}
}
}
View
3
■■
app/src/main/java/com/google/ar/core/examples/java/common/framework/model3D/IViewer3D.java
package com.google.ar.core.examples.java.common.framework.model3D;
 
import com.google.ar.core.Camera;
import com.google.ar.core.examples.java.common.java3d.GraphicsContext3D;
import com.google.ar.core.examples.java.common.java3d.Light;
import com.google.ar.core.examples.java.common.java3d.Node;
import com.google.ar.core.examples.java.common.java3d.Transform3D;
 
import java.util.ArrayList;
 
public interface IViewer3D {
public abstract void setGraphicsContext3D(GraphicsContext3D gc3D);
public abstract void surfaceChanged(int width, int height);
public abstract void onDrawFrame();
public abstract void updateCamera(Camera camera);
public abstract void update(ArrayList<Light> lights, BackgroundBox skyBox);
public abstract void pushTransform(Transform3D t);
public abstract void popTransform();
public abstract void draw(Node obj);
View
84
app/src/main/java/com/google/ar/core/examples/java/common/framework/view3D/Viewer3D.java
public class Viewer3D implements IViewer3D {
private GraphicsContext3D gc3D = null;
private ArrayList<Light> lights = null;
private BackgroundBox skyBox = null;
// private Camera3D camera = null;
private int width;
private int height;
private Camera camera = null;
public Viewer3D(Camera camera) {
this.camera = camera;
}
 
@Override
public void setGraphicsContext3D(GraphicsContext3D gc3D) {
if (this.gc3D != gc3D) {
this.gc3D = gc3D;
}
public Viewer3D(GraphicsContext3D gc3D) {
this.gc3D = gc3D;
}
 
/**
 * Caches the new surface size and, when a camera is already available,
 * refreshes the projection. (De-duplicated: the diff residue declared
 * {@code focalLength} twice, which does not compile.)
 */
@Override
public void surfaceChanged(int width, int height) {
    this.width = width;
    this.height = height;
    if (camera != null) {
        // Horizontal FOV from the pinhole model: 2 * atan((width/2) / focalLength).
        float focalLength = camera.getImageIntrinsics().getFocalLength()[0];
        // Near/far planes 0.1..100 match the rest of the sample.
        gc3D.update(width, height, (float) Math.atan(width / 2.0f / focalLength) * 2.0f, 0.1f, 100.0f, false);
    }
}
 
 
/**
 * Per-frame reset: clears buffers and identity-loads the matrices via
 * {@code gc3D.onDrawFrame}. Fixes two defects in the residue version:
 * it dereferenced {@code camera} unconditionally (NPE before the first
 * frame, since the renderer calls this before obtaining the frame), and
 * it computed eye/center/up locals that were never used (the matching
 * {@code gc3D.update} call was commented out).
 */
@Override
public void onDrawFrame() {
    if (camera != null) {
        // Use the camera image dimensions when available.
        int w = camera.getImageIntrinsics().getImageDimensions()[0];
        int h = camera.getImageIntrinsics().getImageDimensions()[1];
        gc3D.onDrawFrame(w, h);
    } else {
        // No camera yet: fall back to the cached surface size.
        gc3D.onDrawFrame(width, height);
    }
}
 
@Override
public void updateCamera(Camera camera) {
// Refresh the per-frame camera and, when present, rebuild projection +
// model-view from its pose and intrinsics.
this.camera = camera;
if (camera != null) {
// Eye position and view axes from the camera pose.
float viewPoint[] = camera.getPose().getTranslation();
float viewLine[] = camera.getPose().getZAxis();
float viewUp[] = camera.getPose().getYAxis();
float focalLength = camera.getImageIntrinsics().getFocalLength()[0];
Position3D eye = new Position3D(viewPoint[0], viewPoint[1], viewPoint[2]);
// Look direction is the negated Z axis — presumably because the camera
// looks down -Z in this convention; TODO confirm against Pose docs.
Position3D center = eye.clone().add(new Vector3d(-viewLine[0], -viewLine[1], -viewLine[2]));
Vector3d up = new Vector3d(viewUp[0], viewUp[1], viewUp[2]);
// Horizontal FOV = 2 * atan((width/2) / focalLength); near 0.1, far 100.
gc3D.update(width, height, (float) Math.atan((float) width / 2.0f / focalLength) * 2.0f, 0.1f, 100.0f, eye, center, up, false);
}
}
 
@Override
public void update(ArrayList<Light> lights, BackgroundBox skyBox) {
// 光源の更新
if (this.lights != lights) {
this.lights = lights;
}
 
// スカイボックスの更新
if (this.skyBox != skyBox) {
this.skyBox = skyBox;
}
}
 
@Override
public void draw(Node node) {
if (node instanceof Box) {
gc3D.draw(((Box)node).getShape(Box.FRONT));
gc3D.draw(((Box)node).getShape(Box.RIGHT));
gc3D.draw(((Box)node).getShape(Box.TOP));
gc3D.draw(((Box)node).getShape(Box.BOTTOM));
} else if (node instanceof Cone) {
gc3D.draw(((Cone)node).getShape(Cone.BODY));
gc3D.draw(((Cone)node).getShape(Cone.CAP));
gc3D.draw(((Cone)node).getShape(Cone.BODY));
gc3D.draw(((Cone)node).getShape(Cone.CAP));
} else if (node instanceof Cylinder) {
gc3D.draw(((Cylinder)node).getShape(Cylinder.BODY));
gc3D.draw(((Cylinder)node).getShape(Cylinder.TOP));
gc3D.draw(((Cylinder)node).getShape(Cylinder.BOTTOM));
gc3D.draw(((Cylinder)node).getShape(Cylinder.BODY));
gc3D.draw(((Cylinder)node).getShape(Cylinder.TOP));
gc3D.draw(((Cylinder)node).getShape(Cylinder.BOTTOM));
} else if (node instanceof Sphere) {
gc3D.draw(((Sphere)node).getShape(Sphere.BODY));
gc3D.draw(((Sphere)node).getShape(Sphere.BODY));
} else if (node instanceof Shape3D) {
gc3D.draw((Shape3D)node);
} else if (node instanceof Light){
gc3D.updateLightState((Light)node);
View
127
app/src/main/java/com/google/ar/core/examples/java/common/java3d/GraphicsContext3D.java
// When true, keep `aspect` fixed (set via fixAspect) instead of recomputing
// it from the viewport dimensions on each update().
private boolean bFixAspect = false;
private float aspect;
// Registry of GL object handles: texture -> GL texture id, light -> GL light index.
private HashMap<Texture, Integer> textureRegistry = new HashMap<Texture, Integer>();
private HashMap<Light, Integer> lightRegistry = new HashMap<Light, Integer>();
 
public GraphicsContext3D(GL10 gl) {
init(gl);
}
 
init(gl);
}
return this;
}
 
// Pins the aspect ratio to the given value; subsequent update() calls will
// use it instead of deriving width/height from the viewport.
public void fixAspect(float aspect) {
this.bFixAspect = true;
this.aspect = aspect;
}
 
public void init(GL10 gl) {
this.gl = gl;
// デプスバッファのテスト機能を有効にする
gl.glEnable(GL10.GL_DEPTH_TEST);
// 陰面消去の動作を設定
gl.glDepthFunc(GL10.GL_LEQUAL);
gl.glDepthMask(true);
 
// ライトを有効にする
gl.glEnable(GL10.GL_LIGHTING);
// どの光源を使用するか指定
gl.glEnable(GL10.GL_LIGHT0);
 
gl.glClearColor(0.0f,0.0f,0.0f,0.0f);
gl.glClearDepthf(1.0f);
}
gl.glClearDepthf(1.0f);
}
 
public void update(int width, int height, float fovx, float zNear, float zFar,boolean fParallel) {
setGL10(gl);
if (!bFixAspect) {
aspect = (float)width / (float)height;
}
// ビューポートの設定
// gl.glViewport(0, 0, width, height);
GLES20.glViewport(0, 0, width, height);
 
// カメラの設定
gl.glMatrixMode(GL10.GL_PROJECTION); // 射影変換
gl.glLoadIdentity(); // 座標の初期化
// 画角の設定
aspect, //アスペクト比
zNear, //ニアクリップ
zFar);//ファークリップ
}else{
float top = zNear * (float) Math.tan(fovy * (Math.PI / 360.0));
float bottom = -top;
float left = bottom * aspect;
float right = top * aspect;
gl.glOrthof(left, right, bottom, top, zNear, zFar);
}
}
 
public void update(int width, int height, float fovx, float zNear, float zFar, Position3D eye, Position3D center, Vector3d up, boolean fParallel) {
float top = zNear * (float) Math.tan(fovy * (Math.PI / 360.0));
float bottom = -top;
float left = bottom * aspect;
float right = top * aspect;
gl.glOrthof(left, right, bottom, top, zNear, zFar);
}
}
 
public void onDrawFrame(int width, int height) {
// 表示画面とデプスバッファのクリア
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
 
// ビューポートの設定
GLES20.glViewport(0, 0, width, height);
 
// カメラの設定
gl.glMatrixMode(GL10.GL_PROJECTION); // 射影変換
gl.glLoadIdentity(); // 座標の初期化
 
// モデルビュー行列の指定
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity(); // 座標の初期化
}
 
public void update(int width, int height, float fovx, float zNear, float zFar, Position3D eye, Position3D center, Vector3d up, boolean fParallel) {
aspect = (float)width / (float)height;
 
// カメラの設定
gl.glMatrixMode(GL10.GL_PROJECTION); // 射影変換
gl.glLoadIdentity(); // 座標の初期化
// 画角の設定
float fovy = (float)(Math.atan(Math.tan(fovx / 2.0) / aspect) / Math.PI * 360.0f);
if (!fParallel) {
GLU.gluPerspective(gl,
fovy, //Y方向の画角
aspect, //アスペクト比
zNear, //ニアクリップ
zFar);//ファークリップ
} else {
float top = zNear * (float) Math.tan(fovy * (Math.PI / 360.0));
float bottom = -top;
float left = bottom * aspect;
float right = top * aspect;
gl.glOrthof(left, right, bottom, top, zNear, zFar);
GLU.gluPerspective(gl,
fovy, //Y方向の画角
aspect, //アスペクト比
zNear, //ニアクリップ
zFar);//ファークリップ
} else {
float top = zNear * (float) Math.tan(fovy * (Math.PI / 360.0));
float bottom = -top;
float left = bottom * aspect;
float right = top * aspect;
gl.glOrthof(left, right, bottom, top, zNear, zFar);
}
// モデルビュー行列の指定
gl.glMatrixMode(GL10.GL_MODELVIEW);
// 座標の初期化
gl.glLoadIdentity();
 
// カメラ外部パラメータの設定
GLU.gluLookAt(gl,
(float)eye.getX(), (float)eye.getY(), (float)eye.getZ(),
(float)center.getX(), (float)center.getY(), (float)center.getZ(),
(float)up.getX(), (float)up.getY(), (float)up.getZ());
}
GLU.gluLookAt(gl,
(float)eye.getX(), (float)eye.getY(), (float)eye.getZ(),
(float)center.getX(), (float)center.getY(), (float)center.getZ(),
(float)up.getX(), (float)up.getY(), (float)up.getZ());
}
 
public void setLight(Light l, int i) {
lightRegistry.put(l, i);
Color3f c = l.getColor();
float color[] = {c.r, c.g, c.b, 1.0f};
gl.glLightfv(GL10.GL_LIGHT0 + i, GL10.GL_DIFFUSE, color, 0);
gl.glLightfv(GL10.GL_LIGHT0 + i, GL10.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
}
}
 
public void updateLightState(Light l) {
Integer i = lightRegistry.get(l);
if (i == null) {
// Object3D内部に配置されている光源に対しては、初期化時にsetLight()が呼ばれないため
/** Multiplies the current GL matrix by the given transform's 4x4 matrix. */
public void multMatrix(Transform3D transform) {
    gl.glMultMatrixf(transform.getMatrix(), 0);
}
 
public void setAppearance(Appearance appearance) {
this.appearance = appearance;
Material material = appearance.getMaterial();
if (material != null) {
// 立方体マップの場合(GL10では対応していない)
} else {
// 通常の場合
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId[0]);
ImageComponent[] imageComponents = tex.getImages();
ImageComponent[] imageComponents = tex.getImages();
for (int level = 0; level < imageComponents.length; level++) {
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, level, ((ImageComponent2D)imageComponents[level]).getBitmap(), 0);
// ((ImageComponent2D)imageComponents[i]).getBitmap().recycle();
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
}
}
textureRegistry.put(tex, textureId[0]);
}
}
 
/**
 * Draws a shape: applies its appearance, then renders its geometry.
 * (The diff residue contained the setAppearance call twice; one redundant
 * call removed.)
 */
public void draw(Shape3D node) {
    if (node == null) return;
    setAppearance(node.getAppearance());
    draw(node.getGeometry());
}
 
public void draw(Geometry g) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureRegistry.get(tex)); // テクスチャが登録されていることを前提
TextureAttributes ta = tus.getTextureAttributes();
if (ta != null) setTextureAttributes(ta);
}
}
}
}
if (g instanceof GeometryArray) {
// バッファの設定
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
FloatBuffer uvBuffer = ((GeometryArray)g).getUVBuffer();
gl.glTexCoordPointer(4, GL10.GL_FLOAT, 0, uvBuffer);
}
 
// ジオメトリの描画
if (g instanceof TriangleArray) {
TriangleArray ta = (TriangleArray)g;
int vertexCount = ta.getVertexCount();
gl.glDrawElements(GL10.GL_TRIANGLE_STRIP, vertexCount, GL10.GL_UNSIGNED_SHORT, indexBuffer);
start += vertexCount;
}
}
 
if ((((GeometryArray) g).getVertexFormat() & GeometryArray.TEXTURE_COORDINATE_2) != 0) {
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
} else if ((((GeometryArray) g).getVertexFormat() & GeometryArray.TEXTURE_COORDINATE_3) != 0) {
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// テクスチャユニットを使っている場合
for (int n = 0; n < appearance.getTextureUnitCount(); n++) {
gl.glActiveTexture(GL10.GL_TEXTURE0 + n);
gl.glDisable(GL10.GL_TEXTURE_2D);
}
}
gl.glActiveTexture(GL10.GL_TEXTURE0);
}
}
 
private void setTextureAttributes(TextureAttributes ta) {
int textureMode = ta.getTextureMode();
switch (textureMode) {
case TextureAttributes.REPLACE:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
break;
case TextureAttributes.BLEND:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_BLEND);
break;
case TextureAttributes.REPLACE:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
break;
case TextureAttributes.BLEND:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_BLEND);
break;
// case TextureAttributes.COMBINE:
// gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_COMBINE);
// break;
case TextureAttributes.MODULATE:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_MODULATE);
break;
case TextureAttributes.DECAL:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_DECAL);
break;
case TextureAttributes.MODULATE:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_MODULATE);
break;
case TextureAttributes.DECAL:
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_DECAL);
break;
}
// int perspCorrectionMode = ta.getPerspectiveCorrectionMode();
// switch (perspCorrectionMode) {
// case TextureAttributes.NICEST:
View
app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java