Merge branch 'LAX1DUDE-main'
This commit is contained in:
commit
9e80cb2e8f
5
.gitattributes
vendored
5
.gitattributes
vendored
|
@ -1,6 +1,7 @@
|
|||
#
|
||||
# https://help.github.com/articles/dealing-with-line-endings/
|
||||
#
|
||||
# These are explicitly windows files and should use crlf
|
||||
*.bat text eol=crlf
|
||||
|
||||
*.bat text eol=crlf
|
||||
*.sh text eol=lf
|
||||
gradlew text eol=lf
|
||||
|
|
Binary file not shown.
58277
javascript/classes.js
58277
javascript/classes.js
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
BIN
javascript/testvideo.mp4
Normal file
BIN
javascript/testvideo.mp4
Normal file
Binary file not shown.
|
@ -181,14 +181,21 @@ void main(){
|
|||
/* https://bugs.chromium.org/p/angleproject/issues/detail?id=4994 */
|
||||
uv = ((uv * anisotropic_fix) - fract(uv * anisotropic_fix) + 0.5) / anisotropic_fix;
|
||||
|
||||
color *= texture(tex0, uv).bgra;
|
||||
vec4 texColor = texture(tex0, uv);
|
||||
#else
|
||||
color *= texture(tex0, (matrix_t * vec4(v_texture0, 0.0, 1.0)).xy).bgra;
|
||||
vec4 texColor = texture(tex0, (matrix_t * vec4(v_texture0, 0.0, 1.0)).xy);
|
||||
#endif
|
||||
|
||||
#else
|
||||
color *= texture(tex0, (matrix_t * vec4(texCoordV0, 0.0, 1.0)).xy).bgra;
|
||||
vec4 texColor = texture(tex0, (matrix_t * vec4(texCoordV0, 0.0, 1.0)).xy);
|
||||
#endif
|
||||
|
||||
#ifdef CC_swap_rb
|
||||
color *= texColor.rgba;
|
||||
#else
|
||||
color *= texColor.bgra;
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
#ifdef CC_alphatest
|
||||
|
|
283
samples/ayunami2000/VideoMapPacketCodec.java
Normal file
283
samples/ayunami2000/VideoMapPacketCodec.java
Normal file
|
@ -0,0 +1,283 @@
|
|||
package ayunami2000;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataOutputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
public class VideoMapPacketCodec {
|
||||
|
||||
public final int[][] mapIds;
|
||||
private boolean loop;
|
||||
private String url;
|
||||
private int duration;
|
||||
private long timestamp;
|
||||
private long pauseTimestamp;
|
||||
private double posX;
|
||||
private double posY;
|
||||
private double posZ;
|
||||
private float volume;
|
||||
private int frameRate;
|
||||
private boolean requiresFullResetPacket;
|
||||
private boolean requiresPositionPacket;
|
||||
private boolean isDisabled;
|
||||
|
||||
/**
|
||||
* @param mapIds 2D grid of map IDs that make up the screen (mapIds[y][x])
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
* @param volume the volume of the clip
|
||||
*/
|
||||
public VideoMapPacketCodec(int[][] mapIds, double posX, double posY, double posZ, float volume) {
|
||||
this.mapIds = mapIds;
|
||||
this.url = null;
|
||||
this.posX = posX;
|
||||
this.posY = posY;
|
||||
this.posZ = posZ;
|
||||
this.volume = volume;
|
||||
this.frameRate = 30;
|
||||
this.requiresPositionPacket = true;
|
||||
this.requiresFullResetPacket = true;
|
||||
this.isDisabled = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param mapIds 2D grid of map IDs that make up the screen (mapIds[y][x])
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
*/
|
||||
public VideoMapPacketCodec(int[][] mapIds, double posX, double posY, double posZ) {
|
||||
this(mapIds, posX, posY, posZ, 1.0f);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
* @param volume the volume of the clip
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public byte[] moveAudioSource(double posX, double posY, double posZ, float volume) {
|
||||
this.posX = posX;
|
||||
this.posY = posY;
|
||||
this.posZ = posZ;
|
||||
this.volume = volume;
|
||||
this.requiresPositionPacket = true;
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* unloads video and resets all map object to vanilla renderer
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public byte[] disableVideo() {
|
||||
isDisabled = true;
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* syncs the server side video timestamp with players
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public byte[] syncPlaybackWithPlayers() {
|
||||
try {
|
||||
ByteArrayOutputStream bao = new ByteArrayOutputStream();
|
||||
DataOutputStream str = new DataOutputStream(bao);
|
||||
|
||||
if(isDisabled) {
|
||||
str.write(0);
|
||||
int x = mapIds[0].length;
|
||||
int y = mapIds.length;
|
||||
str.write((x << 4) | y);
|
||||
for(int yy = 0; yy < y; ++yy) {
|
||||
for(int xx = 0; xx < x; ++xx) {
|
||||
str.writeShort(mapIds[yy][xx]);
|
||||
}
|
||||
}
|
||||
return bao.toByteArray();
|
||||
}
|
||||
|
||||
int packetType = 1;
|
||||
if(requiresFullResetPacket) {
|
||||
packetType = packetType | 2;
|
||||
}
|
||||
if(requiresFullResetPacket || requiresPositionPacket) {
|
||||
packetType = packetType | 4;
|
||||
}
|
||||
|
||||
str.write(packetType);
|
||||
|
||||
if(requiresFullResetPacket) {
|
||||
int x = mapIds[0].length;
|
||||
int y = mapIds.length;
|
||||
str.write((x << 4) | y);
|
||||
for(int yy = 0; yy < y; ++yy) {
|
||||
for(int xx = 0; xx < x; ++xx) {
|
||||
str.writeShort(mapIds[yy][xx]);
|
||||
}
|
||||
}
|
||||
str.write(frameRate);
|
||||
str.writeInt(duration);
|
||||
str.writeUTF(url);
|
||||
}
|
||||
|
||||
if(requiresFullResetPacket || requiresPositionPacket) {
|
||||
str.writeFloat(volume);
|
||||
str.writeDouble(posX);
|
||||
str.writeDouble(posY);
|
||||
str.writeDouble(posZ);
|
||||
}
|
||||
|
||||
str.writeInt(getElapsedMillis());
|
||||
str.writeBoolean(loop);
|
||||
str.writeBoolean(pauseTimestamp > 0l);
|
||||
|
||||
requiresFullResetPacket = false;
|
||||
requiresPositionPacket = false;
|
||||
|
||||
return bao.toByteArray();
|
||||
}catch(IOException e) {
|
||||
throw new RuntimeException("serialization error", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* this is dual purpose, it calculates elapsed time but also loops or pauses the video if it is finished playing
|
||||
*/
|
||||
private int getElapsedMillis() {
|
||||
if(pauseTimestamp > 0l) {
|
||||
return (int)(pauseTimestamp - timestamp);
|
||||
}
|
||||
int t = (int)(System.currentTimeMillis() - timestamp);
|
||||
if(loop) {
|
||||
while(t > duration) {
|
||||
t -= duration;
|
||||
timestamp += duration;
|
||||
}
|
||||
}else {
|
||||
if(t > duration) {
|
||||
timestamp = (int)(System.currentTimeMillis() - duration);
|
||||
return duration;
|
||||
}
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param url URL to an MP4 or other HTML5 supported video file
|
||||
* @param loop If the video file should loop
|
||||
* @param durationSeconds duration of the video in seconds
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public byte[] beginPlayback(String url, boolean loop, float duration) {
|
||||
this.url = url;
|
||||
this.loop = loop;
|
||||
this.duration = (int)(duration * 1000.0f);
|
||||
this.pauseTimestamp = 0l;
|
||||
this.timestamp = 0l;
|
||||
this.requiresFullResetPacket = true;
|
||||
this.isDisabled = false;
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells the browser to pre-load a URL to a video to be played in the future
|
||||
* @param url the URL of the video
|
||||
* @param ttl the amount of time the video should stay loaded
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public static byte[] bufferVideo(String url, int ttl) {
|
||||
try {
|
||||
ByteArrayOutputStream bao = new ByteArrayOutputStream();
|
||||
DataOutputStream str = new DataOutputStream(bao);
|
||||
str.write(8);
|
||||
str.writeInt(ttl);
|
||||
str.writeUTF(url);
|
||||
return bao.toByteArray();
|
||||
}catch(IOException e) {
|
||||
throw new RuntimeException("serialization error", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the duration of the current clip
|
||||
*/
|
||||
public float getDuration() {
|
||||
return duration * 0.001f;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the URL of the current clip
|
||||
*/
|
||||
public String getURL() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the server's current timestamp
|
||||
*/
|
||||
public float getPlaybackTime() {
|
||||
return getElapsedMillis() * 0.001f;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param time time in seconds to seek the video to
|
||||
*/
|
||||
public byte[] setPlaybackTime(float time) {
|
||||
timestamp = System.currentTimeMillis() - (int)(time * 1000.0f);
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return if playback is complete (false if loop)
|
||||
*/
|
||||
public boolean isPlaybackFinished() {
|
||||
return !loop && getElapsedMillis() == duration;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param loop video should loop
|
||||
*/
|
||||
public byte[] setLoopEnable(boolean loop) {
|
||||
this.loop = loop;
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return if loop is enabled
|
||||
*/
|
||||
public boolean isLoopEnable() {
|
||||
return loop;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param pause set if video should pause
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public byte[] setPaused(boolean pause) {
|
||||
getElapsedMillis();
|
||||
if(pause && pauseTimestamp <= 0l) {
|
||||
pauseTimestamp = System.currentTimeMillis();
|
||||
}else if(!pause && pauseTimestamp > 0l) {
|
||||
timestamp = System.currentTimeMillis() - (pauseTimestamp - timestamp);
|
||||
pauseTimestamp = 0l;
|
||||
}
|
||||
return syncPlaybackWithPlayers();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return if video is currently paused
|
||||
*/
|
||||
public boolean isPaused() {
|
||||
return pauseTimestamp > 0l;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return current server-side volume
|
||||
*/
|
||||
public float getVolume() {
|
||||
return volume;
|
||||
}
|
||||
|
||||
}
|
132
samples/ayunami2000/VideoMapPacketCodecBukkit.java
Normal file
132
samples/ayunami2000/VideoMapPacketCodecBukkit.java
Normal file
|
@ -0,0 +1,132 @@
|
|||
package ayunami2000;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.bukkit.craftbukkit.v1_5_R3.entity.CraftPlayer;
|
||||
import org.bukkit.entity.Player;
|
||||
|
||||
import net.minecraft.server.v1_5_R3.Packet;
|
||||
import net.minecraft.server.v1_5_R3.Packet131ItemData;
|
||||
|
||||
public class VideoMapPacketCodecBukkit extends VideoMapPacketCodec {
|
||||
|
||||
/**
|
||||
* @param mapIds 2D grid of map IDs that make up the screen (mapIds[y][x])
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
* @param volume the volume of the clip
|
||||
*/
|
||||
public VideoMapPacketCodecBukkit(int[][] mapIds, double posX, double posY, double posZ, float volume) {
|
||||
super(mapIds, posX, posY, posZ, volume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param mapIds 2D grid of map IDs that make up the screen (mapIds[y][x])
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
*/
|
||||
public VideoMapPacketCodecBukkit(int[][] mapIds, double posX, double posY, double posZ) {
|
||||
super(mapIds, posX, posY, posZ, 1.0f);
|
||||
}
|
||||
|
||||
public static class VideoMapPacket {
|
||||
protected final Object packet;
|
||||
protected VideoMapPacket(byte[] packet) {
|
||||
this.packet = new Packet131ItemData((short)104, (short)0, packet);
|
||||
}
|
||||
public Object getNativePacket() {
|
||||
return packet;
|
||||
}
|
||||
public void send(Player p) {
|
||||
nativeSendPacketToPlayer(p, packet);
|
||||
}
|
||||
public void send(Player... p) {
|
||||
for(Player pp : p) {
|
||||
nativeSendPacketToPlayer(pp, packet);
|
||||
}
|
||||
}
|
||||
public void send(List<Player> p) {
|
||||
for(Player pp : p) {
|
||||
nativeSendPacketToPlayer(pp, packet);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param posX audio playback X coord
|
||||
* @param posY audio playback Y coord
|
||||
* @param posZ audio playback Z coord
|
||||
* @param volume the volume of the clip
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public VideoMapPacket moveAudioSourceBukkit(double posX, double posY, double posZ, float volume) {
|
||||
return new VideoMapPacket(moveAudioSource(posX, posY, posZ, volume));
|
||||
}
|
||||
|
||||
/**
|
||||
* unloads video and resets all map object to vanilla renderer
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public VideoMapPacket disableVideoBukkit() {
|
||||
return new VideoMapPacket(disableVideo());
|
||||
}
|
||||
|
||||
/**
|
||||
* syncs the server side video timestamp with players
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public VideoMapPacket syncPlaybackWithPlayersBukkit() {
|
||||
return new VideoMapPacket(syncPlaybackWithPlayers());
|
||||
}
|
||||
|
||||
/**
|
||||
* @param url URL to an MP4 or other HTML5 supported video file
|
||||
* @param loop If the video file should loop
|
||||
* @param durationSeconds duration of the video in seconds
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public VideoMapPacket beginPlaybackBukkit(String url, boolean loop, float duration) {
|
||||
return new VideoMapPacket(beginPlayback(url, loop, duration));
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells the browser to pre-load a URL to a video to be played in the future
|
||||
* @param url the URL of the video
|
||||
* @param ttl the amount of time the video should stay loaded
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public static VideoMapPacket bufferVideoBukkit(String url, int ttl) {
|
||||
return new VideoMapPacket(bufferVideo(url, ttl));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param time time in seconds to seek the video to
|
||||
*/
|
||||
public VideoMapPacket setPlaybackTimeBukkit(float time) {
|
||||
return new VideoMapPacket(setPlaybackTime(time));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param loop video should loop
|
||||
*/
|
||||
public VideoMapPacket setLoopEnableBukkit(boolean loop) {
|
||||
return new VideoMapPacket(setLoopEnable(loop));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param pause set if video should pause
|
||||
* @return packet to send to players
|
||||
*/
|
||||
public VideoMapPacket setPausedBukkit(boolean pause) {
|
||||
return new VideoMapPacket(setPaused(pause));
|
||||
}
|
||||
|
||||
public static void nativeSendPacketToPlayer(Player player, Object obj) {
|
||||
if(obj == null) {
|
||||
return;
|
||||
}
|
||||
((CraftPlayer)player).getHandle().playerConnection.sendPacket((Packet)obj);
|
||||
}
|
||||
}
|
|
@ -9,3 +9,7 @@ commands:
|
|||
description: test ayunami map system
|
||||
usage: /samplemap <get|disable|set> [mapid] [image file] [16bpp|24bpp] [compress]
|
||||
permission: eaglersamples.samplemap
|
||||
videomap:
|
||||
description: test video map system (place video_map_config.txt in your server's working directory)
|
||||
usage: /videomap <begin|stop|preload|pause|resume|loop|move> [url] [duration or ttl]
|
||||
permission: eaglersamples.videomap
|
155
samples/plugin/CommandVideoMap.java
Normal file
155
samples/plugin/CommandVideoMap.java
Normal file
|
@ -0,0 +1,155 @@
|
|||
package plugin;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bukkit.ChatColor;
|
||||
import org.bukkit.Location;
|
||||
import org.bukkit.command.Command;
|
||||
import org.bukkit.command.CommandExecutor;
|
||||
import org.bukkit.command.CommandSender;
|
||||
import org.bukkit.entity.Player;
|
||||
|
||||
import ayunami2000.VideoMapPacketCodecBukkit;
|
||||
|
||||
public class CommandVideoMap implements CommandExecutor {
|
||||
|
||||
private VideoMapPacketCodecBukkit currentCodecInstance = null;
|
||||
|
||||
@Override
|
||||
public boolean onCommand(CommandSender arg0, Command arg1, String arg2, String[] arg3) {
|
||||
if(!(arg0 instanceof Player)) {
|
||||
arg0.sendMessage(ChatColor.RED + "Internal Error: " + ChatColor.WHITE + "CommmandSender must be a Player");
|
||||
return false;
|
||||
}
|
||||
if(arg3.length == 3 && arg3[0].equalsIgnoreCase("begin")) {
|
||||
try {
|
||||
List<int[]> mapRows = new ArrayList();
|
||||
double x = 0.0d;
|
||||
double y = 0.0d;
|
||||
double z = 0.0d;
|
||||
float volume = -1.0f;
|
||||
BufferedReader r = new BufferedReader(new FileReader(new File("video_map_config.txt")));
|
||||
String str;
|
||||
while((str = r.readLine()) != null) {
|
||||
str = str.trim();
|
||||
if(str.startsWith("#")) {
|
||||
continue;
|
||||
}else if(str.startsWith("x")) {
|
||||
int i = str.indexOf('=');
|
||||
if(i > 0) {
|
||||
x = Double.parseDouble(str.substring(i + 1).trim());
|
||||
}
|
||||
}else if(str.startsWith("y")) {
|
||||
int i = str.indexOf('=');
|
||||
if(i > 0) {
|
||||
y = Double.parseDouble(str.substring(i + 1).trim());
|
||||
}
|
||||
}else if(str.startsWith("z")) {
|
||||
int i = str.indexOf('=');
|
||||
if(i > 0) {
|
||||
z = Double.parseDouble(str.substring(i + 1).trim());
|
||||
}
|
||||
}else if(str.startsWith("volume")) {
|
||||
int i = str.indexOf('=');
|
||||
if(i > 0) {
|
||||
volume = Float.parseFloat(str.substring(i + 1).trim());
|
||||
}
|
||||
}else {
|
||||
try {
|
||||
String[] digits = str.split(",");
|
||||
int firstInt = Integer.parseInt(digits[0].trim());
|
||||
int[] newRow = new int[digits.length];
|
||||
newRow[0] = firstInt;
|
||||
for(int i = 1; i < digits.length; ++i) {
|
||||
newRow[i] = Integer.parseInt(digits[i].trim());
|
||||
}
|
||||
if(mapRows.size() > 0 && mapRows.get(0).length != newRow.length) {
|
||||
throw new IOException("All rows in map list must be the same length (" + mapRows.get(0).length + " != " + newRow.length + ")");
|
||||
}
|
||||
mapRows.add(newRow);
|
||||
}catch(NumberFormatException t) {
|
||||
}
|
||||
}
|
||||
}
|
||||
r.close();
|
||||
if(mapRows.size() > 0) {
|
||||
if(currentCodecInstance != null) {
|
||||
currentCodecInstance.disableVideoBukkit().send((Player)arg0);
|
||||
currentCodecInstance = null;
|
||||
}
|
||||
int[][] matrix = new int[mapRows.size()][mapRows.get(0).length];
|
||||
for(int i = 0, l = mapRows.size(); i < l; ++i) {
|
||||
for(int j = 0; j < matrix[i].length; ++j) {
|
||||
matrix[i][j] = mapRows.get(i)[j];
|
||||
}
|
||||
}
|
||||
currentCodecInstance = new VideoMapPacketCodecBukkit(matrix, x, y, z, volume);
|
||||
currentCodecInstance.beginPlaybackBukkit(arg3[1], true, arg3[2].indexOf('.') > 0 ? Float.parseFloat(arg3[2]) : Float.parseFloat(arg3[2] + ".0")).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + "Enabled video map, URL:" + ChatColor.WHITE + " " + arg3[1]);
|
||||
return true;
|
||||
}else {
|
||||
throw new IOException("No map rows were defined");
|
||||
}
|
||||
}catch(IOException ex) {
|
||||
arg0.sendMessage(ChatColor.RED + "Internal Error while reading \'video_map_config.txt\': " + ChatColor.WHITE + ex.toString());
|
||||
}
|
||||
}else if((arg3.length == 2 || arg3.length == 3) && arg3[0].equalsIgnoreCase("preload")) {
|
||||
int ttl = arg3.length == 3 ? Integer.parseInt(arg3[2]) * 1000 : 180 * 1000;
|
||||
VideoMapPacketCodecBukkit.bufferVideoBukkit(arg3[1], ttl).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + "Buffered video URL:" + ChatColor.WHITE + " " + arg3[1] + " " + ChatColor.GREEN + "for " + ChatColor.WHITE + (ttl / 1000) + ChatColor.GREEN + " seconds");
|
||||
return true;
|
||||
}else {
|
||||
if(arg3.length == 1 && arg3[0].equalsIgnoreCase("stop")) {
|
||||
if(currentCodecInstance != null) {
|
||||
currentCodecInstance.disableVideoBukkit().send((Player)arg0);
|
||||
currentCodecInstance = null;
|
||||
arg0.sendMessage(ChatColor.GREEN + "Disabled video map");
|
||||
return true;
|
||||
}else {
|
||||
arg0.sendMessage(ChatColor.RED + "Error: " + ChatColor.WHITE + "no video is loaded");
|
||||
}
|
||||
}else if(arg3.length == 1 && arg3[0].equalsIgnoreCase("pause")) {
|
||||
if(currentCodecInstance != null) {
|
||||
currentCodecInstance.setPausedBukkit(true).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + "Paused video map");
|
||||
return true;
|
||||
}else {
|
||||
arg0.sendMessage(ChatColor.RED + "Error: " + ChatColor.WHITE + "no video is loaded");
|
||||
}
|
||||
}else if(arg3.length == 1 && arg3[0].equalsIgnoreCase("resume")) {
|
||||
if(currentCodecInstance != null) {
|
||||
currentCodecInstance.setPausedBukkit(false).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + "Resumed video map");
|
||||
return true;
|
||||
}else {
|
||||
arg0.sendMessage(ChatColor.RED + "Error: " + ChatColor.WHITE + "no video is loaded");
|
||||
}
|
||||
}else if((arg3.length == 1 || arg3.length == 2) && arg3[0].equalsIgnoreCase("loop")) {
|
||||
if(currentCodecInstance != null) {
|
||||
boolean gottaLoop = arg3.length == 1 || arg3[1].equalsIgnoreCase("true");
|
||||
currentCodecInstance.setLoopEnableBukkit(gottaLoop).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + (gottaLoop ? "Enabled video map loop" : "Disabled video map loop"));
|
||||
return true;
|
||||
}else {
|
||||
arg0.sendMessage(ChatColor.RED + "Error: " + ChatColor.WHITE + "no video is loaded");
|
||||
}
|
||||
}else if(arg3.length == 1 && arg3[0].equalsIgnoreCase("move")) {
|
||||
if(currentCodecInstance != null) {
|
||||
Location l = ((Player)arg0).getLocation();
|
||||
currentCodecInstance.moveAudioSourceBukkit(l.getX(), l.getY(), l.getZ(), currentCodecInstance.getVolume()).send((Player)arg0);
|
||||
arg0.sendMessage(ChatColor.GREEN + "Repositioned audio source to " + l.getBlockX() + ", " + l.getBlockY() + ", " + l.getBlockZ());
|
||||
return true;
|
||||
}else {
|
||||
arg0.sendMessage(ChatColor.RED + "Error: " + ChatColor.WHITE + "no video is loaded");
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
|
@ -6,6 +6,7 @@ public class EaglerSamplesPlugin extends JavaPlugin {
|
|||
|
||||
public void onEnable() {
|
||||
getCommand("samplemap").setExecutor(new CommandSampleMap(this));
|
||||
getCommand("videomap").setExecutor(new CommandVideoMap());
|
||||
}
|
||||
|
||||
public void onDisable() {
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
These are sample source files to assist the process of integrating Eaglercraft into other projects, or to assist the process of integrating other projects into Eaglercraft
|
||||
|
||||
place "video_map_config.txt" in the working directory of the server to use /videomap
|
14
samples/video_map_config.txt
Normal file
14
samples/video_map_config.txt
Normal file
|
@ -0,0 +1,14 @@
|
|||
# type map ids like this to create the video screen:
|
||||
|
||||
1, 2, 3
|
||||
4, 5, 6
|
||||
|
||||
# position and volume:
|
||||
|
||||
x = 0.0
|
||||
y = 0.0
|
||||
z = 0.0
|
||||
|
||||
volume = 1.0
|
||||
|
||||
# set volume to '-1.0' to disable directional audio
|
|
@ -635,6 +635,73 @@ public class EaglerAdapterImpl2 {
|
|||
return GL20.glGetAttribLocation(p1.obj, p2);
|
||||
}
|
||||
|
||||
public static final boolean isVideoSupported() {
|
||||
return false;
|
||||
}
|
||||
public static final void loadVideo(String src, boolean autoplay) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void loadVideo(String src, boolean autoplay, String setJavascriptPointer) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void loadVideo(String src, boolean autoplay, String setJavascriptPointer, String javascriptOnloadFunction) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void bufferVideo(String src, int ttl) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void unloadVideo() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final boolean isVideoLoaded() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final boolean isVideoPaused() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void setVideoPaused(boolean pause) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void setVideoLoop(boolean pause) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void setVideoVolume(float x, float y, float z, float v) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void updateVideoTexture() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void bindVideoTexture() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final int getVideoWidth() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final int getVideoHeight() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final float getVideoCurrentTime() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void setVideoCurrentTime(float seconds) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final float getVideoDuration() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
public static final void setVideoFrameRate(float seconds) {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
|
||||
public static final int VIDEO_ERR_NONE = -1;
|
||||
public static final int VIDEO_ERR_ABORTED = 1;
|
||||
public static final int VIDEO_ERR_NETWORK = 2;
|
||||
public static final int VIDEO_ERR_DECODE = 3;
|
||||
public static final int VIDEO_ERR_SRC_NOT_SUPPORTED = 4;
|
||||
|
||||
public static final int getVideoError() {
|
||||
throw new UnsupportedOperationException("Video is not supported in LWJGL runtime");
|
||||
}
|
||||
|
||||
// =======================================================================================
|
||||
// =======================================================================================
|
||||
|
@ -1147,16 +1214,12 @@ public class EaglerAdapterImpl2 {
|
|||
return s;
|
||||
}
|
||||
public static final void setListenerPos(float x, float y, float z, float vx, float vy, float vz, float pitch, float yaw) {
|
||||
float var11 = MathHelper.cos(-yaw * 0.017453292F - (float) Math.PI);
|
||||
float var12 = MathHelper.sin(-yaw * 0.017453292F - (float) Math.PI);
|
||||
float var13 = -var12;
|
||||
float var14 = -MathHelper.sin(-pitch * 0.017453292F - (float) Math.PI);
|
||||
float var15 = -var11;
|
||||
float var16 = 0.0F;
|
||||
float var17 = 1.0F;
|
||||
float var18 = 0.0F;
|
||||
float var2 = MathHelper.cos(-yaw * 0.017453292F);
|
||||
float var3 = MathHelper.sin(-yaw * 0.017453292F);
|
||||
float var4 = -MathHelper.cos(pitch * 0.017453292F);
|
||||
float var5 = MathHelper.sin(pitch * 0.017453292F);
|
||||
ss.setListenerPosition(x, y, z);
|
||||
ss.setListenerOrientation(var13, var14, var15, var16, var17, var18);
|
||||
ss.setListenerOrientation(-var3 * var4, -var5, -var2 * var4, 0.0f, 1.0f, 0.0f);
|
||||
ss.setListenerVelocity(vx, vy, vz);
|
||||
}
|
||||
public static final void setPlaybackOffsetDelay(float f) {
|
||||
|
|
|
@ -6,13 +6,16 @@ public class ConfigConstants {
|
|||
|
||||
public static boolean profanity = false;
|
||||
|
||||
public static final String version = "22w15d";
|
||||
public static final String version = "22w16h";
|
||||
public static final String mainMenuString = "ayuncraft " + version;
|
||||
|
||||
public static final String forkMe = "https://github.com/ayunami2000/ayuncraft";
|
||||
|
||||
public static final boolean html5build = true;
|
||||
|
||||
public static String ayonullTitle = null;
|
||||
public static String ayonullLink = null;
|
||||
|
||||
public static String[] proxies = new String[]{"pproxy.rom1504.fr","webmcproxy.glitch.me","net-browserify.glitch.me"};
|
||||
|
||||
public static Pattern ipPattern = Pattern.compile("^"
|
||||
|
|
101
src/main/java/net/lax1dude/eaglercraft/GuiScreenLicense.java
Normal file
101
src/main/java/net/lax1dude/eaglercraft/GuiScreenLicense.java
Normal file
|
@ -0,0 +1,101 @@
|
|||
package net.lax1dude.eaglercraft;
|
||||
|
||||
import net.minecraft.src.GuiButton;
|
||||
import net.minecraft.src.GuiScreen;
|
||||
|
||||
public class GuiScreenLicense extends GuiScreen {
|
||||
|
||||
private final GuiScreen continueScreen;
|
||||
private boolean hasCheckedBox = false;
|
||||
private int beginOffset = 0;
|
||||
private GuiButton acceptButton;
|
||||
|
||||
public GuiScreenLicense(GuiScreen scr) {
|
||||
continueScreen = scr;
|
||||
}
|
||||
|
||||
public void initGui() {
|
||||
beginOffset = this.height / 2 - 100;
|
||||
if(beginOffset < 5) {
|
||||
beginOffset = 5;
|
||||
}
|
||||
this.buttonList.add(new GuiButton(1, this.width / 2 - 120, beginOffset + 180, 115, 20, new String(License.line61)));
|
||||
this.buttonList.add(acceptButton = new GuiButton(2, this.width / 2 + 5, beginOffset + 180, 115, 20, new String(License.line60)));
|
||||
acceptButton.enabled = false;
|
||||
}
|
||||
|
||||
protected void actionPerformed(GuiButton par1GuiButton) {
|
||||
if(par1GuiButton.id == 2) {
|
||||
LocalStorageManager.profileSettingsStorage.setBoolean("acceptLicense", true);
|
||||
LocalStorageManager.saveStorageP();
|
||||
mc.displayGuiScreen(continueScreen);
|
||||
}else if(par1GuiButton.id == 1) {
|
||||
mc.displayGuiScreen(new GuiScreenLicenseDeclined());
|
||||
}
|
||||
}
|
||||
|
||||
private static final TextureLocation beaconx = new TextureLocation("/gui/beacon.png");
|
||||
|
||||
public void drawScreen(int mx, int my, float par3) {
|
||||
drawDefaultBackground();
|
||||
acceptButton.enabled = hasCheckedBox;
|
||||
super.drawScreen(mx, my, par3);
|
||||
|
||||
EaglerAdapter.glPushMatrix();
|
||||
EaglerAdapter.glScalef(1.33f, 1.33f, 1.33f);
|
||||
drawCenteredString(fontRenderer, new String(License.line00), width * 3 / 8, beginOffset * 3 / 4, 0xDDDD55);
|
||||
EaglerAdapter.glPopMatrix();
|
||||
|
||||
drawCenteredString(fontRenderer, new String(License.line10), width / 2, beginOffset + 22, 0xFF7777);
|
||||
drawCenteredString(fontRenderer, new String(License.line11), width / 2, beginOffset + 33, 0xFF7777);
|
||||
drawCenteredString(fontRenderer, new String(License.line12), width / 2, beginOffset + 44, 0xFF7777);
|
||||
|
||||
drawCenteredString(fontRenderer, new String(License.line20), width / 2, beginOffset + 62, 0x448844);
|
||||
drawCenteredString(fontRenderer, new String(License.line21), width / 2, beginOffset + 71, 0x448844);
|
||||
|
||||
EaglerAdapter.glPushMatrix();
|
||||
EaglerAdapter.glScalef(0.75f, 0.75f, 0.75f);
|
||||
drawCenteredString(fontRenderer, new String(License.line30), width * 4 / 6, (beginOffset + 89) * 4 / 3, 0x666666);
|
||||
drawCenteredString(fontRenderer, new String(License.line31), width * 4 / 6, (beginOffset + 97) * 4 / 3, 0x999999);
|
||||
drawCenteredString(fontRenderer, new String(License.line32), width * 4 / 6, (beginOffset + 105) * 4 / 3, 0x999999);
|
||||
EaglerAdapter.glPopMatrix();
|
||||
|
||||
drawCenteredString(fontRenderer, new String(License.line40), width / 2, beginOffset + 120, 0xFF7777);
|
||||
|
||||
boolean mouseOverCheck = width / 2 - 100 < mx && width / 2 - 83 > mx && beginOffset + 142 < my && beginOffset + 159 > my;
|
||||
|
||||
if(mouseOverCheck) {
|
||||
EaglerAdapter.glColor4f(0.7f, 0.7f, 1.0f, 1.0f);
|
||||
}else {
|
||||
EaglerAdapter.glColor4f(0.6f, 0.6f, 0.6f, 1.0f);
|
||||
}
|
||||
|
||||
beaconx.bindTexture();
|
||||
|
||||
EaglerAdapter.glPushMatrix();
|
||||
EaglerAdapter.glScalef(0.75f, 0.75f, 0.75f);
|
||||
drawTexturedModalRect((width / 2 - 100) * 4 / 3, (beginOffset + 142) * 4 / 3, 22, 219, 22, 22);
|
||||
EaglerAdapter.glPopMatrix();
|
||||
|
||||
if(hasCheckedBox) {
|
||||
EaglerAdapter.glPushMatrix();
|
||||
EaglerAdapter.glColor4f(1.1f, 1.1f, 1.1f, 1.0f);
|
||||
EaglerAdapter.glTranslatef(0.5f, 0.5f, 0.0f);
|
||||
drawTexturedModalRect((width / 2 - 100), (beginOffset + 142), 90, 222, 16, 16);
|
||||
EaglerAdapter.glPopMatrix();
|
||||
}
|
||||
|
||||
EaglerAdapter.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
|
||||
drawString(fontRenderer, new String(License.line50), width / 2 - 75, beginOffset + 147, 0xEEEEEE);
|
||||
}
|
||||
|
||||
protected void mouseClicked(int par1, int par2, int par3) {
|
||||
super.mouseClicked(par1, par2, par3);
|
||||
if(width / 2 - 100 < par1 && width / 2 - 83 > par1 && beginOffset + 142 < par2 && beginOffset + 159 > par2) {
|
||||
this.mc.sndManager.playSoundFX("random.click", 1.0F, 1.0F);
|
||||
hasCheckedBox = !hasCheckedBox;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
package net.lax1dude.eaglercraft;
|
||||
|
||||
import net.minecraft.src.GuiScreen;
|
||||
|
||||
public class GuiScreenLicenseDeclined extends GuiScreen {
|
||||
|
||||
public void drawScreen(int mx, int my, float par3) {
|
||||
this.drawDefaultBackground();
|
||||
drawCenteredString(fontRenderer, new String(License.line70), width / 2, height / 3 - 10, 0xFFFFFF);
|
||||
drawCenteredString(fontRenderer, new String(License.line71), width / 2, height / 3 + 18, 0xFF7777);
|
||||
drawCenteredString(fontRenderer, new String(License.line72), width / 2, height / 3 + 35, 0x666666);
|
||||
}
|
||||
}
|
129
src/main/java/net/lax1dude/eaglercraft/License.java
Normal file
129
src/main/java/net/lax1dude/eaglercraft/License.java
Normal file
|
@ -0,0 +1,129 @@
|
|||
package net.lax1dude.eaglercraft;
|
||||
|
||||
public class License {
|
||||
|
||||
/*
|
||||
* This is the text on the 'License Agreement' screen
|
||||
* It is encoded to stop people from easily modifying it
|
||||
* in classes.js via find/replace in a text editor
|
||||
*/
|
||||
|
||||
static final byte[] line00 = new byte[] {
|
||||
(byte)76,(byte)105,(byte)99,(byte)101,(byte)110,(byte)115,(byte)101,(byte)32,(byte)39,(byte)65,
|
||||
(byte)103,(byte)114,(byte)101,(byte)101,(byte)109,(byte)101,(byte)110,(byte)116,(byte)39
|
||||
};
|
||||
|
||||
static final byte[] line10 = new byte[] {
|
||||
(byte)69,(byte)97,(byte)103,(byte)108,(byte)101,(byte)114,(byte)99,(byte)114,(byte)97,(byte)102,
|
||||
(byte)116,(byte)32,(byte)105,(byte)115,(byte)32,(byte)194,(byte)167,(byte)100,(byte)102,(byte)114,
|
||||
(byte)101,(byte)101,(byte)32,(byte)115,(byte)111,(byte)102,(byte)116,(byte)119,(byte)97,(byte)114,
|
||||
(byte)101,(byte)44,(byte)194,(byte)167,(byte)114,(byte)32,(byte)105,(byte)102,(byte)32,(byte)115,
|
||||
(byte)111,(byte)109,(byte)101,(byte)111,(byte)110,(byte)101,(byte)32,(byte)105,(byte)110,(byte)116,
|
||||
(byte)101,(byte)110,(byte)116,(byte)105,(byte)111,(byte)110,(byte)97,(byte)108,(byte)108,(byte)121,
|
||||
(byte)32,(byte)99,(byte)104,(byte)97,(byte)114,(byte)103,(byte)101,(byte)100
|
||||
};
|
||||
|
||||
static final byte[] line11 = new byte[] {(byte)121,(byte)111,(byte)117,(byte)32,(byte)97,(byte)110,
|
||||
(byte)121,(byte)32,(byte)115,(byte)117,(byte)109,(byte)32,(byte)111,(byte)102,(byte)32,(byte)109,
|
||||
(byte)111,(byte)110,(byte)101,(byte)121,(byte)32,(byte)116,(byte)111,(byte)32,(byte)103,(byte)97,
|
||||
(byte)105,(byte)110,(byte)32,(byte)97,(byte)99,(byte)99,(byte)101,(byte)115,(byte)115,(byte)32,
|
||||
(byte)116,(byte)111,(byte)32,(byte)116,(byte)104,(byte)105,(byte)115,(byte)32,(byte)102,(byte)105,
|
||||
(byte)108,(byte)101,(byte)44,(byte)32,(byte)121,(byte)111,(byte)117,(byte)32,(byte)97,(byte)114,
|
||||
(byte)101,(byte)32,(byte)97,(byte)110
|
||||
};
|
||||
|
||||
static final byte[] line12 = new byte[] {(byte)105,(byte)100,(byte)105,(byte)111,(byte)116,(byte)32,
|
||||
(byte)97,(byte)110,(byte)100,(byte)32,(byte)97,(byte)32,(byte)118,(byte)105,(byte)99,(byte)116,
|
||||
(byte)105,(byte)109,(byte)32,(byte)111,(byte)102,(byte)32,(byte)112,(byte)105,(byte)114,(byte)97,
|
||||
(byte)99,(byte)121,(byte)46,(byte)32,(byte)83,(byte)116,(byte)111,(byte)112,(byte)32,(byte)112,
|
||||
(byte)108,(byte)97,(byte)121,(byte)105,(byte)110,(byte)103,(byte)32,(byte)121,(byte)111,(byte)117,
|
||||
(byte)114,(byte)115,(byte)101,(byte)108,(byte)102,(byte)46
|
||||
};
|
||||
|
||||
static final byte[] line20 = new byte[] {
|
||||
(byte)67,(byte)108,(byte)105,(byte)99,(byte)107,(byte)32,(byte)39,(byte)70,(byte)111,(byte)114,
|
||||
(byte)107,(byte)32,(byte)111,(byte)110,(byte)32,(byte)71,(byte)105,(byte)116,(byte)104,(byte)117,
|
||||
(byte)98,(byte)39,(byte)32,(byte)111,(byte)110,(byte)32,(byte)116,(byte)104,(byte)101,(byte)32,
|
||||
(byte)109,(byte)97,(byte)105,(byte)110,(byte)32,(byte)109,(byte)101,(byte)110,(byte)117,(byte)32,
|
||||
(byte)116,(byte)111,(byte)32,(byte)97,(byte)99,(byte)99,(byte)101,(byte)115,(byte)115,(byte)32,
|
||||
(byte)116,(byte)104,(byte)101,(byte)32,(byte)111,(byte)102,(byte)102,(byte)105,(byte)99,(byte)97,(byte)108
|
||||
};
|
||||
|
||||
static final byte[] line21 = new byte[] {
|
||||
(byte)115,(byte)111,(byte)117,(byte)114,(byte)99,(byte)101,(byte)32,(byte)99,(byte)111,(byte)100,
|
||||
(byte)101,(byte)32,(byte)116,(byte)111,(byte)32,(byte)100,(byte)111,(byte)119,(byte)110,(byte)108,
|
||||
(byte)111,(byte)97,(byte)100,(byte)32,(byte)116,(byte)104,(byte)105,(byte)115,(byte)32,(byte)101,
|
||||
(byte)100,(byte)117,(byte)99,(byte)97,(byte)116,(byte)105,(byte)111,(byte)110,(byte)97,(byte)108,
|
||||
(byte)32,(byte)112,(byte)114,(byte)111,(byte)106,(byte)101,(byte)99,(byte)116,(byte)32,(byte)108,
|
||||
(byte)101,(byte)103,(byte)105,(byte)116,(byte)105,(byte)109,(byte)97,(byte)116,(byte)101,(byte)108,(byte)121
|
||||
};
|
||||
|
||||
static final byte[] line30 = new byte[] {
|
||||
(byte)73,(byte)32,(byte)97,(byte)109,(byte)32,(byte)97,(byte)119,(byte)97,(byte)114,(byte)101,
|
||||
(byte)32,(byte)116,(byte)104,(byte)97,(byte)116,(byte)32,(byte)116,(byte)104,(byte)105,(byte)115,
|
||||
(byte)32,(byte)112,(byte)114,(byte)111,(byte)106,(byte)101,(byte)99,(byte)116,(byte)32,(byte)118,
|
||||
(byte)105,(byte)111,(byte)108,(byte)97,(byte)116,(byte)101,(byte)100,(byte)32,(byte)77,(byte)111,
|
||||
(byte)106,(byte)97,(byte)110,(byte)103,(byte)39,(byte)115,(byte)32,(byte)84,(byte)101,(byte)114,
|
||||
(byte)109,(byte)115,(byte)32,(byte)111,(byte)102,(byte)32,(byte)83,(byte)101,(byte)114,(byte)118,
|
||||
(byte)105,(byte)99,(byte)101
|
||||
};
|
||||
|
||||
static final byte[] line31 = new byte[] {
|
||||
(byte)84,(byte)104,(byte)105,(byte)115,(byte)32,(byte)105,(byte)115,(byte)32,(byte)103,(byte)111,
|
||||
(byte)105,(byte)110,(byte)103,(byte)32,(byte)116,(byte)111,(byte)32,(byte)99,(byte)104,(byte)97,
|
||||
(byte)110,(byte)103,(byte)101,(byte)32,(byte)105,(byte)110,(byte)32,(byte)97,(byte)32,(byte)102,
|
||||
(byte)101,(byte)119,(byte)32,(byte)119,(byte)101,(byte)101,(byte)107,(byte)115,(byte)44,(byte)32,
|
||||
(byte)119,(byte)104,(byte)101,(byte)110,(byte)32,(byte)73,(byte)32,(byte)99,(byte)111,(byte)110,
|
||||
(byte)118,(byte)101,(byte)114,(byte)116,(byte)32,(byte)116,(byte)104,(byte)105,(byte)115,(byte)32,
|
||||
(byte)103,(byte)97,(byte)109,(byte)101
|
||||
};
|
||||
|
||||
static final byte[] line32 = new byte[] {
|
||||
(byte)105,(byte)110,(byte)116,(byte)111,(byte)32,(byte)97,(byte)110,(byte)32,(byte)111,(byte)110,
|
||||
(byte)108,(byte)105,(byte)110,(byte)101,(byte)45,(byte)109,(byte)111,(byte)100,(byte)101,(byte)32,
|
||||
(byte)112,(byte)108,(byte)117,(byte)103,(byte)105,(byte)110,(byte)32,(byte)116,(byte)104,(byte)97,
|
||||
(byte)116,(byte)32,(byte)114,(byte)101,(byte)113,(byte)117,(byte)105,(byte)114,(byte)101,(byte)115,
|
||||
(byte)32,(byte)97,(byte)32,(byte)109,(byte)105,(byte)99,(byte)114,(byte)111,(byte)115,(byte)111,
|
||||
(byte)102,(byte)116,(byte)32,(byte)97,(byte)99,(byte)99,(byte)111,(byte)117,(byte)110,(byte)116,(byte)46
|
||||
};
|
||||
|
||||
static final byte[] line40 = new byte[] {
|
||||
(byte)85,(byte)110,(byte)116,(byte)105,(byte)108,(byte)32,(byte)116,(byte)104,(byte)101,(byte)110,
|
||||
(byte)44,(byte)32,(byte)68,(byte)79,(byte)32,(byte)78,(byte)79,(byte)84,(byte)32,(byte)66,(byte)85,
|
||||
(byte)89,(byte)32,(byte)79,(byte)82,(byte)32,(byte)83,(byte)69,(byte)76,(byte)76,(byte)32,(byte)65,
|
||||
(byte)67,(byte)67,(byte)69,(byte)83,(byte)83,(byte)32,(byte)84,(byte)79,(byte)32,(byte)84,(byte)72,
|
||||
(byte)73,(byte)83,(byte)32,(byte)80,(byte)82,(byte)79,(byte)74,(byte)69,(byte)67,(byte)84
|
||||
};
|
||||
|
||||
static final byte[] line50 = new byte[] {
|
||||
(byte)73,(byte)32,(byte)117,(byte)110,(byte)100,(byte)101,(byte)114,(byte)115,(byte)116,(byte)97,
|
||||
(byte)110,(byte)100,(byte)32,(byte)97,(byte)110,(byte)100,(byte)32,(byte)107,(byte)110,(byte)111,
|
||||
(byte)119,(byte)32,(byte)104,(byte)111,(byte)119,(byte)32,(byte)116,(byte)111,(byte)32,(byte)114,
|
||||
(byte)101,(byte)97,(byte)100
|
||||
};
|
||||
|
||||
static final byte[] line60 = new byte[] {(byte)65,(byte)99,(byte)99,(byte)101,(byte)112,(byte)116};
|
||||
|
||||
static final byte[] line61 = new byte[] {(byte)68,(byte)101,(byte)99,(byte)108,(byte)105,(byte)110,(byte)101};
|
||||
|
||||
static final byte[] line70 = new byte[] {
|
||||
(byte)84,(byte)101,(byte)114,(byte)109,(byte)115,(byte)32,(byte)111,(byte)102,(byte)32,(byte)83,
|
||||
(byte)101,(byte)114,(byte)118,(byte)105,(byte)99,(byte)101,(byte)32,(byte)68,(byte)101,(byte)99,
|
||||
(byte)108,(byte)105,(byte)110,(byte)101,(byte)100
|
||||
};
|
||||
|
||||
static final byte[] line71 = new byte[] {
|
||||
(byte)121,(byte)111,(byte)117,(byte)32,(byte)99,(byte)97,(byte)110,(byte)110,(byte)111,(byte)116,
|
||||
(byte)32,(byte)117,(byte)115,(byte)101,(byte)32,(byte)116,(byte)104,(byte)105,(byte)115,(byte)32,
|
||||
(byte)115,(byte)111,(byte)102,(byte)116,(byte)119,(byte)97,(byte)114,(byte)101,(byte)32,(byte)105,
|
||||
(byte)102,(byte)32,(byte)121,(byte)111,(byte)117,(byte)32,(byte)100,(byte)111,(byte)32,(byte)110,
|
||||
(byte)111,(byte)116,(byte)32,(byte)97,(byte)99,(byte)99,(byte)101,(byte)112,(byte)116
|
||||
};
|
||||
|
||||
static final byte[] line72 = new byte[] {
|
||||
(byte)114,(byte)101,(byte)102,(byte)114,(byte)101,(byte)115,(byte)104,(byte)32,(byte)116,(byte)104,
|
||||
(byte)101,(byte)32,(byte)112,(byte)97,(byte)103,(byte)101,(byte)32,(byte)116,(byte)111,(byte)32,
|
||||
(byte)116,(byte)114,(byte)121,(byte)32,(byte)97,(byte)103,(byte)97,(byte)105,(byte)110
|
||||
};
|
||||
|
||||
}
|
|
@ -16,116 +16,116 @@ import net.lax1dude.eaglercraft.glemu.vector.Vector4f;
|
|||
public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
||||
public static boolean glBlendEnabled=false;
|
||||
|
||||
public static final int GL_ZERO = 0;
|
||||
public static final int GL_ONE = 1;
|
||||
public static final int GL_TEXTURE_2D = 2;
|
||||
public static final int GL_SMOOTH = 3;
|
||||
public static final int GL_DEPTH_TEST = 4;
|
||||
public static final int GL_LEQUAL = 5;
|
||||
public static final int GL_ALPHA_TEST = 6;
|
||||
public static final int GL_GREATER = 7;
|
||||
public static final int GL_BACK = 8;
|
||||
public static final int GL_PROJECTION = 9;
|
||||
public static final int GL_MODELVIEW = 10;
|
||||
public static final int GL_COLOR_BUFFER_BIT = 1;
|
||||
public static final int GL_DEPTH_BUFFER_BIT = 2;
|
||||
public static final int GL_LIGHTING = 13;
|
||||
public static final int GL_FOG = 14;
|
||||
public static final int GL_COLOR_MATERIAL = 15;
|
||||
public static final int GL_BLEND = 16;
|
||||
public static final int GL_RGBA = 18;
|
||||
public static final int GL_UNSIGNED_BYTE = 19;
|
||||
public static final int GL_TEXTURE_WIDTH = 20;
|
||||
public static final int GL_LIGHT0 = 21;
|
||||
public static final int GL_LIGHT1 = 22;
|
||||
public static final int GL_POSITION = 30;
|
||||
public static final int GL_DIFFUSE = 31;
|
||||
public static final int GL_SPECULAR = 32;
|
||||
public static final int GL_AMBIENT = 33;
|
||||
public static final int GL_FLAT = 34;
|
||||
public static final int GL_LIGHT_MODEL_AMBIENT = 35;
|
||||
public static final int GL_FRONT_AND_BACK = 36;
|
||||
public static final int GL_AMBIENT_AND_DIFFUSE = 37;
|
||||
public static final int GL_MODELVIEW_MATRIX = 38;
|
||||
public static final int GL_PROJECTION_MATRIX = 39;
|
||||
public static final int GL_VIEWPORT = 40;
|
||||
public static final int GL_RESCALE_NORMAL = 41;
|
||||
public static final int GL_SRC_ALPHA = 42;
|
||||
public static final int GL_ONE_MINUS_SRC_ALPHA = 43;
|
||||
public static final int GL_ONE_MINUS_DST_COLOR = 44;
|
||||
public static final int GL_ONE_MINUS_SRC_COLOR = 45;
|
||||
public static final int GL_CULL_FACE = 46;
|
||||
public static final int GL_TEXTURE_MIN_FILTER = 47;
|
||||
public static final int GL_TEXTURE_MAG_FILTER = 48;
|
||||
public static final int GL_LINEAR = 49;
|
||||
public static final int GL_COLOR_LOGIC_OP = 50;
|
||||
public static final int GL_OR_REVERSE = 51;
|
||||
public static final int GL_EQUAL = 52;
|
||||
public static final int GL_SRC_COLOR = 53;
|
||||
public static final int GL_TEXTURE = 54;
|
||||
public static final int GL_FRONT = 55;
|
||||
public static final int GL_COMPILE = 56;
|
||||
public static final int GL_S = 57;
|
||||
public static final int GL_T = 58;
|
||||
public static final int GL_R = 59;
|
||||
public static final int GL_Q = 60;
|
||||
public static final int GL_TEXTURE_GEN_S = 61;
|
||||
public static final int GL_TEXTURE_GEN_T = 62;
|
||||
public static final int GL_TEXTURE_GEN_R = 63;
|
||||
public static final int GL_TEXTURE_GEN_Q = 64;
|
||||
public static final int GL_TEXTURE_GEN_MODE = 65;
|
||||
public static final int GL_OBJECT_PLANE = 66;
|
||||
public static final int GL_EYE_PLANE = 67;
|
||||
public static final int GL_OBJECT_LINEAR = 68;
|
||||
public static final int GL_EYE_LINEAR = 69;
|
||||
public static final int GL_NEAREST = 70;
|
||||
public static final int GL_CLAMP = 71;
|
||||
public static final int GL_TEXTURE_WRAP_S = 72;
|
||||
public static final int GL_TEXTURE_WRAP_T = 73;
|
||||
public static final int GL_REPEAT = 74;
|
||||
public static final int GL_BGRA = 75;
|
||||
public static final int GL_UNSIGNED_INT_8_8_8_8_REV = 76;
|
||||
public static final int GL_DST_COLOR = 77;
|
||||
public static final int GL_POLYGON_OFFSET_FILL = 78;
|
||||
public static final int GL_NORMALIZE = 79;
|
||||
public static final int GL_DST_ALPHA = 80;
|
||||
public static final int GL_FLOAT = 81;
|
||||
public static final int GL_TEXTURE_COORD_ARRAY = 82;
|
||||
public static final int GL_SHORT = 83;
|
||||
public static final int GL_COLOR_ARRAY = 84;
|
||||
public static final int GL_VERTEX_ARRAY = 85;
|
||||
public static final int GL_TRIANGLES = 86;
|
||||
public static final int GL_NORMAL_ARRAY = 87;
|
||||
public static final int GL_TEXTURE_3D = 88;
|
||||
public static final int GL_FOG_MODE = 89;
|
||||
public static final int GL_EXP = 90;
|
||||
public static final int GL_FOG_DENSITY = 91;
|
||||
public static final int GL_FOG_START = 92;
|
||||
public static final int GL_FOG_END = 93;
|
||||
public static final int GL_FOG_COLOR = 94;
|
||||
public static final int GL_TRIANGLE_STRIP = 95;
|
||||
public static final int GL_PACK_ALIGNMENT = 96;
|
||||
public static final int GL_UNPACK_ALIGNMENT = 97;
|
||||
public static final int GL_QUADS = 98;
|
||||
public static final int GL_TEXTURE0 = 99;
|
||||
public static final int GL_TEXTURE1 = 100;
|
||||
public static final int GL_TEXTURE2 = 101;
|
||||
public static final int GL_TEXTURE3 = 102;
|
||||
public static final int GL_INVALID_ENUM = 140;
|
||||
public static final int GL_INVALID_VALUE= 141;
|
||||
public static final int GL_INVALID_OPERATION = 142;
|
||||
public static final int GL_OUT_OF_MEMORY = 143;
|
||||
public static final int GL_CONTEXT_LOST_WEBGL = 144;
|
||||
public static final int GL_TRIANGLE_FAN = 145;
|
||||
public static final int GL_LINE_STRIP = 146;
|
||||
public static final int GL_LIGHTING2 = 147;
|
||||
public static final int GL_LINES = 148;
|
||||
public static final int GL_NEAREST_MIPMAP_LINEAR = 149;
|
||||
public static final int GL_TEXTURE_MAX_ANISOTROPY = 150;
|
||||
public static final int GL_TEXTURE_MAX_LEVEL = 151;
|
||||
public static final int GL_LINEAR_MIPMAP_LINEAR = 152;
|
||||
public static final int GL_LINEAR_MIPMAP_NEAREST = 153;
|
||||
public static final int GL_NEAREST_MIPMAP_NEAREST = 154;
|
||||
public static final int GL_ZERO = RealOpenGLEnums.GL_ZERO;
|
||||
public static final int GL_ONE = RealOpenGLEnums.GL_ONE;
|
||||
public static final int GL_TEXTURE_2D = RealOpenGLEnums.GL_TEXTURE_2D;
|
||||
public static final int GL_SMOOTH = RealOpenGLEnums.GL_SMOOTH;
|
||||
public static final int GL_DEPTH_TEST = RealOpenGLEnums.GL_DEPTH_TEST;
|
||||
public static final int GL_LEQUAL = RealOpenGLEnums.GL_LEQUAL;
|
||||
public static final int GL_ALPHA_TEST = RealOpenGLEnums.GL_ALPHA_TEST;
|
||||
public static final int GL_GREATER = RealOpenGLEnums.GL_GREATER;
|
||||
public static final int GL_BACK = RealOpenGLEnums.GL_BACK;
|
||||
public static final int GL_PROJECTION = RealOpenGLEnums.GL_PROJECTION;
|
||||
public static final int GL_MODELVIEW = RealOpenGLEnums.GL_MODELVIEW;
|
||||
public static final int GL_COLOR_BUFFER_BIT = RealOpenGLEnums.GL_COLOR_BUFFER_BIT;
|
||||
public static final int GL_DEPTH_BUFFER_BIT = RealOpenGLEnums.GL_DEPTH_BUFFER_BIT;
|
||||
public static final int GL_LIGHTING = RealOpenGLEnums.GL_LIGHTING;
|
||||
public static final int GL_FOG = RealOpenGLEnums.GL_FOG;
|
||||
public static final int GL_COLOR_MATERIAL = RealOpenGLEnums.GL_COLOR_MATERIAL;
|
||||
public static final int GL_BLEND = RealOpenGLEnums.GL_BLEND;
|
||||
public static final int GL_RGBA = RealOpenGLEnums.GL_RGBA;
|
||||
public static final int GL_UNSIGNED_BYTE = RealOpenGLEnums.GL_UNSIGNED_BYTE;
|
||||
public static final int GL_TEXTURE_WIDTH = RealOpenGLEnums.GL_TEXTURE_WIDTH;
|
||||
public static final int GL_LIGHT0 = RealOpenGLEnums.GL_LIGHT0;
|
||||
public static final int GL_LIGHT1 = RealOpenGLEnums.GL_LIGHT1;
|
||||
public static final int GL_POSITION = RealOpenGLEnums.GL_POSITION;
|
||||
public static final int GL_DIFFUSE = RealOpenGLEnums.GL_DIFFUSE;
|
||||
public static final int GL_SPECULAR = RealOpenGLEnums.GL_SPECULAR;
|
||||
public static final int GL_AMBIENT = RealOpenGLEnums.GL_AMBIENT;
|
||||
public static final int GL_FLAT = RealOpenGLEnums.GL_FLAT;
|
||||
public static final int GL_LIGHT_MODEL_AMBIENT = RealOpenGLEnums.GL_LIGHT_MODEL_AMBIENT;
|
||||
public static final int GL_FRONT_AND_BACK = RealOpenGLEnums.GL_FRONT_AND_BACK;
|
||||
public static final int GL_AMBIENT_AND_DIFFUSE = RealOpenGLEnums.GL_AMBIENT_AND_DIFFUSE;
|
||||
public static final int GL_MODELVIEW_MATRIX = RealOpenGLEnums.GL_MODELVIEW_MATRIX;
|
||||
public static final int GL_PROJECTION_MATRIX = RealOpenGLEnums.GL_PROJECTION_MATRIX;
|
||||
public static final int GL_VIEWPORT = RealOpenGLEnums.GL_VIEWPORT;
|
||||
public static final int GL_RESCALE_NORMAL = RealOpenGLEnums.GL_RESCALE_NORMAL;
|
||||
public static final int GL_SRC_ALPHA = RealOpenGLEnums.GL_SRC_ALPHA;
|
||||
public static final int GL_ONE_MINUS_SRC_ALPHA = RealOpenGLEnums.GL_ONE_MINUS_SRC_ALPHA;
|
||||
public static final int GL_ONE_MINUS_DST_COLOR = RealOpenGLEnums.GL_ONE_MINUS_DST_COLOR;
|
||||
public static final int GL_ONE_MINUS_SRC_COLOR = RealOpenGLEnums.GL_ONE_MINUS_SRC_COLOR;
|
||||
public static final int GL_CULL_FACE = RealOpenGLEnums.GL_CULL_FACE;
|
||||
public static final int GL_TEXTURE_MIN_FILTER = RealOpenGLEnums.GL_TEXTURE_MIN_FILTER;
|
||||
public static final int GL_TEXTURE_MAG_FILTER = RealOpenGLEnums.GL_TEXTURE_MAG_FILTER;
|
||||
public static final int GL_LINEAR = RealOpenGLEnums.GL_LINEAR;
|
||||
public static final int GL_COLOR_LOGIC_OP = RealOpenGLEnums.GL_COLOR_LOGIC_OP;
|
||||
public static final int GL_OR_REVERSE = RealOpenGLEnums.GL_OR_REVERSE;
|
||||
public static final int GL_EQUAL = RealOpenGLEnums.GL_EQUAL;
|
||||
public static final int GL_SRC_COLOR = RealOpenGLEnums.GL_SRC_COLOR;
|
||||
public static final int GL_TEXTURE = RealOpenGLEnums.GL_TEXTURE;
|
||||
public static final int GL_FRONT = RealOpenGLEnums.GL_FRONT;
|
||||
public static final int GL_COMPILE = RealOpenGLEnums.GL_COMPILE;
|
||||
public static final int GL_S = RealOpenGLEnums.GL_S;
|
||||
public static final int GL_T = RealOpenGLEnums.GL_T;
|
||||
public static final int GL_R = RealOpenGLEnums.GL_R;
|
||||
public static final int GL_Q = RealOpenGLEnums.GL_Q;
|
||||
public static final int GL_TEXTURE_GEN_S = RealOpenGLEnums.GL_TEXTURE_GEN_S;
|
||||
public static final int GL_TEXTURE_GEN_T = RealOpenGLEnums.GL_TEXTURE_GEN_T;
|
||||
public static final int GL_TEXTURE_GEN_R = RealOpenGLEnums.GL_TEXTURE_GEN_R;
|
||||
public static final int GL_TEXTURE_GEN_Q = RealOpenGLEnums.GL_TEXTURE_GEN_Q;
|
||||
public static final int GL_TEXTURE_GEN_MODE = RealOpenGLEnums.GL_TEXTURE_GEN_MODE;
public static final int GL_OBJECT_PLANE = RealOpenGLEnums.GL_OBJECT_PLANE;
public static final int GL_EYE_PLANE = RealOpenGLEnums.GL_EYE_PLANE;
public static final int GL_OBJECT_LINEAR = RealOpenGLEnums.GL_OBJECT_LINEAR;
public static final int GL_EYE_LINEAR = RealOpenGLEnums.GL_EYE_LINEAR;
public static final int GL_NEAREST = RealOpenGLEnums.GL_NEAREST;
public static final int GL_CLAMP = RealOpenGLEnums.GL_CLAMP_TO_EDGE;
public static final int GL_TEXTURE_WRAP_S = RealOpenGLEnums.GL_TEXTURE_WRAP_S;
public static final int GL_TEXTURE_WRAP_T = RealOpenGLEnums.GL_TEXTURE_WRAP_T;
public static final int GL_REPEAT = RealOpenGLEnums.GL_REPEAT;
public static final int GL_BGRA = RealOpenGLEnums.GL_BGRA;
public static final int GL_UNSIGNED_INT_8_8_8_8_REV = RealOpenGLEnums.GL_UNSIGNED_INT_8_8_8_8_REV;
public static final int GL_DST_COLOR = RealOpenGLEnums.GL_DST_COLOR;
public static final int GL_POLYGON_OFFSET_FILL = RealOpenGLEnums.GL_POLYGON_OFFSET_FILL;
public static final int GL_NORMALIZE = RealOpenGLEnums.GL_NORMALIZE;
public static final int GL_DST_ALPHA = RealOpenGLEnums.GL_DST_ALPHA;
public static final int GL_FLOAT = RealOpenGLEnums.GL_FLOAT;
public static final int GL_TEXTURE_COORD_ARRAY = RealOpenGLEnums.GL_TEXTURE_COORD_ARRAY;
public static final int GL_SHORT = RealOpenGLEnums.GL_SHORT;
public static final int GL_COLOR_ARRAY = RealOpenGLEnums.GL_COLOR_ARRAY;
public static final int GL_VERTEX_ARRAY = RealOpenGLEnums.GL_VERTEX_ARRAY;
public static final int GL_TRIANGLES = RealOpenGLEnums.GL_TRIANGLES;
public static final int GL_NORMAL_ARRAY = RealOpenGLEnums.GL_NORMAL_ARRAY;
public static final int GL_TEXTURE_3D = RealOpenGLEnums.GL_TEXTURE_3D;
public static final int GL_FOG_MODE = RealOpenGLEnums.GL_FOG_MODE;
public static final int GL_EXP = RealOpenGLEnums.GL_EXP;
public static final int GL_FOG_DENSITY = RealOpenGLEnums.GL_FOG_DENSITY;
public static final int GL_FOG_START = RealOpenGLEnums.GL_FOG_START;
public static final int GL_FOG_END = RealOpenGLEnums.GL_FOG_END;
public static final int GL_FOG_COLOR = RealOpenGLEnums.GL_FOG_COLOR;
public static final int GL_TRIANGLE_STRIP = RealOpenGLEnums.GL_TRIANGLE_STRIP;
public static final int GL_PACK_ALIGNMENT = RealOpenGLEnums.GL_PACK_ALIGNMENT;
public static final int GL_UNPACK_ALIGNMENT = RealOpenGLEnums.GL_UNPACK_ALIGNMENT;
public static final int GL_QUADS = RealOpenGLEnums.GL_QUADS;
public static final int GL_TEXTURE0 = RealOpenGLEnums.GL_TEXTURE0;
public static final int GL_TEXTURE1 = RealOpenGLEnums.GL_TEXTURE1;
public static final int GL_TEXTURE2 = RealOpenGLEnums.GL_TEXTURE2;
public static final int GL_TEXTURE3 = RealOpenGLEnums.GL_TEXTURE3;
public static final int GL_INVALID_ENUM = RealOpenGLEnums.GL_INVALID_ENUM;
public static final int GL_INVALID_VALUE = RealOpenGLEnums.GL_INVALID_VALUE;
public static final int GL_INVALID_OPERATION = RealOpenGLEnums.GL_INVALID_OPERATION;
public static final int GL_OUT_OF_MEMORY = RealOpenGLEnums.GL_OUT_OF_MEMORY;
public static final int GL_CONTEXT_LOST_WEBGL = -100;
public static final int GL_TRIANGLE_FAN = RealOpenGLEnums.GL_TRIANGLE_FAN;
public static final int GL_LINE_STRIP = RealOpenGLEnums.GL_LINE_STRIP;
public static final int EAG_SWAP_RB = -101;
public static final int GL_LINES = RealOpenGLEnums.GL_LINES;
public static final int GL_NEAREST_MIPMAP_LINEAR = RealOpenGLEnums.GL_NEAREST_MIPMAP_LINEAR;
public static final int GL_TEXTURE_MAX_ANISOTROPY = -103;
public static final int GL_TEXTURE_MAX_LEVEL = RealOpenGLEnums.GL_TEXTURE_MAX_LEVEL;
public static final int GL_LINEAR_MIPMAP_LINEAR = RealOpenGLEnums.GL_LINEAR_MIPMAP_LINEAR;
public static final int GL_LINEAR_MIPMAP_NEAREST = RealOpenGLEnums.GL_LINEAR_MIPMAP_NEAREST;
public static final int GL_NEAREST_MIPMAP_NEAREST = RealOpenGLEnums.GL_NEAREST_MIPMAP_NEAREST;

public static final boolean isWebGL = _wisWebGL();

@ -167,6 +167,7 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
private static TextureGL boundTexture0 = null;
|
||||
private static boolean enableAnisotropicPatch = false;
|
||||
private static boolean hintAnisotropicPatch = false;
|
||||
private static boolean swapRB = false;
|
||||
|
||||
public static final void anisotropicPatch(boolean e) {
|
||||
enableAnisotropicPatch = e;
|
||||
|
@ -293,8 +294,6 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
case GL_LIGHTING:
|
||||
enableLighting = true;
|
||||
break;
|
||||
case GL_LIGHTING2:
|
||||
break;
|
||||
case GL_ALPHA_TEST:
|
||||
enableAlphaTest = true;
|
||||
break;
|
||||
|
@ -312,6 +311,10 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
break;
|
||||
case GL_POLYGON_OFFSET_FILL:
|
||||
_wglEnable(_wGL_POLYGON_OFFSET_FILL);
|
||||
break;
|
||||
case EAG_SWAP_RB:
|
||||
swapRB = true;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
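EAG_SWAP_RB is not a real GL capability: enabling it only flips the swapRB flag, which makes the shader-mode lookup pick a CC_swap_rb variant of the fixed function shader that samples textures in RGBA rather than BGRA order. A minimal usage sketch, mirroring how MapItemRenderer uses it later in this commit (drawQuad() is a hypothetical helper):

EaglerAdapter.glEnable(EaglerAdapter.EAG_SWAP_RB);  // next shader lookup includes FixedFunctionShader.SWAP_RB
EaglerAdapter.bindVideoTexture();                   // HTML5 video frames are uploaded in RGBA order
drawQuad();                                         // hypothetical draw call for the textured quad
EaglerAdapter.glDisable(EaglerAdapter.EAG_SWAP_RB); // restore the default BGRA sampling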
|
||||
|
@ -336,14 +339,7 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
alphaThresh = p2;
|
||||
}
|
||||
public static final void glCullFace(int p1) {
|
||||
int f = _wGL_BACK;
|
||||
switch(p1) {
|
||||
case GL_BACK: f = _wGL_BACK; break;
|
||||
case GL_FRONT: f = _wGL_FRONT; break;
|
||||
case GL_FRONT_AND_BACK: f = _wGL_FRONT_AND_BACK; break;
|
||||
default: break;
|
||||
}
|
||||
_wglCullFace(f);
|
||||
_wglCullFace(p1);
|
||||
}
|
||||
public static final void glMatrixMode(int p1) {
|
||||
matrixMode = p1;
|
||||
|
@ -366,14 +362,7 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
_wglViewport(p1, p2, p3, p4);
|
||||
}
|
||||
public static final void glClear(int p1) {
|
||||
int f = 0;
|
||||
if((p1 & GL_COLOR_BUFFER_BIT) == GL_COLOR_BUFFER_BIT) {
|
||||
f = f | _wGL_COLOR_BUFFER_BIT;
|
||||
}
|
||||
if((p1 & GL_DEPTH_BUFFER_BIT) == GL_DEPTH_BUFFER_BIT) {
|
||||
f = f | _wGL_DEPTH_BUFFER_BIT;
|
||||
}
|
||||
_wglClear(f);
|
||||
_wglClear(p1);
|
||||
}
|
||||
public static final void glOrtho(float left, float right, float bottom, float top, float zNear, float zFar) {
|
||||
Matrix4f res = getMatrix();
|
||||
|
@ -430,8 +419,6 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
case GL_LIGHTING:
|
||||
enableLighting = false;
|
||||
break;
|
||||
case GL_LIGHTING2:
|
||||
break;
|
||||
case GL_ALPHA_TEST:
|
||||
enableAlphaTest = false;
|
||||
break;
|
||||
|
@ -449,6 +436,10 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
break;
|
||||
case GL_POLYGON_OFFSET_FILL:
|
||||
_wglDisable(_wGL_POLYGON_OFFSET_FILL);
|
||||
break;
|
||||
case EAG_SWAP_RB:
|
||||
swapRB = false;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
@ -461,10 +452,6 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
}
|
||||
public static final int glGetError() {
|
||||
int err = _wglGetError();
|
||||
if(err == _wGL_INVALID_ENUM) return GL_INVALID_ENUM;
|
||||
if(err == _wGL_INVALID_OPERATION) return GL_INVALID_OPERATION;
|
||||
if(err == _wGL_INVALID_VALUE) return GL_INVALID_VALUE;
|
||||
if(err == _wGL_OUT_OF_MEMORY) return GL_OUT_OF_MEMORY;
|
||||
if(err == _wGL_CONTEXT_LOST_WEBGL) return GL_CONTEXT_LOST_WEBGL;
|
||||
return err;
|
||||
}
|
||||
|
@ -601,34 +588,8 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
}
|
||||
}
|
||||
public static final void glBlendFunc(int p1, int p2) {
|
||||
int pp1 = 0;
|
||||
int pp2 = 0;
|
||||
switch(p1) {
|
||||
default:
|
||||
case GL_SRC_ALPHA: pp1 = _wGL_SRC_ALPHA; break;
|
||||
case GL_ONE_MINUS_SRC_ALPHA: pp1 = _wGL_ONE_MINUS_SRC_ALPHA; break;
|
||||
case GL_DST_ALPHA: pp1 = _wGL_DST_ALPHA; break;
|
||||
case GL_DST_COLOR: pp1 = _wGL_DST_COLOR; break;
|
||||
case GL_SRC_COLOR: pp1 = _wGL_SRC_COLOR; break;
|
||||
case GL_ONE_MINUS_SRC_COLOR: pp1 = _wGL_ONE_MINUS_SRC_COLOR; break;
|
||||
case GL_ONE_MINUS_DST_COLOR: pp1 = _wGL_ONE_MINUS_DST_COLOR; break;
|
||||
case GL_ONE: pp1 = _wGL_ONE; break;
|
||||
case GL_ZERO: pp1 = _wGL_ZERO; break;
|
||||
}
|
||||
switch(p2) {
|
||||
default:
|
||||
case GL_SRC_ALPHA: pp2 = _wGL_SRC_ALPHA; break;
|
||||
case GL_ONE_MINUS_SRC_ALPHA: pp2 = _wGL_ONE_MINUS_SRC_ALPHA; break;
|
||||
case GL_DST_ALPHA: pp2 = _wGL_DST_ALPHA; break;
|
||||
case GL_DST_COLOR: pp2 = _wGL_DST_COLOR; break;
|
||||
case GL_SRC_COLOR: pp2 = _wGL_SRC_COLOR; break;
|
||||
case GL_ONE_MINUS_SRC_COLOR: pp2 = _wGL_ONE_MINUS_SRC_COLOR; break;
|
||||
case GL_ONE_MINUS_DST_COLOR: pp1 = _wGL_ONE_MINUS_DST_COLOR; break;
|
||||
case GL_ONE: pp2 = _wGL_ONE; break;
|
||||
case GL_ZERO: pp2 = _wGL_ZERO; break;
|
||||
}
|
||||
fogPremultiply = (p1 == GL_ONE && p2 == GL_ONE_MINUS_SRC_ALPHA);
|
||||
_wglBlendFunc(pp1, pp2);
|
||||
_wglBlendFunc(p1, p2);
|
||||
}
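The deleted lookup tables were only translating between two constant sets that are presumably identical now that the GL_* fields above come from RealOpenGLEnums, so glCullFace, glClear and glBlendFunc can forward their arguments unchanged; as a side effect this retires the pp1/pp2 slip in the old GL_ONE_MINUS_DST_COLOR case of the destination-factor switch. A one-time sanity-check sketch, assuming the _wGL_* values exposed by EaglerAdapterImpl2:

if(GL_SRC_ALPHA != _wGL_SRC_ALPHA || GL_ONE_MINUS_SRC_ALPHA != _wGL_ONE_MINUS_SRC_ALPHA) {
	System.err.println("GL constant mismatch - direct passthrough in glBlendFunc is unsafe");
}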
|
||||
public static final void glDepthMask(boolean p1) {
|
||||
_wglDepthMask(p1);
|
||||
|
@ -658,57 +619,17 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
_wglCopyTexSubImage2D(_wGL_TEXTURE_2D, p2, p3, p4, p5, p6, p7, p8);
|
||||
}
|
||||
public static final void glTexParameteri(int p1, int p2, int p3) {
|
||||
int pp1 = 0;
|
||||
switch(p1) {
|
||||
default:
|
||||
case GL_TEXTURE_2D: pp1 = _wGL_TEXTURE_2D; break;
|
||||
//case GL_TEXTURE_3D: pp1 = _wGL_TEXTURE_3D; break;
|
||||
if(selectedTex == 0 && boundTexture0 != null && p2 == GL_TEXTURE_MAG_FILTER) {
|
||||
boundTexture0.nearest = p3 == GL_NEAREST;
|
||||
}
|
||||
int pp2 = 0;
|
||||
switch(p2) {
|
||||
default:
|
||||
case GL_TEXTURE_MAG_FILTER: pp2 = _wGL_TEXTURE_MAG_FILTER; break;
|
||||
case GL_TEXTURE_MIN_FILTER: pp2 = _wGL_TEXTURE_MIN_FILTER; break;
|
||||
case GL_TEXTURE_WRAP_S: pp2 = _wGL_TEXTURE_WRAP_S; break;
|
||||
case GL_TEXTURE_WRAP_T: pp2 = _wGL_TEXTURE_WRAP_T; break;
|
||||
case GL_TEXTURE_MAX_LEVEL: pp2 = _wGL_TEXTURE_MAX_LEVEL; break;
|
||||
}
|
||||
int pp3 = 0;
|
||||
switch(p3) {
|
||||
default:
|
||||
pp3 = p3; break;
|
||||
case GL_LINEAR: pp3 = _wGL_LINEAR; break;
|
||||
case GL_NEAREST_MIPMAP_LINEAR: pp3 = _wGL_NEAREST_MIPMAP_LINEAR; break;
|
||||
case GL_LINEAR_MIPMAP_LINEAR: pp3 = _wGL_LINEAR_MIPMAP_LINEAR; break;
|
||||
case GL_LINEAR_MIPMAP_NEAREST: pp3 = _wGL_LINEAR_MIPMAP_NEAREST; break;
|
||||
case GL_NEAREST_MIPMAP_NEAREST: pp3 = _wGL_NEAREST_MIPMAP_NEAREST; break;
|
||||
case GL_NEAREST: pp3 = _wGL_NEAREST; break;
|
||||
case GL_REPEAT: pp3 = _wGL_REPEAT; break;
|
||||
case GL_CLAMP: pp3 = _wGL_CLAMP; break;
|
||||
}
|
||||
|
||||
if(selectedTex == 0 && boundTexture0 != null && pp2 == _wGL_TEXTURE_MAG_FILTER) {
|
||||
boundTexture0.nearest = pp3 == _wGL_NEAREST;
|
||||
}
|
||||
_wglTexParameteri(pp1, pp2, pp3);
|
||||
_wglTexParameteri(p1, p2, p3);
|
||||
updateAnisotropicPatch();
|
||||
}
|
||||
public static final void glTexParameterf(int p1, int p2, float p3) {
|
||||
int pp1 = 0;
|
||||
switch(p1) {
|
||||
default:
|
||||
case GL_TEXTURE_2D: pp1 = _wGL_TEXTURE_2D; break;
|
||||
//case GL_TEXTURE_3D: pp1 = _wGL_TEXTURE_3D; break;
|
||||
}
|
||||
int pp2 = 0;
|
||||
switch(p2) {
|
||||
default:
|
||||
case GL_TEXTURE_MAX_ANISOTROPY: pp2 = _wGL_TEXTURE_MAX_ANISOTROPY; break;
|
||||
}
|
||||
if(selectedTex == 0 && boundTexture0 != null && pp2 == _wGL_TEXTURE_MAX_ANISOTROPY) {
|
||||
if(selectedTex == 0 && boundTexture0 != null && p2 == GL_TEXTURE_MAX_ANISOTROPY) {
|
||||
boundTexture0.anisotropic = p3 > 1.0f;
|
||||
}
|
||||
_wglTexParameterf(pp1, pp2, p3);
|
||||
_wglTexParameterf(p1, p2 == GL_TEXTURE_MAX_ANISOTROPY ? _wGL_TEXTURE_MAX_ANISOTROPY : p2, p3);
|
||||
updateAnisotropicPatch();
|
||||
}
|
||||
public static final void glLogicOp(int p1) {
|
||||
|
@ -947,6 +868,7 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
mode = (mode | (enableTexture2D ? FixedFunctionShader.UNIT0 : 0));
|
||||
mode = (mode | (enableTexture2D_1 ? FixedFunctionShader.UNIT1 : 0));
|
||||
mode = (mode | ((enableTexture2D && (enableAnisotropicFix || (hintAnisotropicPatch && enableAnisotropicPatch))) ? FixedFunctionShader.FIX_ANISOTROPIC : 0));
|
||||
mode = (mode | (swapRB ? FixedFunctionShader.SWAP_RB : 0));
|
||||
return mode;
|
||||
}
|
||||
private static final int getShaderModeFlag() {
|
||||
|
@ -962,6 +884,7 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
mode = (mode | (enableTexture2D ? FixedFunctionShader.UNIT0 : 0));
|
||||
mode = (mode | (enableTexture2D_1 ? FixedFunctionShader.UNIT1 : 0));
|
||||
mode = (mode | ((enableTexture2D && (enableAnisotropicFix || (hintAnisotropicPatch && enableAnisotropicPatch))) ? FixedFunctionShader.FIX_ANISOTROPIC : 0));
|
||||
mode = (mode | (swapRB ? FixedFunctionShader.SWAP_RB : 0));
|
||||
return mode;
|
||||
}
|
||||
private static FixedFunctionShader shader = null;
|
||||
|
@ -1034,31 +957,25 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
_wglDrawQuadArrays(p2, p3);
|
||||
triangleDrawn += p3 / 2;
|
||||
}else {
|
||||
int drawMode = 0;
|
||||
switch(p1) {
|
||||
default:
|
||||
case GL_TRIANGLES:
|
||||
drawMode = _wGL_TRIANGLES;
|
||||
triangleDrawn += p3 / 3;
|
||||
break;
|
||||
case GL_TRIANGLE_STRIP:
|
||||
drawMode = _wGL_TRIANGLE_STRIP;
|
||||
triangleDrawn += p3 - 2;
|
||||
break;
|
||||
case GL_TRIANGLE_FAN:
|
||||
drawMode = _wGL_TRIANGLE_FAN;
|
||||
triangleDrawn += p3 - 2;
|
||||
break;
|
||||
case GL_LINE_STRIP:
|
||||
drawMode = _wGL_LINE_STRIP;
|
||||
triangleDrawn += p3 - 1;
|
||||
break;
|
||||
case GL_LINES:
|
||||
drawMode = _wGL_LINES;
|
||||
triangleDrawn += p3 / 2;
|
||||
break;
|
||||
}
|
||||
_wglDrawArrays(drawMode, p2, p3);
|
||||
_wglDrawArrays(p1, p2, p3);
|
||||
}
|
||||
|
||||
shader.unuseProgram();
|
||||
|
@ -1184,19 +1101,12 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
|
||||
public static final void glDrawOcclusionBB(float posX, float posY, float posZ, float sizeX, float sizeY, float sizeZ) {
|
||||
glPushMatrix();
|
||||
glTranslatef(posX - sizeX * 0.0001f, posY - sizeY * 0.0001f, posZ - sizeZ * 0.0001f);
|
||||
glScalef(sizeX * 1.0002f, sizeY * 1.0002f, sizeZ * 1.0002f);
|
||||
glTranslatef(posX - sizeX * 0.01f, posY - sizeY * 0.01f, posZ - sizeZ * 0.01f);
|
||||
glScalef(sizeX * 1.02f, sizeY * 1.02f, sizeZ * 1.02f);
|
||||
matModelV[matModelPointer].store(occlusionModel);
|
||||
_wglUniformMat4fv(occlusion_matrix_m, occlusionModel);
|
||||
_wglDrawArrays(_wGL_TRIANGLES, 0, 36);
|
||||
glPopMatrix();
|
||||
//glPushMatrix();
|
||||
//glTranslatef(posX + sizeX * 0.0001f, posY + sizeY * 0.0001f, posZ + sizeZ * 0.0001f);
|
||||
//glScalef(sizeX * 0.9998f, sizeY * 0.9998f, sizeZ * 0.9998f);
|
||||
//matModelV[matModelPointer].store(occlusionModel);
|
||||
//_wglUniformMat4fv(occlusion_matrix_m, occlusionModel);
|
||||
//_wglDrawArrays(_wGL_TRIANGLES, 0, 36);
|
||||
//glPopMatrix();
|
||||
|
||||
}
|
||||
|
||||
|
@ -1206,7 +1116,12 @@ public class EaglerAdapterGL30 extends EaglerAdapterImpl2 {
|
|||
|
||||
public static final boolean glGetQueryResult(int obj) {
|
||||
QueryGL q = queryObjs.get(obj);
|
||||
return _wglGetQueryObjecti(q, _wGL_QUERY_RESULT_AVAILABLE) == 0 || _wglGetQueryObjecti(q, _wGL_QUERY_RESULT) > 0;
|
||||
return _wglGetQueryObjecti(q, _wGL_QUERY_RESULT) > 0;
|
||||
}
|
||||
|
||||
public static final boolean glGetQueryResultAvailable(int obj) {
|
||||
QueryGL q = queryObjs.get(obj);
|
||||
return _wglGetQueryObjecti(q, _wGL_QUERY_RESULT_AVAILABLE) >= 0;
|
||||
}
|
||||
|
||||
public static final int glGenTextures() {
|
||||
|
|
|
@ -9,7 +9,7 @@ import static net.lax1dude.eaglercraft.glemu.EaglerAdapterGL30.*;
|
|||
|
||||
public class FixedFunctionShader {
|
||||
|
||||
private static final FixedFunctionShader[] instances = new FixedFunctionShader[2048]; //lol
|
||||
private static final FixedFunctionShader[] instances = new FixedFunctionShader[4096]; //lol
|
||||
|
||||
public static void refreshCoreGL() {
|
||||
for(int i = 0; i < instances.length; ++i) {
|
||||
|
@ -32,6 +32,7 @@ public class FixedFunctionShader {
|
|||
public static final int UNIT0 = 256;
|
||||
public static final int UNIT1 = 512;
|
||||
public static final int FIX_ANISOTROPIC = 1024;
|
||||
public static final int SWAP_RB = 2048;
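SWAP_RB claims a twelfth flag bit, which is why the instance cache above grows from 2048 to 4096 entries (2^12 flag combinations). A small sketch of requesting a shader with the new bit, in the style of the getShaderModeFlag methods in EaglerAdapterGL30:

int mode = FixedFunctionShader.UNIT0 | FixedFunctionShader.SWAP_RB;
FixedFunctionShader s = FixedFunctionShader.instance(mode); // compiled with #define CC_swap_rb added to the source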
|
||||
|
||||
public static FixedFunctionShader instance(int i) {
|
||||
FixedFunctionShader s = instances[i];
|
||||
|
@ -47,6 +48,7 @@ public class FixedFunctionShader {
|
|||
boolean CC_unit0 = false;
|
||||
boolean CC_unit1 = false;
|
||||
boolean CC_anisotropic = false;
|
||||
boolean CC_swap_rb = false;
|
||||
if((i & COLOR) == COLOR) {
|
||||
CC_a_color = true;
|
||||
}
|
||||
|
@ -80,7 +82,11 @@ public class FixedFunctionShader {
|
|||
if((i & FIX_ANISOTROPIC) == FIX_ANISOTROPIC) {
|
||||
CC_anisotropic = true;
|
||||
}
|
||||
s = new FixedFunctionShader(i, CC_a_color, CC_a_normal, CC_a_texture0, CC_a_texture1, CC_TEX_GEN_STRQ, CC_lighting, CC_fog, CC_alphatest, CC_unit0, CC_unit1, CC_anisotropic);
|
||||
if((i & SWAP_RB) == SWAP_RB) {
|
||||
CC_swap_rb = true;
|
||||
}
|
||||
s = new FixedFunctionShader(i, CC_a_color, CC_a_normal, CC_a_texture0, CC_a_texture1, CC_TEX_GEN_STRQ, CC_lighting,
|
||||
CC_fog, CC_alphatest, CC_unit0, CC_unit1, CC_anisotropic, CC_swap_rb);
|
||||
instances[i] = s;
|
||||
}
|
||||
return s;
|
||||
|
@ -99,6 +105,7 @@ public class FixedFunctionShader {
|
|||
private final boolean enable_unit0;
|
||||
private final boolean enable_unit1;
|
||||
private final boolean enable_anisotropic_fix;
|
||||
private final boolean enable_swap_rb;
|
||||
private final ProgramGL globject;
|
||||
|
||||
private UniformGL u_matrix_m = null;
|
||||
|
@ -148,7 +155,8 @@ public class FixedFunctionShader {
|
|||
public final BufferGL genericBuffer;
|
||||
public boolean bufferIsInitialized = false;
|
||||
|
||||
private FixedFunctionShader(int j, boolean CC_a_color, boolean CC_a_normal, boolean CC_a_texture0, boolean CC_a_texture1, boolean CC_TEX_GEN_STRQ, boolean CC_lighting, boolean CC_fog, boolean CC_alphatest, boolean CC_unit0, boolean CC_unit1, boolean CC_anisotropic_fix) {
|
||||
private FixedFunctionShader(int j, boolean CC_a_color, boolean CC_a_normal, boolean CC_a_texture0, boolean CC_a_texture1, boolean CC_TEX_GEN_STRQ, boolean CC_lighting,
|
||||
boolean CC_fog, boolean CC_alphatest, boolean CC_unit0, boolean CC_unit1, boolean CC_anisotropic_fix, boolean CC_swap_rb) {
|
||||
enable_color = CC_a_color;
|
||||
enable_normal = CC_a_normal;
|
||||
enable_texture0 = CC_a_texture0;
|
||||
|
@ -160,6 +168,7 @@ public class FixedFunctionShader {
|
|||
enable_unit0 = CC_unit0;
|
||||
enable_unit1 = CC_unit1;
|
||||
enable_anisotropic_fix = CC_anisotropic_fix;
|
||||
enable_swap_rb = CC_swap_rb;
|
||||
|
||||
if(shaderSource == null) {
|
||||
shaderSource = fileContents("/glsl/core.glsl");
|
||||
|
@ -177,6 +186,7 @@ public class FixedFunctionShader {
|
|||
if(enable_unit0) source += "#define CC_unit0\n";
|
||||
if(enable_unit1) source += "#define CC_unit1\n";
|
||||
if(enable_anisotropic_fix) source += "#define CC_patch_anisotropic\n";
|
||||
if(enable_swap_rb) source += "#define CC_swap_rb\n";
|
||||
source += shaderSource;
|
||||
|
||||
ShaderGL v = _wglCreateShader(_wGL_VERTEX_SHADER);
|
||||
|
|
2417
src/main/java/net/lax1dude/eaglercraft/glemu/RealOpenGLEnums.java
Normal file
File diff suppressed because it is too large
|
@ -11,7 +11,9 @@ import net.lax1dude.eaglercraft.EaglerAdapter;
|
|||
import net.lax1dude.eaglercraft.EaglerProfile;
|
||||
|
||||
import net.lax1dude.eaglercraft.GuiScreenEditProfile;
|
||||
import net.lax1dude.eaglercraft.GuiScreenLicense;
|
||||
import net.lax1dude.eaglercraft.GuiScreenVoiceChannel;
|
||||
import net.lax1dude.eaglercraft.LocalStorageManager;
|
||||
import net.lax1dude.eaglercraft.adapter.Tessellator;
|
||||
import net.lax1dude.eaglercraft.glemu.EffectPipeline;
|
||||
import net.lax1dude.eaglercraft.glemu.FixedFunctionShader;
|
||||
|
@ -248,12 +250,20 @@ public class Minecraft implements Runnable {
|
|||
showIntroAnimation();
|
||||
|
||||
String s = EaglerAdapter.getServerToJoinOnLaunch();
|
||||
GuiScreen scr;
|
||||
|
||||
if(s != null) {
|
||||
this.displayGuiScreen(new GuiScreenEditProfile(new GuiConnecting(new GuiMainMenu(), this, new ServerData("Eaglercraft Server", s, false))));
|
||||
scr = new GuiScreenEditProfile(new GuiConnecting(new GuiMainMenu(), this, new ServerData("Eaglercraft Server", s, false)));
|
||||
}else {
|
||||
this.displayGuiScreen(new GuiScreenEditProfile(new GuiMainMenu()));
|
||||
scr = new GuiScreenEditProfile(new GuiMainMenu());
|
||||
}
|
||||
|
||||
if(!LocalStorageManager.profileSettingsStorage.getBoolean("acceptLicense")) {
|
||||
scr = new GuiScreenLicense(scr);
|
||||
}
|
||||
|
||||
displayGuiScreen(scr);
|
||||
|
||||
this.loadingScreen = new LoadingScreenRenderer(this);
|
||||
|
||||
if (this.gameSettings.fullScreen && !this.fullscreen) {
|
||||
|
@ -1443,6 +1453,9 @@ public class Minecraft implements Runnable {
|
|||
|
||||
this.sndManager.playStreaming((String) null, 0.0F, 0.0F, 0.0F);
|
||||
this.sndManager.stopAllSounds();
|
||||
if(EaglerAdapter.isVideoSupported()) {
|
||||
EaglerAdapter.unloadVideo();
|
||||
}
|
||||
this.theWorld = par1WorldClient;
|
||||
|
||||
if (par1WorldClient != null) {
|
||||
|
|
|
@ -102,10 +102,10 @@ public class GuiIngame extends Gui {
|
|||
this.drawTexturedModalRect(var6 / 2 - 91, var7 - 22, 0, 0, 182, 22);
|
||||
this.drawTexturedModalRect(var6 / 2 - 91 - 1 + var31.currentItem * 20, var7 - 22 - 1, 0, 22, 24, 22);
|
||||
tex_icons.bindTexture();
|
||||
//EaglerAdapter.glEnable(EaglerAdapter.GL_BLEND);
|
||||
//EaglerAdapter.glBlendFunc(EaglerAdapter.GL_ONE_MINUS_DST_COLOR, EaglerAdapter.GL_ONE_MINUS_SRC_COLOR);
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_BLEND);
|
||||
EaglerAdapter.glBlendFunc(EaglerAdapter.GL_ONE_MINUS_DST_COLOR, EaglerAdapter.GL_ONE_MINUS_SRC_COLOR);
|
||||
this.drawTexturedModalRect(var6 / 2 - 7, var7 / 2 - 7, 0, 0, 16, 16);
|
||||
//EaglerAdapter.glDisable(EaglerAdapter.GL_BLEND);
|
||||
EaglerAdapter.glDisable(EaglerAdapter.GL_BLEND);
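// With glBlendFunc(GL_ONE_MINUS_DST_COLOR, GL_ONE_MINUS_SRC_COLOR) the blend evaluates
// out = src * (1 - dst) + dst * (1 - src); for the nearly white crosshair texel (src close to 1)
// this reduces to out ~= 1 - dst, so the crosshair inverts whatever color is behind it, which is
// why the previously commented-out calls are enabled here.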
|
||||
var11 = this.mc.thePlayer.hurtResistantTime / 3 % 2 == 1;
|
||||
|
||||
if (this.mc.thePlayer.hurtResistantTime < 10) {
|
||||
|
|
|
@ -211,6 +211,7 @@ public class GuiMainMenu extends GuiScreen {
|
|||
int x = (this.width - 345) / 2;
|
||||
int y = (this.height - 230) / 2;
|
||||
if(par1 >= (x + 323) && par1 <= (x + 323 + 13) && par2 >= (y + 7) && par2 <= (y + 7 + 13)) {
|
||||
this.mc.sndManager.playSoundFX("random.click", 1.0F, 1.0F);
|
||||
hideAck();
|
||||
}
|
||||
int trackHeight = 193;
|
||||
|
|
|
@ -3,6 +3,7 @@ package net.minecraft.src;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import net.lax1dude.eaglercraft.ConfigConstants;
|
||||
import net.lax1dude.eaglercraft.EaglerAdapter;
|
||||
|
||||
public class GuiMultiplayer extends GuiScreen {
|
||||
|
@ -287,8 +288,23 @@ public class GuiMultiplayer extends GuiScreen {
|
|||
this.lagTooltip = null;
|
||||
StringTranslate var4 = StringTranslate.getInstance();
|
||||
this.drawDefaultBackground();
|
||||
|
||||
boolean showAyonull = ConfigConstants.ayonullTitle != null && ConfigConstants.ayonullLink != null;
|
||||
|
||||
this.serverSlotContainer.top = showAyonull ? 42 : 32;
|
||||
this.serverSlotContainer.drawScreen(par1, par2, par3);
|
||||
this.drawCenteredString(this.fontRenderer, var4.translateKey("multiplayer.title"), this.width / 2, 20, 16777215);
|
||||
|
||||
if(showAyonull) {
|
||||
this.drawCenteredString(this.fontRenderer, ConfigConstants.ayonullTitle, this.width / 2, 12, 0xDDDD66);
|
||||
|
||||
String link = ConfigConstants.ayonullLink;
|
||||
int linkWidth = fontRenderer.getStringWidth(link);
|
||||
boolean mouseOver = par1 > (this.width - linkWidth) / 2 - 10 && par1 < (this.width + linkWidth) / 2 + 10 && par2 > 21 && par2 < 35;
|
||||
this.drawString(this.fontRenderer, EnumChatFormatting.UNDERLINE + link, (this.width - linkWidth) / 2, 23, mouseOver ? 0xBBBBFF : 0x7777DD);
|
||||
}else {
|
||||
this.drawCenteredString(this.fontRenderer, var4.translateKey("multiplayer.title"), this.width / 2, 16, 16777215);
|
||||
}
|
||||
|
||||
super.drawScreen(par1, par2, par3);
|
||||
|
||||
if (this.lagTooltip != null) {
|
||||
|
@ -306,6 +322,19 @@ public class GuiMultiplayer extends GuiScreen {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
protected void mouseClicked(int par1, int par2, int par3) {
|
||||
if (par3 == 0 && ConfigConstants.ayonullTitle != null && ConfigConstants.ayonullLink != null) {
|
||||
int linkWidth = fontRenderer.getStringWidth(ConfigConstants.ayonullLink);
|
||||
boolean mouseOver = par1 > (this.width - linkWidth) / 2 - 10 && par1 < (this.width + linkWidth) / 2 + 10 && par2 > 21 && par2 < 35;
|
||||
if(mouseOver) {
|
||||
EaglerAdapter.openLink(ConfigConstants.ayonullLink);
|
||||
return;
|
||||
}
|
||||
}
|
||||
super.mouseClicked(par1, par2, par3);
|
||||
}
|
||||
|
||||
/**
|
||||
* Join server by slot index
|
||||
*/
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
package net.minecraft.src;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import net.lax1dude.eaglercraft.EaglerAdapter;
|
||||
|
||||
public class ItemMap extends ItemMapBase {
|
||||
protected ItemMap(int par1) {
|
||||
super(par1);
|
||||
|
@ -273,4 +276,88 @@ public class ItemMap extends ItemMapBase {
|
|||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
private static MapData getMapById(WorldClient theWorld, int id) {
|
||||
String var2 = "map_" + id;
|
||||
MapData var3 = (MapData) theWorld.loadItemData(MapData.class, var2);
|
||||
if (var3 == null) {
|
||||
var3 = new MapData(var2);
|
||||
theWorld.setItemData(var2, var3);
|
||||
}
|
||||
return var3;
|
||||
}
|
||||
|
||||
public static void processVideoMap(WorldClient theWorld, byte[] data) {
|
||||
if(!EaglerAdapter.isVideoSupported()) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
DataInputStream dat = new DataInputStream(new ByteArrayInputStream(data));
|
||||
int op = dat.read();
|
||||
if(op == 0) {
|
||||
int count = dat.read();
|
||||
int w = (count >> 4) & 0xF;
|
||||
int h = count & 0xF;
|
||||
for(int y = 0; y < h; ++y) {
|
||||
for(int x = 0; x < w; ++x) {
|
||||
getMapById(theWorld, dat.readUnsignedShort()).enableVideoPlayback = false;
|
||||
}
|
||||
}
|
||||
EaglerAdapter.unloadVideo();
|
||||
}else if(op == 8) {
|
||||
int ttl = dat.readInt();
|
||||
String src = dat.readUTF();
|
||||
EaglerAdapter.bufferVideo(src, ttl);
|
||||
}else {
|
||||
boolean fullResetPacket = (op & 2) == 2;
|
||||
boolean positionPacket = (op & 4) == 4;
|
||||
|
||||
int fps = 0;
|
||||
int len = 0;
|
||||
String url = null;
|
||||
if(fullResetPacket) {
|
||||
int count = dat.read();
|
||||
int w = (count >> 4) & 0xF;
|
||||
int h = count & 0xF;
|
||||
float wf = 1.0f / w;
|
||||
float hf = 1.0f / h;
|
||||
for(int y = 0; y < h; ++y) {
|
||||
for(int x = 0; x < w; ++x) {
|
||||
MapData mp = getMapById(theWorld, dat.readUnsignedShort());
|
||||
mp.videoX1 = x * wf;
|
||||
mp.videoY1 = y * hf;
|
||||
mp.videoX2 = mp.videoX1 + wf;
|
||||
mp.videoY2 = mp.videoY1 + hf;
|
||||
mp.enableVideoPlayback = true;
|
||||
}
|
||||
}
|
||||
fps = dat.read();
|
||||
len = dat.readInt();
|
||||
url = dat.readUTF();
|
||||
}
|
||||
|
||||
if(positionPacket) {
|
||||
float v = dat.readFloat();
|
||||
EaglerAdapter.setVideoVolume((float)dat.readDouble(), (float)dat.readDouble(), (float)dat.readDouble(), v);
|
||||
}
|
||||
|
||||
if(fullResetPacket) {
|
||||
EaglerAdapter.setVideoFrameRate(fps);
|
||||
EaglerAdapter.loadVideo(url, true);
|
||||
}
|
||||
|
||||
int time = dat.readInt();
|
||||
int timeNow = (int)(EaglerAdapter.getVideoCurrentTime() * 1000.0f);
|
||||
if(MathHelper.abs_int(time - timeNow) > 1000) {
|
||||
EaglerAdapter.setVideoCurrentTime(time * 0.001f);
|
||||
}
|
||||
|
||||
EaglerAdapter.setVideoLoop(dat.readBoolean());
|
||||
EaglerAdapter.setVideoPaused(dat.readBoolean());
|
||||
}
|
||||
}catch(IOException e) {
|
||||
System.err.println("Failed to read video map packet! " + e.toString());
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
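For reference, the payload read by processVideoMap() (delivered as Packet131MapData with itemID 104, see NetClientHandler below): the first byte is an opcode, where 0 stops playback and releases the listed maps, 8 pre-buffers a URL, and any other value is a play/update packet whose bit 2 requests a full reset and bit 4 carries volume/position data. A hedged sketch of encoding the simple opcode 8 packet on the sending side (the URL and TTL are illustrative, not values from any real plugin):

ByteArrayOutputStream bytes = new ByteArrayOutputStream(); // java.io; the writes declare IOException
DataOutputStream out = new DataOutputStream(bytes);
out.write(8);                                  // opcode 8: pre-buffer a video element client side
out.writeInt(60000);                           // ttl in milliseconds before the buffered element is discarded
out.writeUTF("https://example.com/video.mp4"); // hypothetical source URL
byte[] payload = bytes.toByteArray();          // send as the itemData of a map-data packet with itemID 104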
|
||||
|
|
|
@ -372,4 +372,12 @@ public class MapData extends WorldSavedData {
|
|||
}
|
||||
}
|
||||
|
||||
public boolean enableVideoPlayback = false;
|
||||
|
||||
public float videoX1 = 0.0f;
|
||||
public float videoY1 = 0.0f;
|
||||
public float videoX2 = 1.0f;
|
||||
public float videoY2 = 1.0f;
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -26,6 +26,20 @@ public class MapItemRenderer {
|
|||
private static final TextureLocation mapicons = new TextureLocation("/misc/mapicons.png");
|
||||
|
||||
public void renderMap(EntityPlayer par1EntityPlayer, RenderEngine par2RenderEngine, MapData par3MapData) {
|
||||
float texX1 = 0.0f;
|
||||
float texX2 = 1.0f;
|
||||
float texY1 = 0.0f;
|
||||
float texY2 = 1.0f;
|
||||
boolean isVideoMode = EaglerAdapter.isVideoSupported() && par3MapData.enableVideoPlayback && EaglerAdapter.isVideoLoaded();
|
||||
if(isVideoMode) {
|
||||
EaglerAdapter.glEnable(EaglerAdapter.EAG_SWAP_RB);
|
||||
EaglerAdapter.updateVideoTexture();
|
||||
EaglerAdapter.bindVideoTexture();
|
||||
texX1 = par3MapData.videoX1;
|
||||
texY1 = par3MapData.videoY1;
|
||||
texX2 = par3MapData.videoX2;
|
||||
texY2 = par3MapData.videoY2;
|
||||
}else {
|
||||
if(par3MapData.enableAyunami) {
|
||||
System.arraycopy(par3MapData.ayunamiPixels, 0, intArray, 0, intArray.length);
|
||||
}else {
|
||||
|
@ -64,27 +78,31 @@ public class MapItemRenderer {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
par2RenderEngine.createTextureFromBytes(this.intArray, 128, 128, this.bufferedImage);
|
||||
}
|
||||
|
||||
byte var15 = 0;
|
||||
byte var16 = 0;
|
||||
Tessellator var17 = Tessellator.instance;
|
||||
float var18 = 0.0F;
|
||||
EaglerAdapter.glBindTexture(EaglerAdapter.GL_TEXTURE_2D, this.bufferedImage);
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_BLEND);
|
||||
EaglerAdapter.glBlendFunc(EaglerAdapter.GL_ONE, EaglerAdapter.GL_ONE_MINUS_SRC_ALPHA);
|
||||
EaglerAdapter.glDisable(EaglerAdapter.GL_ALPHA_TEST);
|
||||
var17.startDrawingQuads();
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 0) + var18), (double) ((float) (var16 + 128) - var18), -0.009999999776482582D, 0.0D, 1.0D);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 128) - var18), (double) ((float) (var16 + 128) - var18), -0.009999999776482582D, 1.0D, 1.0D);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 128) - var18), (double) ((float) (var16 + 0) + var18), -0.009999999776482582D, 1.0D, 0.0D);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 0) + var18), (double) ((float) (var16 + 0) + var18), -0.009999999776482582D, 0.0D, 0.0D);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 0) + var18), (double) ((float) (var16 + 128) - var18), -0.009999999776482582D, texX1, texY2);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 128) - var18), (double) ((float) (var16 + 128) - var18), -0.009999999776482582D, texX2, texY2);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 128) - var18), (double) ((float) (var16 + 0) + var18), -0.009999999776482582D, texX2, texY1);
|
||||
var17.addVertexWithUV((double) ((float) (var15 + 0) + var18), (double) ((float) (var16 + 0) + var18), -0.009999999776482582D, texX1, texY1);
|
||||
var17.draw();
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_ALPHA_TEST);
|
||||
EaglerAdapter.glDisable(EaglerAdapter.GL_BLEND);
|
||||
par2RenderEngine.resetBoundTexture();
|
||||
|
||||
if(!par3MapData.enableAyunami) {
|
||||
if(isVideoMode) {
|
||||
EaglerAdapter.glDisable(EaglerAdapter.EAG_SWAP_RB);
|
||||
}
|
||||
|
||||
if(!par3MapData.enableAyunami && !isVideoMode) {
|
||||
mapicons.bindTexture();
|
||||
int var19 = 0;
|
||||
|
||||
|
|
|
@ -1019,6 +1019,8 @@ public class NetClientHandler extends NetHandler {
|
|||
ItemMap.getMPMapData(par1Packet131MapData.uniqueID, this.mc.theWorld).updateMPMapData(par1Packet131MapData.itemData);
|
||||
} else if (par1Packet131MapData.itemID == 103) {
|
||||
ItemMap.readAyunamiMapPacket(this.mc.theWorld, par1Packet131MapData.uniqueID, par1Packet131MapData.itemData);
|
||||
} else if (par1Packet131MapData.itemID == 104) {
|
||||
ItemMap.processVideoMap(this.mc.theWorld, par1Packet131MapData.itemData);
|
||||
} else {
|
||||
System.err.println("Unknown itemid: " + par1Packet131MapData.itemID);
|
||||
}
|
||||
|
|
|
@ -155,15 +155,8 @@ public class RenderGlobal implements IWorldAccess {
|
|||
for(int i = 0; i < glOcclusionQuery.length; ++i) {
|
||||
this.glOcclusionQuery[i] = -1;
|
||||
}
|
||||
// if (this.occlusionEnabled) {
|
||||
// this.occlusionResult.clear();
|
||||
// this.glOcclusionQueryBase = GLAllocation.createDirectIntBuffer(var3 * var3 * var4);
|
||||
// this.glOcclusionQueryBase.clear();
|
||||
// this.glOcclusionQueryBase.position(0);
|
||||
// this.glOcclusionQueryBase.limit(var3 * var3 * var4);
|
||||
// EaglerAdapter.glGenQueriesARB(this.glOcclusionQueryBase);
|
||||
// }
|
||||
|
||||
this.occlusionQueryAvailable = new boolean[glOcclusionQuery.length];
|
||||
this.occlusionQueryStalled = new long[occlusionQueryAvailable.length];
|
||||
this.starGLCallList = GLAllocation.generateDisplayLists(3);
|
||||
EaglerAdapter.glPushMatrix();
|
||||
EaglerAdapter.glNewList(this.starGLCallList, EaglerAdapter.GL_COMPILE);
|
||||
|
@ -327,7 +320,8 @@ public class RenderGlobal implements IWorldAccess {
|
|||
int i = (var6 * this.renderChunksTall + var5) * this.renderChunksWide + var4;
|
||||
this.worldRenderers[i] = new WorldRenderer(this.theWorld, this.tileEntities, var4 * 16, var5 * 16, var6 * 16, this.glRenderListBase + var2);
|
||||
this.worldRenderers[i].isWaitingOnOcclusionQuery = false;
|
||||
this.worldRenderers[i].isVisible = true;
|
||||
this.worldRenderers[i].isNowVisible = true;
|
||||
this.worldRenderers[i].isVisible = 100;
|
||||
this.worldRenderers[i].isInFrustum = true;
|
||||
this.worldRenderers[i].chunkIndex = var3++;
|
||||
this.worldRenderers[i].markDirty();
|
||||
|
@ -509,8 +503,11 @@ public class RenderGlobal implements IWorldAccess {
|
|||
}
|
||||
}
|
||||
|
||||
// GOSH FUCKING DAMMIT WHY IN THE FUCK IS GODDAMN WEBGL THIS UNSTABLE
|
||||
|
||||
private long lastOcclusionQuery = 0l;
|
||||
private boolean occlusionQueryAvailable = false;
|
||||
private boolean[] occlusionQueryAvailable;
|
||||
private long[] occlusionQueryStalled;
|
||||
|
||||
/**
|
||||
* Sorts all renderers based on the passed in entity. Args: entityLiving,
|
||||
|
@ -568,18 +565,40 @@ public class RenderGlobal implements IWorldAccess {
|
|||
byte var17 = 0;
|
||||
int var34;
|
||||
|
||||
if(par2 == 0 && occlusionQueryAvailable) {
|
||||
occlusionQueryAvailable = false;
|
||||
long queryRate = 50l;
|
||||
long stallRateVisible = 50l;
|
||||
long stallRate = 500l;
|
||||
int cooldownRate = 10;
|
||||
|
||||
long ct = System.currentTimeMillis();
|
||||
if(par2 == 0) {
|
||||
this.theWorld.theProfiler.endStartSection("getoccl");
|
||||
for (int i = 0; i < this.sortedWorldRenderers.length; ++i) {
|
||||
WorldRenderer c = this.sortedWorldRenderers[i];
|
||||
int ccx = c.chunkX - fx;
|
||||
int ccy = c.chunkY - fy;
|
||||
int ccz = c.chunkZ - fz;
|
||||
if(ccx < 2 && ccx > -2 && ccy < 2 && ccy > -2 && ccz < 2 && ccz > -2) {
|
||||
c.isVisible = true;
|
||||
if((ccx < 2 && ccx > -2 && ccy < 2 && ccy > -2 && ccz < 2 && ccz > -2) || glOcclusionQuery[c.chunkIndex] == -1) {
|
||||
c.isNowVisible = true;
|
||||
c.isVisible = cooldownRate;
|
||||
}else if(!c.skipAllRenderPasses() && c.isInFrustum) {
|
||||
c.isVisible = glOcclusionQuery[c.chunkIndex] == -1 ? true : EaglerAdapter.glGetQueryResult(glOcclusionQuery[c.chunkIndex]);
|
||||
if(occlusionQueryAvailable[c.chunkIndex]) {
|
||||
if(EaglerAdapter.glGetQueryResultAvailable(glOcclusionQuery[c.chunkIndex])) {
|
||||
if(EaglerAdapter.glGetQueryResult(glOcclusionQuery[c.chunkIndex])) {
|
||||
c.isNowVisible = true;
|
||||
c.isVisible = cooldownRate;
|
||||
}else {
|
||||
if(c.isVisible <= 0) {
|
||||
c.isNowVisible = false;
|
||||
}
|
||||
}
|
||||
occlusionQueryAvailable[c.chunkIndex] = false;
|
||||
occlusionQueryStalled[c.chunkIndex] = 0l;
|
||||
}else if(occlusionQueryStalled[c.chunkIndex] != 0l && ct - occlusionQueryStalled[c.chunkIndex] > stallRateVisible) {
|
||||
c.isNowVisible = true;
|
||||
c.isVisible = cooldownRate;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
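Taken together, the rewritten readback replaces the old single boolean with a per-chunk state machine; a hedged restatement of the logic above:

// - chunks within one chunk of the camera, or with no query object yet, are forced visible and
//   given a cooldown of cooldownRate (10) passes via the isVisible counter;
// - when a pending result arrives, a non-zero sample count re-arms the cooldown, while a zero
//   result only clears isNowVisible once that cooldown has drained to 0;
// - if a result never arrives, the occlusionQueryStalled timestamp forces the chunk visible again
//   after stallRateVisible (50 ms), so a lost query cannot leave terrain permanently hidden.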
|
||||
|
@ -589,10 +608,9 @@ public class RenderGlobal implements IWorldAccess {
|
|||
|
||||
var7 -= par1EntityLiving.getEyeHeight();
|
||||
|
||||
long ct = System.currentTimeMillis();
|
||||
if(par2 == 0 && ct - lastOcclusionQuery > 50l) {
|
||||
ct = System.currentTimeMillis();
|
||||
if(par2 == 0 && ct - lastOcclusionQuery > queryRate) {
|
||||
lastOcclusionQuery = ct;
|
||||
occlusionQueryAvailable = true;
|
||||
this.theWorld.theProfiler.endStartSection("occl");
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_CULL_FACE);
|
||||
EaglerAdapter.glDisable(EaglerAdapter.GL_BLEND);
|
||||
|
@ -605,6 +623,17 @@ public class RenderGlobal implements IWorldAccess {
|
|||
int ccy = c.chunkY - fy;
|
||||
int ccz = c.chunkZ - fz;
|
||||
if(!c.skipAllRenderPasses() && c.isInFrustum && !(ccx < 2 && ccx > -2 && ccy < 2 && ccy > -2 && ccz < 2 && ccz > -2)) {
|
||||
boolean stalled = false;
|
||||
if(occlusionQueryAvailable[c.chunkIndex]) {
|
||||
if(occlusionQueryStalled[c.chunkIndex] == 0l) {
|
||||
occlusionQueryStalled[c.chunkIndex] = ct;
|
||||
stalled = true;
|
||||
}else if(ct - occlusionQueryStalled[c.chunkIndex] < stallRate) {
|
||||
stalled = true;
|
||||
}
|
||||
}
|
||||
if(!stalled) {
|
||||
occlusionQueryAvailable[c.chunkIndex] = true;
|
||||
int q = glOcclusionQuery[c.chunkIndex];
|
||||
if(q == -1) {
|
||||
q = glOcclusionQuery[c.chunkIndex] = EaglerAdapter.glCreateQuery();
|
||||
|
@ -614,6 +643,10 @@ public class RenderGlobal implements IWorldAccess {
|
|||
EaglerAdapter.glEndQuery();
|
||||
}
|
||||
}
|
||||
if(c.isVisible > 0) {
|
||||
--c.isVisible;
|
||||
}
|
||||
}
|
||||
EaglerAdapter.glEndOcclusionBB();
|
||||
EaglerAdapter.glColorMask(true, true, true, true);
|
||||
EaglerAdapter.glDepthMask(true);
|
||||
|
@ -640,14 +673,14 @@ public class RenderGlobal implements IWorldAccess {
|
|||
++this.renderersSkippingRenderPass;
|
||||
} else if (!this.sortedWorldRenderers[var7].isInFrustum) {
|
||||
++this.renderersBeingClipped;
|
||||
} else if (!this.sortedWorldRenderers[var7].isVisible) {
|
||||
} else if(!this.sortedWorldRenderers[var7].isNowVisible) {
|
||||
++this.renderersBeingOccluded;
|
||||
} else {
|
||||
++this.renderersBeingRendered;
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.sortedWorldRenderers[var7].skipRenderPass[par3] && this.sortedWorldRenderers[var7].isInFrustum && this.sortedWorldRenderers[var7].isVisible) {
|
||||
if (!this.sortedWorldRenderers[var7].skipRenderPass[par3] && this.sortedWorldRenderers[var7].isInFrustum && this.sortedWorldRenderers[var7].isNowVisible) {
|
||||
int var8 = this.sortedWorldRenderers[var7].getGLCallListForPass(par3);
|
||||
|
||||
if (var8 >= 0) {
|
||||
|
|
|
@ -19,7 +19,6 @@ public class RenderSlime extends RenderLiving {
|
|||
return 0;
|
||||
} else if (par2 == 0) {
|
||||
this.setRenderPassModel(this.scaleAmount);
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_NORMALIZE);
|
||||
EaglerAdapter.glEnable(EaglerAdapter.GL_BLEND);
|
||||
EaglerAdapter.glBlendFunc(EaglerAdapter.GL_SRC_ALPHA, EaglerAdapter.GL_ONE_MINUS_SRC_ALPHA);
|
||||
return 1;
|
||||
|
|
|
@ -38,6 +38,8 @@ public class ServerList {
|
|||
NBTTagCompound nbt = CompressedStreamTools.readUncompressed(Base64.decodeBase64(base64));
|
||||
ConfigConstants.profanity = nbt.getBoolean("profanity");
|
||||
hideDownDefaultServers = nbt.getBoolean("hide_down");
|
||||
ConfigConstants.ayonullTitle = nbt.hasKey("serverListTitle") ? nbt.getString("serverListTitle") : null;
|
||||
ConfigConstants.ayonullLink = nbt.hasKey("serverListLink") ? nbt.getString("serverListLink") : null;
|
||||
forcedServers.clear();
|
||||
NBTTagList list = nbt.getTagList("servers");
|
||||
for (int i = 0; i < list.tagCount(); ++i) {
|
||||
|
|
|
@ -29,7 +29,7 @@ public class WorldClient extends World {
|
|||
* with each subsequent tick until the spawn queue is empty.
|
||||
*/
|
||||
private Set entitySpawnQueue = new HashSet();
|
||||
private final Minecraft mc = Minecraft.getMinecraft();
|
||||
public final Minecraft mc = Minecraft.getMinecraft();
|
||||
private final Set previousActiveChunkSet = new HashSet();
|
||||
|
||||
public WorldClient(NetClientHandler par1NetClientHandler, WorldSettings par2WorldSettings, int par3, int par4, Profiler par5Profiler) {
|
||||
|
|
|
@ -63,7 +63,8 @@ public class WorldRenderer {
|
|||
public int chunkIndex;
|
||||
|
||||
/** Is this renderer visible according to the occlusion query */
|
||||
public boolean isVisible = true;
|
||||
public int isVisible = 0;
|
||||
public boolean isNowVisible = true;
|
||||
|
||||
/** Is this renderer waiting on the result of the occlusion query */
|
||||
public boolean isWaitingOnOcclusionQuery;
|
||||
|
|
|
@ -11,6 +11,7 @@ import java.text.SimpleDateFormat;
|
|||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Set;
|
||||
|
||||
|
@ -24,6 +25,7 @@ import org.teavm.jso.ajax.ReadyStateChangeHandler;
|
|||
import org.teavm.jso.ajax.XMLHttpRequest;
|
||||
import org.teavm.jso.browser.TimerHandler;
|
||||
import org.teavm.jso.browser.Window;
|
||||
import org.teavm.jso.dom.events.Event;
|
||||
import org.teavm.jso.dom.events.EventListener;
|
||||
import org.teavm.jso.dom.events.KeyboardEvent;
|
||||
import org.teavm.jso.dom.events.MessageEvent;
|
||||
|
@ -32,6 +34,8 @@ import org.teavm.jso.dom.events.WheelEvent;
|
|||
import org.teavm.jso.dom.html.HTMLCanvasElement;
|
||||
import org.teavm.jso.dom.html.HTMLDocument;
|
||||
import org.teavm.jso.dom.html.HTMLElement;
|
||||
import org.teavm.jso.dom.html.HTMLVideoElement;
|
||||
import org.teavm.jso.media.MediaError;
|
||||
import org.teavm.jso.typedarrays.ArrayBuffer;
|
||||
import org.teavm.jso.typedarrays.Float32Array;
|
||||
import org.teavm.jso.typedarrays.Int32Array;
|
||||
|
@ -43,6 +47,7 @@ import org.teavm.jso.webaudio.AudioListener;
|
|||
import org.teavm.jso.webaudio.DecodeErrorCallback;
|
||||
import org.teavm.jso.webaudio.DecodeSuccessCallback;
|
||||
import org.teavm.jso.webaudio.GainNode;
|
||||
import org.teavm.jso.webaudio.MediaElementAudioSourceNode;
|
||||
import org.teavm.jso.webaudio.MediaEvent;
|
||||
import org.teavm.jso.webaudio.PannerNode;
|
||||
import org.teavm.jso.webgl.WebGLBuffer;
|
||||
|
@ -372,6 +377,20 @@ public class EaglerAdapterImpl2 {
|
|||
|
||||
mouseEvents.clear();
|
||||
keyEvents.clear();
|
||||
|
||||
Window.setInterval(new TimerHandler() {
|
||||
@Override
|
||||
public void onTimer() {
|
||||
Iterator<BufferedVideo> vids = videosBuffer.values().iterator();
|
||||
while(vids.hasNext()) {
|
||||
BufferedVideo v = vids.next();
|
||||
if(System.currentTimeMillis() - v.requestedTime > v.ttl) {
|
||||
v.videoElement.setSrc("");
|
||||
vids.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
public static final void destroyContext() {
|
||||
|
@ -892,6 +911,308 @@ public class EaglerAdapterImpl2 {
|
|||
return getString("window.navigator.platform").toLowerCase().contains("win");
|
||||
}
|
||||
|
||||
private static HTMLVideoElement currentVideo = null;
|
||||
private static TextureGL videoTexture = null;
|
||||
private static boolean videoIsLoaded = false;
|
||||
private static boolean videoTexIsInitialized = false;
|
||||
private static int frameRate = 33;
|
||||
private static long frameTimer = 0l;
|
||||
|
||||
public static final boolean isVideoSupported() {
|
||||
return true;
|
||||
}
|
||||
public static final void loadVideo(String src, boolean autoplay) {
|
||||
loadVideo(src, autoplay, null, null);
|
||||
}
|
||||
public static final void loadVideo(String src, boolean autoplay, String setJavascriptPointer) {
|
||||
loadVideo(src, autoplay, setJavascriptPointer, null);
|
||||
}
|
||||
|
||||
@JSBody(params = { "ptr", "el" }, script = "window[ptr] = el;")
|
||||
private static native void setVideoPointer(String ptr, HTMLVideoElement el);
|
||||
@JSBody(params = { "ptr", "el" }, script = "window[ptr](el);")
|
||||
private static native void callVideoLoadEvent(String ptr, HTMLVideoElement el);
|
||||
|
||||
private static MediaElementAudioSourceNode currentVideoAudioSource = null;
|
||||
|
||||
private static GainNode currentVideoAudioGain = null;
|
||||
private static float currentVideoAudioGainValue = 1.0f;
|
||||
|
||||
private static PannerNode currentVideoAudioPanner = null;
|
||||
private static float currentVideoAudioX = 0.0f;
|
||||
private static float currentVideoAudioY = 0.0f;
|
||||
private static float currentVideoAudioZ = 0.0f;
|
||||
|
||||
public static final void loadVideo(String src, boolean autoplay, String setJavascriptPointer, final String javascriptOnloadFunction) {
|
||||
videoIsLoaded = false;
|
||||
videoTexIsInitialized = false;
|
||||
if(videoTexture == null) {
|
||||
videoTexture = _wglGenTextures();
|
||||
}
|
||||
if(currentVideo != null) {
|
||||
currentVideo.pause();
|
||||
currentVideo.setSrc("");
|
||||
}
|
||||
|
||||
BufferedVideo vid = videosBuffer.get(src);
|
||||
|
||||
if(vid != null) {
|
||||
currentVideo = vid.videoElement;
|
||||
videosBuffer.remove(src);
|
||||
}else {
|
||||
currentVideo = (HTMLVideoElement) win.getDocument().createElement("video");
|
||||
currentVideo.setAttribute("crossorigin", "anonymous");
|
||||
currentVideo.setAutoplay(autoplay);
|
||||
}
|
||||
|
||||
if(setJavascriptPointer != null) {
|
||||
setVideoPointer(setJavascriptPointer, currentVideo);
|
||||
}
|
||||
|
||||
currentVideo.addEventListener("playing", new EventListener<Event>() {
|
||||
@Override
|
||||
public void handleEvent(Event evt) {
|
||||
videoIsLoaded = true;
|
||||
if(javascriptOnloadFunction != null) {
|
||||
callVideoLoadEvent(javascriptOnloadFunction, currentVideo);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if(vid == null) {
|
||||
currentVideo.setControls(false);
|
||||
currentVideo.setSrc(src);
|
||||
}else {
|
||||
if(autoplay) {
|
||||
currentVideo.play();
|
||||
}
|
||||
}
|
||||
|
||||
if(currentVideoAudioSource != null) {
|
||||
currentVideoAudioSource.disconnect();
|
||||
}
|
||||
|
||||
currentVideoAudioSource = audioctx.createMediaElementSource(currentVideo);
|
||||
|
||||
if(currentVideoAudioGainValue < 0.0f) {
|
||||
currentVideoAudioSource.connect(audioctx.getDestination());
|
||||
}else {
|
||||
if(currentVideoAudioGain == null) {
|
||||
currentVideoAudioGain = audioctx.createGain();
|
||||
currentVideoAudioGain.getGain().setValue(currentVideoAudioGainValue > 1.0f ? 1.0f : currentVideoAudioGainValue);
|
||||
}
|
||||
|
||||
currentVideoAudioSource.connect(currentVideoAudioGain);
|
||||
|
||||
if(currentVideoAudioPanner == null) {
|
||||
currentVideoAudioPanner = audioctx.createPanner();
|
||||
currentVideoAudioPanner.setRolloffFactor(1f);
|
||||
currentVideoAudioPanner.setDistanceModel("linear");
|
||||
currentVideoAudioPanner.setPanningModel("HRTF");
|
||||
currentVideoAudioPanner.setConeInnerAngle(360f);
|
||||
currentVideoAudioPanner.setConeOuterAngle(0f);
|
||||
currentVideoAudioPanner.setConeOuterGain(0f);
|
||||
currentVideoAudioPanner.setOrientation(0f, 1f, 0f);
|
||||
currentVideoAudioPanner.setPosition(currentVideoAudioX, currentVideoAudioY, currentVideoAudioZ);
|
||||
currentVideoAudioPanner.setMaxDistance(currentVideoAudioGainValue * 16f + 0.1f);
|
||||
currentVideoAudioGain.connect(currentVideoAudioPanner);
|
||||
currentVideoAudioPanner.connect(audioctx.getDestination());
|
||||
}
|
||||
}
|
||||
|
||||
}
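The audio path assembled above is video element -> MediaElementAudioSourceNode -> GainNode -> PannerNode -> destination, and a pre-buffered element from videosBuffer is reused when one exists for the requested URL. A minimal call-sequence sketch, assuming the four-argument overload is exposed through EaglerAdapter like the two-argument one (the URL and the two window-level names are placeholders):

EaglerAdapter.setVideoFrameRate(30.0f); // upload at most about 30 texture frames per second
EaglerAdapter.loadVideo("https://example.com/video.mp4", true, "eagVideo", "eagVideoReady");
// window.eagVideo now references the video element, and window.eagVideoReady(el) fires once the
// "playing" event sets videoIsLoaded.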
|
||||
|
||||
private static class BufferedVideo {
|
||||
|
||||
protected final HTMLVideoElement videoElement;
|
||||
protected final String url;
|
||||
protected final long requestedTime;
|
||||
protected final int ttl;
|
||||
|
||||
public BufferedVideo(HTMLVideoElement videoElement, String url, int ttl) {
|
||||
this.videoElement = videoElement;
|
||||
this.url = url;
|
||||
this.requestedTime = System.currentTimeMillis();
|
||||
this.ttl = ttl;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final HashMap<String, BufferedVideo> videosBuffer = new HashMap();
|
||||
|
||||
public static final void bufferVideo(String src, int ttl) {
|
||||
if(!videosBuffer.containsKey(src)) {
|
||||
HTMLVideoElement video = (HTMLVideoElement) win.getDocument().createElement("video");
|
||||
video.setAutoplay(false);
|
||||
video.setAttribute("crossorigin", "anonymous");
|
||||
video.setPreload("auto");
|
||||
video.setControls(false);
|
||||
video.setSrc(src);
|
||||
videosBuffer.put(src, new BufferedVideo(video, src, ttl));
|
||||
}
|
||||
}
|
||||
|
||||
public static final void unloadVideo() {
|
||||
if(videoTexture != null) {
|
||||
_wglDeleteTextures(videoTexture);
|
||||
videoTexture = null;
|
||||
}
|
||||
if(currentVideo != null) {
|
||||
currentVideo.pause();
|
||||
currentVideo.setSrc("");
|
||||
currentVideo = null;
|
||||
}
|
||||
if(currentVideoAudioSource != null) {
|
||||
currentVideoAudioSource.disconnect();
|
||||
}
|
||||
}
|
||||
public static final boolean isVideoLoaded() {
|
||||
return videoTexture != null && currentVideo != null && videoIsLoaded;
|
||||
}
|
||||
public static final boolean isVideoPaused() {
|
||||
return currentVideo == null || currentVideo.isPaused();
|
||||
}
|
||||
public static final void setVideoPaused(boolean pause) {
|
||||
if(currentVideo != null) {
|
||||
if(pause) {
|
||||
currentVideo.pause();
|
||||
}else {
|
||||
currentVideo.play();
|
||||
}
|
||||
}
|
||||
}
|
||||
public static final void setVideoLoop(boolean loop) {
|
||||
if(currentVideo != null) {
|
||||
currentVideo.setLoop(loop);
|
||||
}
|
||||
}
|
||||
public static final void setVideoVolume(float x, float y, float z, float v) {
|
||||
currentVideoAudioX = x;
|
||||
currentVideoAudioY = y;
|
||||
currentVideoAudioZ = z;
|
||||
if(v < 0.0f) {
|
||||
if(currentVideoAudioGainValue >= 0.0f && currentVideoAudioSource != null) {
|
||||
currentVideoAudioSource.disconnect();
|
||||
currentVideoAudioSource.connect(audioctx.getDestination());
|
||||
}
|
||||
currentVideoAudioGainValue = v;
|
||||
}else {
|
||||
if(currentVideoAudioGain != null) {
|
||||
currentVideoAudioGain.getGain().setValue(v > 1.0f ? 1.0f : v);
|
||||
if(currentVideoAudioGainValue < 0.0f && currentVideoAudioSource != null) {
|
||||
currentVideoAudioSource.disconnect();
|
||||
currentVideoAudioSource.connect(currentVideoAudioGain);
|
||||
}
|
||||
}
|
||||
currentVideoAudioGainValue = v;
|
||||
if(currentVideoAudioPanner != null) {
|
||||
currentVideoAudioPanner.setMaxDistance(v * 16f + 0.1f);
|
||||
currentVideoAudioPanner.setPosition(x, y, z);
|
||||
}
|
||||
}
|
||||
}
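Note the sign convention: a negative volume detaches the gain/panner chain and wires the source straight to the destination, so the audio plays non-positionally (the coordinates are stored but the panner is bypassed), while a non-negative value is clamped to 1.0 for the gain and also sets the panner's max distance to v * 16 + 0.1 units. For example:

EaglerAdapter.setVideoVolume(0.0f, 0.0f, 0.0f, -1.0f); // non-positional, full-volume playback
EaglerAdapter.setVideoVolume(x, y, z, 0.5f);           // positional playback centered at (x, y, z)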
|
||||
|
||||
@JSBody(
|
||||
params = {"ctx", "target", "internalformat", "format", "type", "video"},
|
||||
script = "ctx.texImage2D(target, 0, internalformat, format, type, video);"
|
||||
)
|
||||
private static native void html5VideoTexImage2D(WebGL2RenderingContext ctx, int target, int internalformat, int format, int type, HTMLVideoElement video);
|
||||
|
||||
@JSBody(
|
||||
params = {"ctx", "target", "format", "type", "video"},
|
||||
script = "ctx.texSubImage2D(target, 0, 0, 0, format, type, video);"
|
||||
)
|
||||
private static native void html5VideoTexSubImage2D(WebGL2RenderingContext ctx, int target, int format, int type, HTMLVideoElement video);
|
||||
|
||||
public static final void updateVideoTexture() {
|
||||
long ms = System.currentTimeMillis();
|
||||
if(ms - frameTimer < frameRate && videoTexIsInitialized) {
|
||||
return;
|
||||
}
|
||||
frameTimer = ms;
|
||||
if(currentVideo != null && videoTexture != null && videoIsLoaded) {
|
||||
try {
|
||||
_wglBindTexture(_wGL_TEXTURE_2D, videoTexture);
|
||||
if(videoTexIsInitialized) {
|
||||
html5VideoTexSubImage2D(webgl, _wGL_TEXTURE_2D, _wGL_RGBA, _wGL_UNSIGNED_BYTE, currentVideo);
|
||||
}else {
|
||||
html5VideoTexImage2D(webgl, _wGL_TEXTURE_2D, _wGL_RGBA, _wGL_RGBA, _wGL_UNSIGNED_BYTE, currentVideo);
|
||||
_wglTexParameteri(_wGL_TEXTURE_2D, _wGL_TEXTURE_WRAP_S, _wGL_CLAMP);
|
||||
_wglTexParameteri(_wGL_TEXTURE_2D, _wGL_TEXTURE_WRAP_T, _wGL_CLAMP);
|
||||
_wglTexParameteri(_wGL_TEXTURE_2D, _wGL_TEXTURE_MIN_FILTER, _wGL_LINEAR);
|
||||
_wglTexParameteri(_wGL_TEXTURE_2D, _wGL_TEXTURE_MAG_FILTER, _wGL_LINEAR);
|
||||
videoTexIsInitialized = true;
|
||||
}
|
||||
}catch(Throwable t) {
|
||||
// rip
|
||||
}
|
||||
}
|
||||
}
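Despite its name, frameRate holds the frame interval in milliseconds (1000 / fps, clamped to at least 1 by setVideoFrameRate below), so updateVideoTexture() uploads at most one frame per interval: the first upload allocates the texture with texImage2D, later ones reuse the storage via texSubImage2D. Typical per-frame use, as MapItemRenderer does elsewhere in this commit:

EaglerAdapter.updateVideoTexture(); // re-uploads only if the frame interval has elapsed
EaglerAdapter.bindVideoTexture();   // bind before drawing the quad with EAG_SWAP_RB enabled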
|
||||
public static final void bindVideoTexture() {
|
||||
if(videoTexture != null) {
|
||||
_wglBindTexture(_wGL_TEXTURE_2D, videoTexture);
|
||||
}
|
||||
}
|
||||
public static final int getVideoWidth() {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
return currentVideo.getWidth();
|
||||
}else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
public static final int getVideoHeight() {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
return currentVideo.getHeight();
|
||||
}else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
public static final float getVideoCurrentTime() {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
return (float) currentVideo.getCurrentTime();
|
||||
}else {
|
||||
return -1.0f;
|
||||
}
|
||||
}
|
||||
public static final void setVideoCurrentTime(float seconds) {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
currentVideo.setCurrentTime(seconds);
|
||||
}
|
||||
}
|
||||
public static final float getVideoDuration() {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
return (float) currentVideo.getDuration();
|
||||
}else {
|
||||
return -1.0f;
|
||||
}
|
||||
}
|
||||
|
||||
public static final int VIDEO_ERR_NONE = -1;
|
||||
public static final int VIDEO_ERR_ABORTED = 1;
|
||||
public static final int VIDEO_ERR_NETWORK = 2;
|
||||
public static final int VIDEO_ERR_DECODE = 3;
|
||||
public static final int VIDEO_ERR_SRC_NOT_SUPPORTED = 4;
|
||||
|
||||
public static final int getVideoError() {
|
||||
if(currentVideo != null && videoIsLoaded) {
|
||||
MediaError err = currentVideo.getError();
|
||||
if(err != null) {
|
||||
return err.getCode();
|
||||
}else {
|
||||
return -1;
|
||||
}
|
||||
}else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
public static final void setVideoFrameRate(float fps) {
|
||||
frameRate = (int)(1000.0f / fps);
|
||||
if(frameRate < 1) {
|
||||
frameRate = 1;
|
||||
}
|
||||
}
|
||||
|
||||
private static MouseEvent currentEvent = null;
|
||||
private static KeyboardEvent currentEventK = null;
|
||||
private static boolean[] buttonStates = new boolean[8];
|
||||
|
@ -1274,18 +1595,13 @@ public class EaglerAdapterImpl2 {
|
|||
public static native String getFileChooserResultName();
|
||||
|
||||
public static final void setListenerPos(float x, float y, float z, float vx, float vy, float vz, float pitch, float yaw) {
|
||||
float var11 = MathHelper.cos(-yaw * 0.017453292F - (float) Math.PI);
|
||||
float var12 = MathHelper.sin(-yaw * 0.017453292F - (float) Math.PI);
|
||||
float var13 = -var12;
|
||||
float var14 = -MathHelper.sin(-pitch * 0.017453292F - (float) Math.PI);
|
||||
float var15 = -var11;
|
||||
float var16 = 0.0F;
|
||||
float var17 = 1.0F;
|
||||
float var18 = 0.0F;
|
||||
float var2 = MathHelper.cos(-yaw * 0.017453292F);
|
||||
float var3 = MathHelper.sin(-yaw * 0.017453292F);
|
||||
float var4 = -MathHelper.cos(pitch * 0.017453292F);
|
||||
float var5 = MathHelper.sin(pitch * 0.017453292F);
|
||||
AudioListener l = audioctx.getListener();
|
||||
if(!(Float.isInfinite(x) || Float.isInfinite(y) || Float.isInfinite(z)))l.setPosition(x, y, z);
|
||||
l.setOrientation(var13, var14, var15, var16, var17, var18);
|
||||
//l.setVelocity(vx, vy, vz);
|
||||
l.setPosition(x, y, z);
|
||||
l.setOrientation(-var3 * var4, -var5, -var2 * var4, 0.0f, 1.0f, 0.0f);
|
||||
}
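The replacement orientation matches the entity look vector: with yaw and pitch in degrees, forward = (-sin(yaw) * cos(pitch), -sin(pitch), cos(yaw) * cos(pitch)) and up stays (0, 1, 0), since cos(-yaw) = cos(yaw) and sin(-yaw) = -sin(yaw). Expanding the arguments used above: -var3 * var4 = -sin(-yaw) * (-cos(pitch)) = -sin(yaw) * cos(pitch), -var5 = -sin(pitch), and -var2 * var4 = cos(yaw) * cos(pitch). The removed var11..var18 version left the horizontal components unscaled by cos(pitch), so the listener's forward vector was not normalized.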
|
||||
|
||||
private static int playbackId = 0;
|
||||
|
|