Cleaned up and formatted code

OLEGSHA 2020-10-06 11:42:36 +03:00
parent 12dc816760
commit 503963f992
6 changed files with 369 additions and 309 deletions

View File

@@ -1,6 +1,5 @@
 package ru.windcorp.progressia.client;
-import ru.windcorp.progressia.client.audio.backend.ALTest;
 import ru.windcorp.progressia.client.comms.DefaultClientCommsListener;
 import ru.windcorp.progressia.client.comms.ServerCommsChannel;
 import ru.windcorp.progressia.client.graphics.world.Camera;

View File

@@ -10,29 +10,39 @@
import static org.lwjgl.stb.STBVorbis.*;
import static org.lwjgl.openal.AL10.*;

public class AudioReader {

    private AudioReader() {};

    // TODO fix converting from mono-stereo

    private static SoundType readAsSpecified(String audioName, int format) {
        IntBuffer channelBuffer = BufferUtils.createIntBuffer(1);
        IntBuffer rateBuffer = BufferUtils.createIntBuffer(1);

        Resource res = ResourceManager.getResource(audioName);

        ShortBuffer rawAudio = decodeVorbis(res, channelBuffer, rateBuffer);

        return new SoundType(rawAudio, format, rateBuffer.get(0));
    }

    public static SoundType readAsMono(String audioName) {
        return readAsSpecified(audioName, AL_FORMAT_MONO16);
    }

    public static SoundType readAsStereo(String audioName) {
        return readAsSpecified(audioName, AL_FORMAT_STEREO16);
    }

    private static ShortBuffer decodeVorbis(
            Resource dataToDecode,
            IntBuffer channelsBuffer,
            IntBuffer rateBuffer
    ) {
        return stb_vorbis_decode_memory(
                dataToDecode.readAsBytes(),
                channelsBuffer,
                rateBuffer
        );
    }

}
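
For orientation, a minimal sketch of how the refactored reader is meant to be called; the resource name below is a hypothetical placeholder, not a file referenced by this commit:

// Sketch only: "sounds/example.ogg" is a made-up resource name.
SoundType mono = AudioReader.readAsMono("sounds/example.ogg");
SoundType stereo = AudioReader.readAsStereo("sounds/example.ogg");
// Both methods now delegate to readAsSpecified(...), differing only in the
// OpenAL format constant (AL_FORMAT_MONO16 / AL_FORMAT_STEREO16), so the
// mono/stereo conversion TODO has a single code path left to fix.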

View File

@@ -11,58 +11,66 @@
import static org.lwjgl.openal.AL10.*;

public class Listener {

    private static final Listener INSTANCE = new Listener();

    private Listener() {}

    public static Listener getInstance() {
        return INSTANCE;
    }

    // Params
    private final Vec3 position = new Vec3();
    private final Vec3 velocity = new Vec3();
    private final Vec3 oriAt = new Vec3();
    private final Vec3 oriUp = new Vec3();

    private boolean isClientConnected = false;
    private Camera.Anchor anchor;

    public void update() {
        Client client = ClientState.getInstance();
        if (client == null) {
            if (isClientConnected) {
                isClientConnected = false;
                resetParams();
                applyParams();
            }
        } else {
            isClientConnected = true;
            if (anchor == null) {
                anchor = client.getCamera().getAnchor();
            } else {
                anchor.getCameraPosition(position);
                float pitch = anchor.getCameraPitch();
                float yaw = anchor.getCameraYaw();
                oriAt.set(
                        (float) (Math.cos(pitch) * Math.cos(yaw)),
                        (float) (Math.cos(pitch) * Math.sin(yaw)),
                        (float) Math.sin(pitch)
                );
                oriUp.set(
                        (float) (Math.cos(pitch + Math.PI / 2) * Math.cos(yaw)),
                        (float) (Math.cos(pitch + Math.PI / 2) * Math.sin(yaw)),
                        (float) Math.sin(pitch + Math.PI / 2)
                );
                applyParams();
            }
        }
    }

    private void resetParams() {
        position.set(0);
        velocity.set(0);
        oriAt.set(0);
        oriUp.set(0);
    }

    private void applyParams() {
        alListener3f(AL_POSITION, position.x, position.y, position.z);
        alListener3f(AL_VELOCITY, velocity.x, velocity.y, velocity.z);
        alListenerfv(AL_ORIENTATION, new float[] {
                oriAt.x, oriAt.y, oriAt.z, oriUp.x, oriUp.y, oriUp.z
        });
    }

}
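
A quick sanity check on the trigonometry in update() (illustrative, not part of the commit): for pitch = 0 and yaw = 0,

oriAt = (cos 0 · cos 0, cos 0 · sin 0, sin 0) = (1, 0, 0)
oriUp = (cos(π/2) · cos 0, cos(π/2) · sin 0, sin(π/2)) = (0, 0, 1)

so the listener looks along +X with +Z as up; for any pitch and yaw the dot product of the two vectors is cos(pitch)·cos(pitch + π/2) + sin(pitch)·sin(pitch + π/2) = cos(π/2) = 0, so the "at" and "up" vectors stay perpendicular for all camera angles.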

View File

@@ -7,122 +7,136 @@
import java.nio.FloatBuffer;

import static org.lwjgl.openal.AL11.*;

public class Sound {

    // Buffers
    private int audio;
    private int source;

    // Characteristics
    private FloatBuffer position = (FloatBuffer) BufferUtils.createFloatBuffer(
            3
    ).put(new float[] {
            0.0f, 0.0f, 0.0f
    }).rewind();

    private FloatBuffer velocity = (FloatBuffer) BufferUtils.createFloatBuffer(
            3
    ).put(new float[] {
            0.0f, 0.0f, 0.0f
    }).rewind();

    private float pitch = 1.0f;
    private float gain = 1.0f;

    public Sound() {}

    public Sound(int audio, int source) {
        setAudio(audio);
        setSource(source);
    }

    public Sound(
            int audio,
            FloatBuffer position,
            FloatBuffer velocity,
            float pitch,
            float gain
    ) {
        setAudio(audio);
        setPosition(position);
        setVelocity(velocity);
        setPitch(pitch);
        setGain(gain);
    }

    public Sound(
            FloatBuffer position,
            FloatBuffer velocity,
            float pitch,
            float gain
    ) {
        setPosition(position);
        setVelocity(velocity);
        setPitch(pitch);
        setGain(gain);
    }

    public void playOnce() {
        alSourcePlay(source);
    }

    public void playLoop() {
        alSourcei(source, AL_LOOPING, AL_TRUE);
        playOnce();
        alSourcei(source, AL_LOOPING, AL_FALSE);
    }

    public void stop() {
        alSourceStop(source);
    }

    public void pause() {
        alSourcePause(source);
    }

    public boolean isPlaying() {
        final int state = alGetSourcei(source, AL_SOURCE_STATE);
        return state == AL_PLAYING;
    }

    public int getAudio() {
        return audio;
    }

    public void setAudio(int audio) {
        this.audio = audio;
    }

    public void setSource(int source) {
        this.source = source;
        alSourcei(this.source, AL_BUFFER, audio);
    }

    public int getSource() {
        return source;
    }

    // OTHER

    public void setPosition(FloatBuffer position) {
        this.position = position;
        alSourcefv(source, AL_POSITION, position);
    }

    public FloatBuffer getPosition() {
        return position;
    }

    public void setVelocity(FloatBuffer velocity) {
        alSourcefv(source, AL_VELOCITY, velocity);
        this.velocity = velocity;
    }

    public FloatBuffer getVelocity() {
        return velocity;
    }

    public void setPitch(float pitch) {
        alSourcef(source, AL_PITCH, pitch);
        this.pitch = pitch;
    }

    public float getPitch() {
        return pitch;
    }

    public void setGain(float gain) {
        alSourcef(source, AL_GAIN, gain);
        this.gain = gain;
    }

    public float getGain() {
        return gain;
    }

}
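
A hedged usage sketch for the wrapper above (assumes OpenAL has been initialised and that type is a SoundType decoded earlier, e.g. by AudioReader):

// Illustrative only; "type" is assumed to exist.
Sound sound = type.genSoundSource(); // allocates an AL buffer + source pair
sound.setGain(0.5f);                 // alSourcef(source, AL_GAIN, 0.5f)
sound.setPitch(1.2f);                // alSourcef(source, AL_PITCH, 1.2f)
sound.playOnce();                    // alSourcePlay(source)

boolean stillAudible = sound.isPlaying(); // true while AL_SOURCE_STATE == AL_PLAYING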

View File

@@ -1,6 +1,5 @@
 package ru.windcorp.progressia.client.audio;
-import org.lwjgl.BufferUtils;
 import org.lwjgl.openal.AL;
 import org.lwjgl.openal.ALC;
 import org.lwjgl.openal.ALCCapabilities;
@@ -12,73 +11,97 @@
import static org.lwjgl.openal.ALC10.*;

import java.util.concurrent.ArrayBlockingQueue;

public class SoundManager {

    private static final int SOURCES_NUM = 64;
    private static int lastSourceIndex = -1;
    private static final int[] SOURCES = new int[SOURCES_NUM];
    private static final ArrayBlockingQueue<Sound> SOUNDS =
            new ArrayBlockingQueue<>(SOURCES_NUM);

    private static long device;

    private static ALCCapabilities deviceCapabilities;
    private static ALCapabilities alCapabilities;

    public static void initAL() {
        String defaultDeviceName = alcGetString(
                0,
                ALC_DEFAULT_DEVICE_SPECIFIER
        );

        device = alcOpenDevice(defaultDeviceName);

        int[] attributes = new int[1];
        long context = alcCreateContext(device, attributes);
        alcMakeContextCurrent(context);

        deviceCapabilities = ALC.createCapabilities(device);
        alCapabilities = AL.createCapabilities(deviceCapabilities);

        checkALError();

        alGenSources(SOURCES);
    }

    public static void update() {
        // Position of the listener
        Listener.getInstance().update();
    }

    private static void addSound(Sound sound) {
        if (!SOUNDS.offer(sound)) {
            Sound polled = SOUNDS.poll();
            assert polled != null;
            polled.stop();
            if (!SOUNDS.offer(sound)) {
                throw new RuntimeException();
            }
        }
    }

    private static int getNextSource() {
        if (++lastSourceIndex > SOURCES_NUM)
            lastSourceIndex = 0;
        return SOURCES[lastSourceIndex];
    }

    public static Sound createSound(SoundType soundType) {
        Sound sound = soundType.genSoundSource(getNextSource());
        addSound(sound);
        return sound;
    }

    public static void clearSounds() {
        Sound polled = SOUNDS.poll();
        while (polled != null) {
            polled.stop();
            polled = SOUNDS.poll();
        }
    }

    public static void checkALError() {
        int errorCode = alGetError();
        if (alGetError() != AL_NO_ERROR) {
            throw new RuntimeException(String.valueOf(errorCode));
        }
    }

    public static void closeAL() {
        clearSounds();
        alDeleteSources(SOURCES);
        for (Sound s : SOUNDS) {
            alDeleteBuffers(s.getAudio());
        }
        alcCloseDevice(device);
    }

    public static ALCapabilities getALCapabilities() {
        return alCapabilities;
    }

    public static ALCCapabilities getDeviceCapabilities() {
        return deviceCapabilities;
    }

}
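
For context, a rough sketch of the lifecycle this class implies (illustrative; the loop condition and resource name are stand-ins, not part of this commit):

// Illustrative lifecycle sketch.
SoundManager.initAL();                        // open device, create context, pre-generate 64 pooled sources

SoundType type = AudioReader.readAsMono("sounds/example.ogg"); // hypothetical resource
Sound sound = SoundManager.createSound(type); // binds the next pooled source and tracks the sound in SOUNDS
sound.playOnce();

while (gameIsRunning) {                       // stand-in for the real game loop
    SoundManager.update();                    // refreshes the listener's position and orientation
}

SoundManager.closeAL();                       // stops tracked sounds, deletes sources and buffers, closes the device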

View File

@@ -4,42 +4,48 @@
import java.nio.ShortBuffer;

import static org.lwjgl.openal.AL11.*;

public class SoundType {

    private ShortBuffer rawAudio;
    private int sampleRate;
    private int format;

    public SoundType(ShortBuffer rawAudio, int format, int sampleRate) {
        this.rawAudio = rawAudio;
        this.sampleRate = sampleRate;
        this.format = format;
    }

    public static int genEmptyAudio() {
        return alGenBuffers();
    }

    public static int genEmptySource() {
        return alGenSources();
    }

    public int genAudio() {
        int audio = alGenBuffers();
        alBufferData(audio, format, rawAudio, sampleRate);
        return audio;
    }

    public Sound genSoundSource() {
        return new Sound(genAudio(), alGenSources());
    }

    public Sound genSoundSource(int source) {
        if (!alIsSource(source))
            throw new RuntimeException();
        return new Sound(genAudio(), source);
    }

    public Sound genSoundSource(int source, int audio) {
        if (!alIsBuffer(audio) || !alIsSource(source))
            throw new RuntimeException();
        alBufferData(audio, format, rawAudio, sampleRate);
        return new Sound(audio, alGenSources());
    }

}
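
Finally, a sketch of how the genSoundSource overloads differ (illustrative; type stands for any decoded SoundType):

// Illustrative only.
Sound fresh = type.genSoundSource();        // generates a new buffer and a new source
int pooled = SoundType.genEmptySource();    // pre-generated source handle (alGenSources)
Sound reused = type.genSoundSource(pooled); // validates the handle, uploads into a fresh buffer
// SoundManager.createSound(...) uses the single-argument overload with a source
// taken from its pre-generated pool.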