package jmri.jmrit.vsdecoder;

import com.jogamp.openal.AL;
import com.jogamp.openal.ALException;
import com.jogamp.openal.util.ALut;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import jmri.jmrit.audio.AudioFactory;
import jmri.jmrit.audio.AudioBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility class for doing "VSD-special" things with the JMRI Audio classes.
 *
 * <hr>
 * This file is part of JMRI.
 * <p>
 * JMRI is free software; you can redistribute it and/or modify it under the
 * terms of version 2 of the GNU General Public License as published by the Free
 * Software Foundation. See the "COPYING" file for a copy of this license.
 * <p>
 * JMRI is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * @author Mark Underwood copyright (c) 2009, 2013
 */
public class AudioUtil {
    //------------------------
    // New methods to allow creating a set of (sub)Buffers built off a single
    // input stream. Should probably look closely at this and see if it could
    // be pushed up to AbstractAudioBuffer, or better yet, moved over to AbstractAudioFactory.

    /**
     * Split the WAV data from an InputStream into a series of ByteBuffers,
     * each (approximately) between min_time and max_time long, split on zero
     * crossings.
     *
     * The method will try to create Buffers of max_time length (plus whatever is
     * needed to reach the next zero crossing), until there are not enough bytes
     * left in the InputStream. The last Buffer will be at least min_time long;
     * any remaining data shorter than min_time will be discarded. Buffers are
     * cut on zero-crossing boundaries.
     *
     * @param stream   : Input Stream, assumed to be WAV-format data
     * @param max_time : maximum (target) length of each split Buffer, in
     *                 milliseconds
     * @param min_time : minimum length of buffer to return, in milliseconds.
     *                 Any buffer that would be smaller than this will be
     *                 discarded.
     *
     * @return list of ByteBuffers containing the (split-up) data from stream
     */
    private static List<ByteBuffer> splitInputStream(InputStream stream, int max_time, int min_time) {
        List<ByteBuffer> rlist = new ArrayList<>();
        int[] format = new int[1];
        ByteBuffer[] data = new ByteBuffer[1];
        int[] size = new int[1];
        int[] freq = new int[1];
        int[] loop = new int[1];

        // Pull the WAV data into the "data" buffer.
        try {
            ALut.alutLoadWAVFile(stream, format, data, size, freq, loop);
        } catch (ALException e) {
            log.warn("Error loading JoalAudioBuffer", e);
            return null;
        }

        // OK, for now, we're only going to support 8-bit and 16-bit Mono data.
        // I'll have to figure out later how to extend this to multiple data formats.
        if ((format[0] != AL.AL_FORMAT_MONO8) && (format[0] != AL.AL_FORMAT_MONO16)) {
            log.warn("Invalid Format for splitting! Failing out. {}", parseFormat(format[0]));
            return null;
        }

        while (data[0].remaining() > 0) {
            log.debug("while loop. Source: {}", data[0]);
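            // Each pass extracts one chunk of roughly max_time milliseconds,
            // extended to the next positive-going zero crossing, and advances
            // the source buffer's position by the number of bytes consumed.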
            ByteBuffer ab = getSubBuffer(data[0], max_time, min_time, format[0], freq[0]);
            // getSubBuffer returning null isn't necessarily an error. It could mean there weren't
            // enough bytes left, so we truncated. In this case, we will have already gotten (via get())
            // the remaining bytes from data[0], so we should exit the while loop normally.
            if (ab != null) {
                ab.rewind();
                rlist.add(ab);
            }
        }
        return rlist;
    }

    public static ByteBuffer getWavData(InputStream stream) {
        int[] format = new int[1];
        int[] size = new int[1];
        ByteBuffer[] data = new ByteBuffer[1];
        int[] freq = new int[1];
        int[] loop = new int[1];

        // Pull the WAV data into the "data" buffer.
        try {
            ALut.alutLoadWAVFile(stream, format, data, size, freq, loop);
        } catch (ALException e) {
            log.warn("Error loading JoalAudioBuffer from stream", e);
            return null;
        }
        log.debug("WAV data: {}, order: {}, size: {}", data[0], data[0].order(), size[0]);
        return data[0];
    }

    public static int[] getWavFormats(InputStream stream) {
        int[] format = new int[1];
        int[] size = new int[1];
        ByteBuffer[] data = new ByteBuffer[1];
        int[] freq = new int[1];
        int[] loop = new int[1];

        int[] formats = new int[3];

        // Pull the WAV data into the "data" buffer.
        try {
            ALut.alutLoadWAVFile(stream, format, data, size, freq, loop);
        } catch (ALException e) {
            log.warn("Error loading JoalAudioBuffer from stream", e);
            return formats;
        }
        // OK, for now, we're only going to support 8-bit and 16-bit Mono data.
        // I'll have to figure out later how to extend this to multiple data formats.
        if ((format[0] != AL.AL_FORMAT_MONO8) && (format[0] != AL.AL_FORMAT_MONO16)) {
            log.warn("Invalid Format! Failing out. {}", parseFormat(format[0]));
            return formats;
        }
        formats[0] = format[0];
        formats[1] = freq[0];
        formats[2] = frameSize(format[0]);
        return formats;
    }

    public static boolean isAudioRunning() {
        AudioFactory af = jmri.InstanceManager.getDefault(jmri.AudioManager.class).getActiveAudioFactory();
        if (af == null) {
            return false;
        } else {
            return ((jmri.jmrit.audio.AudioThread) af.getCommandThread()).isThreadAlive();
        }
    }

    public static List<ByteBuffer> getByteBufferList(InputStream stream, int max_time, int min_time) {
        return splitInputStream(stream, max_time, min_time);
    }

    // This is here only because AbstractAudioBuffer.getFrameSize() doesn't look
    // at the AL versions of the format constants, and because it must be static.
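    // A frame is one sample across all channels, so frame size (bytes) =
    // channels * bytes per sample. For example, AL_FORMAT_MONO16 is
    // 1 channel * 2 bytes = 2, and 16-bit 5.1 is 6 channels * 2 bytes = 12,
    // matching the cases below.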
    private static int frameSize(int format) {
        int frameSize;
        switch (format) {
            case AudioBuffer.FORMAT_16BIT_7DOT1:
                frameSize = 16;
                break;
            case AudioBuffer.FORMAT_8BIT_7DOT1:
                frameSize = 8;
                break;
            case AudioBuffer.FORMAT_16BIT_6DOT1:
                frameSize = 14;
                break;
            case AudioBuffer.FORMAT_8BIT_6DOT1:
                frameSize = 7;
                break;
            case AudioBuffer.FORMAT_16BIT_5DOT1:
                frameSize = 12;
                break;
            case AudioBuffer.FORMAT_8BIT_5DOT1:
                frameSize = 6;
                break;
            case AudioBuffer.FORMAT_16BIT_QUAD:
                frameSize = 8;
                break;
            case AudioBuffer.FORMAT_8BIT_QUAD:
            case AudioBuffer.FORMAT_16BIT_STEREO:
                frameSize = 4;
                break;
            case AL.AL_FORMAT_MONO16:
            case AL.AL_FORMAT_STEREO8:
                frameSize = 2;
                break;
            case AL.AL_FORMAT_MONO8:
            default:
                // Note this will be wrong for all the modes we don't support.
                frameSize = 1;
        }
        return frameSize;
    }

    private static String parseFormat(int fmt) {
        switch (fmt) {
            case AL.AL_FORMAT_MONO8:
                return "8-bit mono";
            case AL.AL_FORMAT_MONO16:
                return "16-bit mono";
            case AL.AL_FORMAT_STEREO8:
                return "8-bit stereo";
            case AL.AL_FORMAT_STEREO16:
                return "16-bit stereo";
            default:
                return "Something Multichannel: val=" + fmt;
        }
    }

    /**
     * Calculates the number of bytes of offset that corresponds to the given
     * time interval, with the given data format and sample frequency.
     *
     * bytes = frame_size * time_ms * sample_frequency / 1000
     *
     * @param fmt     : audio data format
     * @param freq    : sample frequency in Hz
     * @param time_ms : time interval in milliseconds
     *
     * @return (int) number of bytes.
     */
    private static int calcTimeIndex(int fmt, int freq, int time_ms) {
        // freq == samples per second; time_ms == milliseconds to convert.
        // samples = time_ms * freq / 1000.
        // This will be approximate due to integer rounding.
        int rv = frameSize(fmt) * (time_ms * freq / 1000);
        log.debug("calcTimeIndex: freq = {} time_ms = {} rv = {}", freq, time_ms, rv);
        return rv;
    }

    /**
     * Looks at the (last) three samples in buf (with sample size defined by
     * {@code format}) and determines whether they represent a positive-going
     * zero-crossing event.
     *
     * Works only for AL.AL_FORMAT_MONO8 or AL.AL_FORMAT_MONO16. Returns false
     * otherwise.
     *
     * @param buf    : (minimum) 3-sample buffer of WAV data
     * @param len    : size of buf. Minimum 3 bytes for 8-bit, 6 bytes for
     *               16-bit mono data.
     * @param format : AL format identifier.
     * @param order  : ByteOrder of data in buf
     *
     * @return true if a zero crossing is detected.
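     *         For illustration (an assumed window of three little-endian
     *         16-bit samples, -40, -3, 12, is reported as a crossing):
     * <pre>{@code
     * byte[] window = ByteBuffer.allocate(6).order(ByteOrder.LITTLE_ENDIAN)
     *         .putShort((short) -40).putShort((short) -3).putShort((short) 12).array();
     * boolean crossed = isZeroCross(window, 6, AL.AL_FORMAT_MONO16, ByteOrder.LITTLE_ENDIAN); // true
     * }</pre>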
     */
    private static boolean isZeroCross(byte[] buf, int len, int format, ByteOrder order) {
        switch (format) {
            case AL.AL_FORMAT_MONO8:
                if (len < 3) {
                    return false;
                } else {
                    return (((0xFF & buf[len - 3]) < 128) && ((0xFF & buf[len - 2]) < 128) && ((0xFF & buf[len - 1]) >= 128));
                }
            case AL.AL_FORMAT_MONO16:
                if (len < 6) {
                    return false;
                }
                short[] sbuf = new short[len / 2];
                // Interpret the three samples using the caller-supplied byte order.
                ByteBuffer bb = ByteBuffer.wrap(buf);
                bb.order(order);
                sbuf[0] = bb.getShort();
                sbuf[1] = bb.getShort();
                sbuf[2] = bb.getShort();
                return ((sbuf[0] < 0) && (sbuf[1] < 0) && (sbuf[2] >= 0));
            default:
                return false;
        }
    }

    /**
     * Extract a sub-buffer of (at least) the specified size, extended to the
     * nearest zero crossing, from the given ByteBuffer.
     *
     * Returns null if there are fewer than (min_time * sample rate) samples in
     * source. Returns between min_time and (max_time + samples to the next zero
     * crossing) worth of samples if enough bytes are available.
     *
     * @param source   : ByteBuffer of source data.
     * @param max_time : time interval (ms) to slice the source buffer.
     * @param min_time : minimum size (ms) of the output buffer.
     * @param format   : audio format of source data
     * @param freq     : sample frequency of source data (in Hz)
     *
     * @return ByteBuffer of data copied from "source". Note: source position
     *         will be incremented by the number of bytes copied.
     */
    private static ByteBuffer getSubBuffer(ByteBuffer source, int max_time, int min_time, int format, int freq) {
        int time_size = calcTimeIndex(format, freq, max_time);
        int bufcount;
        int frameSize = frameSize(format);
        ByteBuffer retbuf;
        byte[] retbytes = new byte[source.remaining() + 1];

        log.debug("Creating sub buffer. interval = {} freq = {} time_size = {} sample size = {}", max_time, freq, time_size, frameSize(format));
        log.debug("\tBefore: Source = {}", source);

        if (time_size < source.remaining()) {
            log.debug("Extracting slice. Remaining = {}", source.remaining());
            // Enough bytes remaining to pull out a chunk.
            // First, copy over time_size bytes.
            source.get(retbytes, 0, time_size);
            bufcount = time_size;
            // Now, find the zero crossing and add bytes up to it.
            // Loop until we run out of samples or find a zero crossing.
            while ((!isZeroCross(Arrays.copyOfRange(retbytes, bufcount - 6, bufcount), 6, format, source.order())) && (source.remaining() >= frameSize)) {
                source.get(retbytes, bufcount, frameSize);
                bufcount += frameSize;
            }
        } else {
            log.debug("Not enough bytes. Copying remaining bytes = {}", source.remaining());
            // Not enough bytes remaining to pull out a chunk. Just copy/return the rest of the buffer.
            bufcount = source.remaining();
            source.get(retbytes, 0, bufcount);
        }
        // Now create the ByteBuffer for return... IF there are enough bytes to mess with. If the size of the array
        // is smaller than the specified minimum time interval, return null.
        if (bufcount > calcTimeIndex(format, freq, min_time)) {
            retbuf = ByteBuffer.allocate(bufcount);
            retbuf.order(source.order()); // set new buffer's byte order to match source buffer.
            retbuf.put(retbytes, 0, bufcount);
            log.debug("\tAfter: source = {} bufcount = {} retbuf = {}", source, bufcount, retbuf);
        } else {
            log.debug("Remaining bytes less than minimum time interval. Discarding.");
            return null;
        }
        return retbuf;
    }
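    /*
     * Illustrative usage sketch (not part of the class API; the WAV resource
     * name here is hypothetical): split a stream into roughly 250 ms chunks,
     * discarding any trailing fragment shorter than 100 ms.
     *
     *   InputStream s = AudioUtil.class.getResourceAsStream("example-horn.wav");
     *   List<ByteBuffer> chunks = AudioUtil.getByteBufferList(s, 250, 100);
     *   if (chunks != null) {
     *       log.debug("split into {} chunks", chunks.size());
     *   }
     */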
    private static final Logger log = LoggerFactory.getLogger(AudioUtil.class);

}