package jmri.jmrit.sound;

import java.io.ByteArrayOutputStream;
import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Provide simple way to load and play Java 2 sounds in JMRI.
 * <p>
 * This is placed in the jmri.jmrit.sound package by process of elimination. It
 * doesn't belong in the base jmri package, as it's not a basic interface. Nor
 * is it a specific implementation of a basic interface, which would put it in
 * jmri.jmrix.
 *
 *
 * @author Bob Jacobsen Copyright (C) 2004, 2006
 */
public class SoundUtil {

    /**
     * Play a sound from a buffer.
     * <p>
     * Blocks until the buffered data has been written and drained, then
     * closes the line so the audio resource is released. (Previously the
     * line was neither drained nor closed, which could truncate playback
     * and leaked the {@link SourceDataLine}.)
     *
     * @param wavData the complete .wav file contents, header included
     */
    public static void playSoundBuffer(byte[] wavData) {

        // get playback characteristics from the wav header
        jmri.jmrit.sound.WavBuffer wb = new jmri.jmrit.sound.WavBuffer(wavData);
        float sampleRate = wb.getSampleRate();
        int sampleSizeInBits = wb.getSampleSizeInBits();
        int channels = wb.getChannels();
        boolean signed = wb.getSigned();
        boolean bigEndian = wb.getBigEndian();

        AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); // format is an AudioFormat object
        if (!AudioSystem.isLineSupported(info)) {
            // Handle the error.
            log.warn("line not supported: {}", info);
            return;
        }

        // Obtain and open the line.
        SourceDataLine line;
        try {
            line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(format);
        } catch (LineUnavailableException ex) {
            // Handle the error.
            log.error("error opening line", ex);
            return;
        }

        try {
            line.start();
            // write(byte[] b, int off, int len)
            line.write(wavData, 0, wavData.length);
            // wait for the line's internal buffer to empty so playback is not cut off
            line.drain();
        } finally {
            line.close(); // release the audio resource
        }
    }

    /** Number of frames read per iteration while buffering converted audio. */
    private static final int BUFFER_LENGTH = 4096;

    /**
     * Read an audio file and convert it to a raw sample buffer in the
     * requested format.
     *
     * @param filename         path of the source audio file
     * @param sampleRate       desired output sample rate in Hz
     * @param sampleSizeInBits desired output sample size (e.g. 8 or 16)
     * @param channels         desired number of output channels
     * @param signed           true for signed output samples
     * @param bigEndian        true for big-endian output byte order
     * @return raw (headerless) audio data in the requested format
     * @throws java.io.IOException on read failure
     * @throws javax.sound.sampled.UnsupportedAudioFileException if the file
     *         is not a recognized audio type
     */
    public static byte[] bufferFromFile(String filename,
            float sampleRate, int sampleSizeInBits, int channels,
            boolean signed, boolean bigEndian) throws java.io.IOException, javax.sound.sampled.UnsupportedAudioFileException {

        File sourceFile = new File(filename);

        // get desired output format
        AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);

        // try-with-resources: the original leaked both streams on every call
        // and on any exception thrown mid-read
        try (AudioInputStream stream = AudioSystem.getAudioInputStream(sourceFile);
                AudioInputStream inputAIS = AudioSystem.getAudioInputStream(format, stream)) {

            // Size the read buffer from the *target* format. The original used
            // the source file's frame size, which AudioFormat.getFrameSize()
            // may report as NOT_SPECIFIED (-1), producing a negative array size.
            int frameSize = format.getFrameSize();
            if (frameSize <= 0) {
                frameSize = 1; // defensive fallback for unspecified frame size
            }
            byte[] abBuffer = new byte[BUFFER_LENGTH * frameSize];

            // Read the converted audio data into a memory buffer.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            while (true) {
                log.debug("trying to read (bytes): {}", abBuffer.length);
                int nBytesRead = inputAIS.read(abBuffer);
                log.debug("read (bytes): {}", nBytesRead);
                if (nBytesRead == -1) {
                    break; // end of stream
                }
                baos.write(abBuffer, 0, nBytesRead);
            }
            return baos.toByteArray();
        }
    }

    private final static Logger log = LoggerFactory.getLogger(SoundUtil.class);
}