Java Examples for javax.sound.sampled.SourceDataLine

The following Java examples will help you understand the usage of javax.sound.sampled.SourceDataLine. These source code samples are taken from different open source projects.

Example 1
Project: Classeur-master  File: SpeakerThread.java View source code
/**
 * Finds and opens a playback (speaker) line that supports the given format.
 * Mixers provided by uk.co.mmscomputing are skipped so this project's own
 * wrapper mixer is never selected.
 *
 * @param format  the audio format the line must support
 * @param bufsize the desired internal buffer size, in bytes
 * @return an opened SourceDataLine ready for start()/write()
 * @throws LineUnavailableException if no suitable speaker line could be opened
 */
private javax.sound.sampled.SourceDataLine getSpeaker(AudioFormat format, int bufsize) throws LineUnavailableException {
    javax.sound.sampled.DataLine.Info info =
            new javax.sound.sampled.DataLine.Info(javax.sound.sampled.SourceDataLine.class, format);
    javax.sound.sampled.Mixer.Info[] infos = AudioSystem.getMixerInfo();
    for (int i = 0; i < infos.length; i++) {
        javax.sound.sampled.Mixer mixer = AudioSystem.getMixer(infos[i]);
        if (!(mixer instanceof uk.co.mmscomputing.sound.provider.Mixer)) {
            try {
                javax.sound.sampled.SourceDataLine speaker =
                        (javax.sound.sampled.SourceDataLine) mixer.getLine(info);
                speaker.open(format, bufsize);
                return speaker;
            } catch (IllegalArgumentException iae) {
                // This mixer does not support the requested line type/format; try the next one.
            } catch (LineUnavailableException lue) {
                // The line exists but is currently unavailable; try the next mixer.
            }
        }
    }
    // The original message said "No microphone available" (and named run()),
    // which was wrong for a playback-line getter; report the actual problem.
    throw new LineUnavailableException(getClass().getName() + ".getSpeaker() : \n\tNo speaker available.");
}
Example 2
Project: RSSOwl-master  File: AudioUtils.java View source code
/**
 * Synchronously plays the given audio file on the default source data line,
 * stopping early if the application is shutting down.
 *
 * @param file path of the audio file to play
 * @throws javax.sound.sampled.UnsupportedAudioFileException if the file format is not recognized
 * @throws IOException if the file cannot be read
 * @throws javax.sound.sampled.LineUnavailableException if no playback line is available
 */
private static void doPlay(String file) throws javax.sound.sampled.UnsupportedAudioFileException, IOException, javax.sound.sampled.LineUnavailableException {
    /* Open the Input-Stream to the Audio File */
    javax.sound.sampled.AudioInputStream inS = null;
    javax.sound.sampled.SourceDataLine line = null;
    try {
        inS = javax.sound.sampled.AudioSystem.getAudioInputStream(new File(file));
        /* Retrieve Format to actually play the sound */
        javax.sound.sampled.AudioFormat audioFormat = inS.getFormat();
        /* Open a SourceDataLine for Playback */
        javax.sound.sampled.DataLine.Info info = new javax.sound.sampled.DataLine.Info(javax.sound.sampled.SourceDataLine.class, audioFormat);
        line = (javax.sound.sampled.SourceDataLine) javax.sound.sampled.AudioSystem.getLine(info);
        line.open(audioFormat);
        /* Activate the line */
        line.start();
        int read;
        byte[] buf = new byte[1024];
        while ((read = inS.read(buf, 0, buf.length)) != -1 && !Controller.getDefault().isShuttingDown())
            line.write(buf, 0, read);
        /* Let queued data finish playing before the line is released */
        line.drain();
    } finally {
        // Close the line as well as the stream even when reading/writing failed;
        // the original leaked the line on any exception after getLine().
        if (line != null)
            line.close();
        if (inS != null)
            inS.close();
    }
}
Example 3
Project: NavalBattle-master  File: Music.java View source code
/**
 * Streams the audio at this.url repeatedly (looping playback) until stop is set.
 * Each pass re-opens the source, decodes it to signed 16-bit PCM, and pumps it
 * through a SourceDataLine.
 *
 * Fixes over the original: if decoding setup fails the loop no longer falls
 * through to a NullPointerException on decodedStream.read() (which was not
 * caught and killed the thread), and resources are released in a finally block.
 */
public void playLoop() {
    while (!this.stop) {
        this.decodedStream = null;
        try {
            this.stream = AudioSystem.getAudioInputStream(this.url);
            if (this.stream != null) {
                this.format = this.stream.getFormat();
                // Decode whatever the source encoding is to 16-bit signed PCM,
                // keeping the source's sample rate and channel count.
                this.decodedFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, this.format.getSampleRate(), 16, this.format.getChannels(), this.format.getChannels() * 2, this.format.getSampleRate(), false);
                this.decodedStream = AudioSystem.getAudioInputStream(this.decodedFormat, this.stream);
            }
        } catch (Exception e) {
            // Decoding setup failed; handled by the null check below.
        }
        if (this.decodedStream == null) {
            // Original code continued and crashed with an uncaught NPE here;
            // stop looping instead of spinning on a broken source.
            break;
        }
        SourceDataLine line = null;
        try {
            line = this.getLine(this.decodedFormat);
        } catch (LineUnavailableException lue) {
            // No playback line available right now.
        }
        if (line == null) {
            // Original re-entered the loop immediately, busy-retrying forever;
            // give up instead.
            break;
        }
        try {
            byte[] data = new byte[4096];
            line.start();
            int nBytesRead = 0;
            while (nBytesRead != -1) {
                nBytesRead = this.decodedStream.read(data, 0, data.length);
                if (nBytesRead != -1) {
                    line.write(data, 0, nBytesRead);
                }
                if (this.stop) {
                    break;
                }
            }
            line.drain();
        } catch (IOException io) {
            // Playback aborted mid-stream; fall through to cleanup.
        } finally {
            line.stop();
            line.close();
            try {
                this.decodedStream.close();
                this.stream.close();
            } catch (IOException ignored) {
                // Best-effort cleanup; nothing useful to do on close failure.
            }
        }
    }
}
Example 4
Project: pbi-master  File: Speaker.java View source code
/**
 * Plays a sine tone through the default audio output.
 *
 * @param hz    tone frequency in Hz
 * @param msecs tone duration in milliseconds
 * @param vol   amplitude scale, 0.0 (silent) to 1.0 (full scale)
 * @throws LineUnavailableException if no output line is available
 */
private static void tone(int hz, int msecs, double vol) throws LineUnavailableException {
    byte[] buf = new byte[1];
    // 8-bit signed mono PCM, little-endian, at the class sample rate.
    // (The original had these parameter comments garbled across lines.)
    AudioFormat af = new AudioFormat(SAMPLE_RATE, // sampleRate
            8, // sampleSizeInBits
            1, // channels
            true, // signed
            false); // bigEndian
    SourceDataLine sdl = AudioSystem.getSourceDataLine(af);
    try {
        sdl.open(af);
        sdl.start();
        // msecs * 8 samples — assumes SAMPLE_RATE is 8000 Hz (8 samples/ms); TODO confirm.
        for (int i = 0; i < msecs * 8; i++) {
            double angle = i / (SAMPLE_RATE / hz) * 2.0 * Math.PI;
            buf[0] = (byte) (Math.sin(angle) * 127.0 * vol);
            sdl.write(buf, 0, 1);
        }
        sdl.drain();
        sdl.stop();
    } finally {
        // Release the line even if write() failed (the original leaked it).
        sdl.close();
    }
}
Example 5
Project: vsminecraft-master  File: VoiceOutput.java View source code
/**
 * Client voice-output thread: reads length-prefixed audio packets from the
 * voice connection and plays them on the speaker line until the client stops.
 */
@Override
public void run() {
    try {
        sourceLine = ((SourceDataLine) AudioSystem.getLine(speaker));
        sourceLine.open(voiceClient.format, 2200);
        sourceLine.start();
        while (voiceClient.running) {
            try {
                // Each packet is prefixed with its length as a short.
                short byteCount = voiceClient.input.readShort();
                byte[] audioData = new byte[byteCount];
                voiceClient.input.readFully(audioData);
                sourceLine.write(audioData, 0, audioData.length);
            } catch (Exception e) {
                // Transient read error: drop this packet but keep the output
                // thread alive (the original also swallowed this, silently).
            }
        }
    } catch (Exception e) {
        Mekanism.logger.error("VoiceServer: Error while running client output thread.");
        e.printStackTrace();
    } finally {
        // Release the audio line on exit; the original leaked it.
        if (sourceLine != null) {
            sourceLine.close();
        }
    }
}
Example 6
Project: freedomotic-master  File: AePlayWave.java View source code
/**
 * Plays the wave file named by {@code filename}, panning hard left or right
 * according to {@code curPosition} when the output line supports panning.
 */
public void run() {
    final File soundFile = new File(filename);
    if (!soundFile.exists()) {
        System.err.println("Wave file not found: " + filename);
        return;
    }
    AudioInputStream audioInputStream;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(soundFile);
    } catch (UnsupportedAudioFileException e1) {
        e1.printStackTrace();
        return;
    } catch (IOException e1) {
        e1.printStackTrace();
        return;
    }
    final AudioFormat format = audioInputStream.getFormat();
    final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine auline;
    try {
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }
    // Apply stereo panning when the line supports it.
    if (auline.isControlSupported(FloatControl.Type.PAN)) {
        final FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
        if (curPosition == Position.RIGHT) {
            pan.setValue(1.0f);
        } else if (curPosition == Position.LEFT) {
            pan.setValue(-1.0f);
        }
    }
    auline.start();
    final byte[] chunk = new byte[EXTERNAL_BUFFER_SIZE];
    try {
        // Pump the file through the line until EOF.
        for (int count = 0; count != -1; ) {
            count = audioInputStream.read(chunk, 0, chunk.length);
            if (count >= 0) {
                auline.write(chunk, 0, count);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        return;
    } finally {
        auline.drain();
        auline.close();
    }
}
Example 7
Project: javacuriosities-master  File: AudioReceiver.java View source code
/**
 * Plays the given raw audio bytes synchronously on the default output line,
 * using the application's shared audio format.
 *
 * @param soundbytes raw PCM data to play
 */
private static void reproduce(byte soundbytes[]) {
    try {
        final DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, AudioFormatHelper.getAudioFormat());
        // The source data line is what we write playback data into.
        final SourceDataLine playback = (SourceDataLine) AudioSystem.getLine(lineInfo);
        playback.open(AudioFormatHelper.getAudioFormat());
        playback.start();
        playback.write(soundbytes, 0, soundbytes.length);
        playback.drain();
        playback.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 8
Project: jcodec-master  File: AudioTest.java View source code
/**
 * Plays a generated test tone through a Clip.
 *
 * @throws LineUnavailableException if the clip cannot be opened
 * @throws IOException if reading the tone source fails
 */
public static void main1(String[] args) throws IOException, InterruptedException, LineUnavailableException {
    // AudioSource tone = new WavAudioSource(new
    // RandomAccessFileInputStream(new File(args[0])));
    AudioSource tone = new ToneAudioSource();
    AudioInfo audioInfo = tone.getAudioInfo();
    AudioFormat af = audioInfo.getFormat();
    // Check support for the line type actually used below (a Clip); the
    // original built this Info for SourceDataLine, which was inconsistent
    // with the Clip-based playback that follows.
    DataLine.Info info = new DataLine.Info(Clip.class, af);
    if (!AudioSystem.isLineSupported(info)) {
        throw new RuntimeException("Line matching " + info + " not supported.");
    }
    Clip clip = AudioSystem.getClip();
    // 96000 frames of tone data, frameSize bytes per frame.
    ByteBuffer bb = ByteBuffer.allocate(af.getFrameSize() * 96000);
    tone.getFrame(bb);
    byte[] array = NIOUtils.toArray(bb);
    clip.open(af, array, 0, array.length);
    clip.start();
    clip.drain();
    clip.close();
}
Example 9
Project: jpc-master  File: AudioLayer.java View source code
/**
 * Opens the emulator's audio output line and starts the mixer callback thread.
 *
 * @param bufferSize size in bytes of the SourceDataLine's internal buffer
 * @param freq       sample rate in Hz
 * @return true if the line opened and the audio thread started, false on failure
 */
public static boolean open(int bufferSize, int freq) {
    // 16-bit stereo signed little-endian PCM.
    AudioFormat format = new AudioFormat(freq, 16, 2, true, false);
    try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
        line.start();
        audioThreadExit = false;
        // Must stay smaller than the bufferSize passed to open(), otherwise
        // line.write will block. Allocated before the thread is constructed so
        // the thread can never observe a null buffer.
        audioBuffer = new byte[512];
        audioThread = new Thread() {

            public void run() {
                while (!audioThreadExit) {
                    boolean result;
                    synchronized (Mixer.audioMutex) {
                        result = Mixer.MIXER_CallBack(audioBuffer, audioBuffer.length);
                    }
                    if (result)
                        line.write(audioBuffer, 0, audioBuffer.length);
                    else {
                        // No mixed data ready yet; back off briefly.
                        try {
                            Thread.sleep(20);
                        } catch (InterruptedException e) {
                            // Preserve interrupt status so the exit flag check
                            // (and any caller) can react; the original swallowed it.
                            Thread.currentThread().interrupt();
                        }
                    }
                }
            }
        };
        audioThread.start();
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
}
Example 10
Project: Konsolenradio-master  File: PlayerTest.java View source code
/**
 * Streams decoded audio from din to a playback line until EOF.
 *
 * @param targetFormat format the playback line must accept
 * @param din          decoded audio stream to play; closed when playback ends
 * @throws IOException              if reading the stream fails
 * @throws LineUnavailableException if no matching line can be obtained
 */
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException {
    byte[] data = new byte[4096];
    SourceDataLine line = getLine(targetFormat);
    if (line != null) {
        line.start();
        try {
            int nBytesRead = 0;
            while (nBytesRead != -1) {
                nBytesRead = din.read(data, 0, data.length);
                if (nBytesRead != -1)
                    line.write(data, 0, nBytesRead);
            }
            line.drain();
        } finally {
            // Release the line and the stream even if read/write failed;
            // the original leaked both on IOException. The unused
            // nBytesWritten local was also dropped.
            line.stop();
            line.close();
            din.close();
        }
    }
}
Example 11
Project: loli.io-master  File: MP3Player.java View source code
/**
 * Streams decoded audio from din to a playback line until EOF.
 *
 * @param targetFormat format the playback line must accept
 * @param din          decoded audio stream to play; closed when playback ends
 * @throws IOException              if reading the stream fails
 * @throws LineUnavailableException if no matching line can be obtained
 */
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException {
    byte[] data = new byte[4096];
    SourceDataLine line = getLine(targetFormat);
    if (line != null) {
        line.start();
        try {
            int nBytesRead = 0;
            while (nBytesRead != -1) {
                nBytesRead = din.read(data, 0, data.length);
                if (nBytesRead != -1)
                    line.write(data, 0, nBytesRead);
            }
            line.drain();
        } finally {
            // Release the line and the stream even if read/write failed;
            // the original leaked both on IOException. Dropping the unused
            // nBytesWritten local also removes the @SuppressWarnings need.
            line.stop();
            line.close();
            din.close();
        }
    }
}
Example 12
Project: Mekanism-master  File: VoiceOutput.java View source code
/**
 * Client voice-output loop: reads length-prefixed audio packets from the voice
 * connection and streams them to the speaker line until the client stops.
 */
@Override
public void run() {
    try {
        sourceLine = ((SourceDataLine) AudioSystem.getLine(speaker));
        sourceLine.open(voiceClient.format, 2200);
        sourceLine.start();
        // Buffers hoisted out of the loop for less allocation/gc.
        byte[] audioData = new byte[4096];
        int byteCount;
        int length;
        while (voiceClient.running) {
            try {
                // Unsigned length prefix: a negative packet size would be meaningless.
                byteCount = voiceClient.input.readUnsignedShort();
                while (byteCount > 0 && voiceClient.running) {
                    length = audioData.length;
                    if (length > byteCount)
                        length = byteCount;
                    // read() returns the actual amount read, so playback can begin
                    // even while the rest of the packet is still arriving.
                    length = voiceClient.input.read(audioData, 0, length);
                    if (length < 0)
                        throw new EOFException();
                    sourceLine.write(audioData, 0, length);
                    byteCount -= length;
                }
            } catch (EOFException eof) {
                Mekanism.logger.error("VoiceServer: Unexpected input EOF Exception occured.");
                break;
            } catch (Exception e) {
                // Transient read error: drop this packet and keep the thread alive.
            }
        }
    } catch (Exception e) {
        Mekanism.logger.error("VoiceServer: Error while running client output thread.");
        e.printStackTrace();
    } finally {
        // Release the audio line on exit; the original leaked it.
        if (sourceLine != null) {
            sourceLine.close();
        }
    }
}
Example 13
Project: myLib-master  File: AdpcmPlayTest3D.java View source code
//	@Test
/**
 * Streams an MKV file over HTTP, decodes its IMA ADPCM (WAV variant) audio
 * frames, upsamples each decoded sample 8x via polynomial interpolation, and
 * plays the result on a 44.1 kHz mono 16-bit line.
 *
 * @throws Exception on any network, decode, or audio failure
 */
public void test() throws Exception {
    SourceDataLine audioLine = null;
    // 44.1 kHz
    int samplingRate = 44100;
    // 16bit
    int bit = 16;
    AudioFormat format = new AudioFormat((float) samplingRate, bit, 1, true, false);
    // only for monoral here...
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    audioLine = (SourceDataLine) AudioSystem.getLine(info);
    audioLine.open(format);
    audioLine.start();
    logger.info("start test");
    // now, try to get data. decode adpcm, play.
    IFileReadChannel source = FileReadChannel.openFileReadChannel("http://49.212.39.17/gc-25-1-3.h264_adpcmimawav5k.mkv");
    IContainer container = null;
    MkvTagReader reader = new MkvTagReader();
    // Sliding window of the three previously decoded samples, used by the
    // interpolation below; "fourth" is the oldest.
    short fourth = 0;
    short third = 0;
    short second = 0;
    while ((container = reader.read(source)) != null) {
        if (container instanceof MkvBlockTag) {
            MkvBlockTag blockTag = (MkvBlockTag) container;
            IFrame frame = blockTag.getFrame();
            if (frame instanceof AdpcmImaWavFrame) {
                AdpcmImaWavFrame aFrame = (AdpcmImaWavFrame) frame;
                // frame is available.
                IReadChannel frameData = new ByteReadChannel(aFrame.getData());
                BitLoader loader = new BitLoader(frameData);
                // treat as little endian.
                loader.setLittleEndianFlg(true);
                // ADPCM block header: 16-bit initial predictor, 8-bit step-table
                // index, 8-bit reserved.
                Bit16 predictorData = new Bit16();
                Bit8 indexData = new Bit8();
                Bit8 reservedData = new Bit8();
                loader.load(predictorData, indexData, reservedData);
                int predictor = (short) predictorData.get();
                int index = indexData.get();
                ;
                int step = imaStepTable[index];
                // put the data on the buffer.
                ByteBuffer buffer = ByteBuffer.allocate(aFrame.getSampleNum() * 2);
                buffer.order(ByteOrder.LITTLE_ENDIAN);
                buffer.putShort((short) predictor);
                // Each remaining sample is encoded as a 4-bit nibble relative to
                // the running predictor.
                Bit4[] nibbleList = new Bit4[aFrame.getSampleNum() - 1];
                for (int i = 0; i < nibbleList.length; i++) {
                    nibbleList[i] = new Bit4();
                }
                loader.load(nibbleList);
                for (Bit4 nibble : nibbleList) {
                    index = nextIndex(index, nibble.get());
                    predictor = nextPredictor(index, nibble.get(), predictor, step);
                    step = imaStepTable[index];
                    buffer.putShort((short) predictor);
                }
                buffer.flip();
                // Upsample 8x: each decoded 16-bit sample expands to 8 output samples.
                ByteBuffer completeBuffer = ByteBuffer.allocate(buffer.remaining() * 8);
                completeBuffer.order(ByteOrder.LITTLE_ENDIAN);
                while (buffer.remaining() > 0) {
                    short first = buffer.getShort();
                    // Divided differences over the last four samples; appears to
                    // evaluate an interpolating polynomial at 8 points — the
                    // exact scheme is undocumented upstream.
                    double c0, c1, c2, d0, d1, e0;
                    c0 = (third - fourth) / 8D;
                    c1 = (second - third) / 8D;
                    c2 = (first - second) / 8D;
                    d0 = (c1 - c0) / 16D;
                    d1 = (c2 - c1) / 16D;
                    e0 = (d1 - d0) / 24D;
                    completeBuffer.putShort((short) (third + (0) * (c0 + (0 - 8) * (d0 + e0 * (0 - 16)))));
                    completeBuffer.putShort((short) (third + (1) * (c0 + (1 - 8) * (d0 + e0 * (1 - 16)))));
                    completeBuffer.putShort((short) (third + (2) * (c0 + (2 - 8) * (d0 + e0 * (2 - 16)))));
                    completeBuffer.putShort((short) (third + (3) * (c0 + (3 - 8) * (d0 + e0 * (3 - 16)))));
                    completeBuffer.putShort((short) (third + (4) * (c0 + (4 - 8) * (d0 + e0 * (4 - 16)))));
                    completeBuffer.putShort((short) (third + (5) * (c0 + (5 - 8) * (d0 + e0 * (5 - 16)))));
                    completeBuffer.putShort((short) (third + (6) * (c0 + (6 - 8) * (d0 + e0 * (6 - 16)))));
                    completeBuffer.putShort((short) (third + (7) * (c0 + (7 - 8) * (d0 + e0 * (7 - 16)))));
                    // Shift the sample window forward.
                    fourth = third;
                    third = second;
                    second = first;
                }
                completeBuffer.flip();
                audioLine.write(completeBuffer.array(), 0, completeBuffer.remaining());
            }
        }
    }
    audioLine.drain();
    audioLine.close();
    audioLine = null;
}
Example 14
Project: RemoteDesktopSharing-master  File: VoiceChat.java View source code
/**
 * Connects the voice-chat socket, then starts a microphone capture thread and
 * a speaker playback thread over that connection.
 */
public void startCapture() {
    try {
        sock = new Socket(ipAddress, 50000);
        out = new BufferedOutputStream(sock.getOutputStream());
        in = new BufferedInputStream(sock.getInputStream());
        Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
        System.out.println("Available mixers:");
        for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
            System.out.println(mixerInfo[cnt].getName());
        }
        audioFormat = getAudioFormat();
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        // Pick the first mixer that can provide a capture line.
        for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
            try {
                Mixer mixer = AudioSystem.getMixer(mixerInfo[cnt]);
                targetDataLine = (TargetDataLine) mixer.getLine(dataLineInfo);
                System.err.println(mixerInfo[cnt].getName());
                break;
            } catch (Exception e) {
                continue;
            }
        }
        if (targetDataLine == null) {
            // The original left this block empty and fell through to a
            // NullPointerException; fail with a clear message instead (still
            // caught and printed by the outer handler, as before).
            throw new LineUnavailableException("No mixer provides a capture line for " + audioFormat);
        }
        targetDataLine.open(audioFormat);
        targetDataLine.start();
        Thread captureThread = new CaptureThread();
        captureThread.setName("Capture Thread Tx");
        captureThread.start();
        DataLine.Info dataLineInfo1 = new DataLine.Info(SourceDataLine.class, audioFormat);
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo1);
        sourceDataLine.open(audioFormat);
        sourceDataLine.start();
        Thread playThread = new PlayThread();
        playThread.setName("Play Thread TX");
        playThread.start();
    } catch (Exception e) {
        System.out.println(e);
    }
}
Example 15
Project: SonicFieldRepo-master  File: SF_StereoMonitor.java View source code
/**
 * Plays a pair of signals as a stereo monitor: normalises both inputs, interleaves
 * them as big-endian 16-bit stereo PCM, plays the result synchronously, and
 * returns the ORIGINAL (un-normalised) inputs as the operator's output.
 *
 * @param input a bunch of exactly two SFSignal operands (left, right)
 * @return a list containing the two original input signals
 * @throws SFPL_RuntimeException if the input is not a pair or playback fails
 */
@Override
public Object Interpret(Object input) throws SFPL_RuntimeException {
    List<Object> lin = Caster.makeBunch(input);
    //$NON-NLS-1$
    if (lin.size() != 2)
        throw new SFPL_RuntimeException(Messages.getString("SF_StereoMonitor.1"));
    try {
        SFSignal dataIn1a = Caster.makeSFSignal(lin.get(0));
        SFSignal dataIn2a = Caster.makeSFSignal(lin.get(1));
        // Normalised copies are used only for playback; the originals are returned.
        SFSignal dataIn1 = SF_Normalise.doNormalisation(dataIn1a);
        SFSignal dataIn2 = SF_Normalise.doNormalisation(dataIn2a);
        // 16-bit stereo signed big-endian PCM at the engine sample rate.
        AudioFormat af = new AudioFormat((float) SFConstants.SAMPLE_RATE, 16, 2, true, true);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, af);
        SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info);
        source.open(af);
        source.start();
        // 4 bytes per stereo frame: left hi, left lo, right hi, right lo.
        byte[] buf = new byte[dataIn1.getLength() * 4];
        // i advances by 4 per pass: the loop's ++i plus three explicit ++i in
        // the body. i / 4 is therefore the frame (sample) index.
        for (int i = 0; i < buf.length; ++i) {
            short sample = 0;
            if (i / 4 < dataIn1.getLength()) {
                // Scale [-1, 1] float samples to 16-bit range.
                sample = (short) (dataIn1.getSample(i / 4) * 32767.0);
            }
            // Big-endian: high byte first, then low byte.
            buf[i] = (byte) (sample >> 8);
            buf[++i] = (byte) (sample & 0xFF);
            sample = 0;
            if (i / 4 < dataIn2.getLength()) {
                sample = (short) (dataIn2.getSample(i / 4) * 32767.0);
            }
            buf[++i] = (byte) (sample >> 8);
            buf[++i] = (byte) (sample & 0xFF);
        }
        source.write(buf, 0, buf.length);
        source.drain();
        source.stop();
        source.close();
        List<SFSignal> ret = new ArrayList<>();
        ret.add(dataIn1a);
        ret.add(dataIn2a);
        return ret;
    } catch (Exception e) {
        throw new SFPL_RuntimeException(Messages.getString("SF_Monitor.1"), e);
    }
}
Example 16
Project: soundlibs-master  File: PlayerTest.java View source code
/**
 * Streams decoded audio from din to a playback line until EOF.
 *
 * @param targetFormat format the playback line must accept
 * @param din          decoded audio stream to play; closed when playback ends
 * @throws IOException              if reading the stream fails
 * @throws LineUnavailableException if no matching line can be obtained
 */
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException {
    byte[] data = new byte[4096];
    SourceDataLine line = getLine(targetFormat);
    if (line != null) {
        line.start();
        try {
            int nBytesRead = 0;
            while (nBytesRead != -1) {
                nBytesRead = din.read(data, 0, data.length);
                if (nBytesRead != -1)
                    line.write(data, 0, nBytesRead);
            }
            line.drain();
        } finally {
            // Release the line and the stream even if read/write failed;
            // the original leaked both on IOException. The unused
            // nBytesWritten local was also dropped.
            line.stop();
            line.close();
            din.close();
        }
    }
}
Example 17
Project: spacegraph1-master  File: JavaSoundAudio.java View source code
/**
 * Configures the oscillator and playback line, then starts the render thread.
 *
 * @throws IOException if the audio output line cannot be obtained or opened
 */
public void start() throws IOException {
    // 16-bit stereo signed PCM, 4 bytes per frame, little-endian.
    audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, fSampleRate, 16, 2, 4, fSampleRate, false);
    oscillator = new Oscillator(nWaveformType, fSignalFrequency, fAmplitude, audioFormat, AudioSystem.NOT_SPECIFIED);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
    } catch (Exception e) {
        // The original printed the stack trace and started the render thread
        // anyway, leaving `line` null for it; abort instead. IOException is
        // already part of this method's contract.
        throw new IOException("Could not open audio line", e);
    }
    new Thread(this).start();
}
Example 18
Project: Sphinx-master  File: AudioPlayer.java View source code
/* Plays the AudioData in a separate thread: waits for a notify on `audio`,
 * then plays the currently selected region of the audio buffer. */
@Override
public void run() {
    while (true) {
        try {
            synchronized (audio) {
                // Block until another thread signals new data on `audio`.
                // NOTE(review): wait() is not guarded by a condition loop, so a
                // spurious wakeup would replay the current buffer — confirm intended.
                audio.wait();
                AudioFormat format = audio.getAudioFormat();
                short[] data = audio.getAudioData();
                // Clamp the selection to the available data; -1 means "to the end".
                int start = Math.max(0, selectionStart);
                int end = selectionEnd;
                if (end == -1) {
                    end = data.length;
                }
                DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
                line = (SourceDataLine) AudioSystem.getLine(info);
                line.open(format);
                line.start();
                // One 16-bit sample (2 bytes; byte order set by Utils.toBytes's
                // boolean flag) is written per iteration.
                byte[] frame = new byte[2];
                for (int i = start; i < end && i < data.length; i++) {
                    Utils.toBytes(data[i], frame, false);
                    line.write(frame, 0, frame.length);
                }
                line.drain();
                line.close();
                line = null;
            }
        } catch (Exception e) {
            // Any failure ends the player thread.
            e.printStackTrace();
            break;
        }
    }
}
Example 19
Project: sphinx4-master  File: AudioPlayer.java View source code
/* Plays the AudioData in a separate thread. */
@Override
public void run() {
    while (true) {
        try {
            synchronized (audio) {
                // Block until another thread signals that audio data is ready.
                audio.wait();
                AudioFormat fmt = audio.getAudioFormat();
                short[] samples = audio.getAudioData();
                // Clamp the selection; -1 for the end means "play to the end".
                int first = Math.max(0, selectionStart);
                int last = selectionEnd == -1 ? samples.length : selectionEnd;
                DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, fmt);
                line = (SourceDataLine) AudioSystem.getLine(lineInfo);
                line.open(fmt);
                line.start();
                byte[] sampleBytes = new byte[2];
                for (int idx = first; idx < last && idx < samples.length; idx++) {
                    Utils.toBytes(samples[idx], sampleBytes, false);
                    line.write(sampleBytes, 0, sampleBytes.length);
                }
                line.drain();
                line.close();
                line = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
            break;
        }
    }
}
Example 20
Project: voipcall-master  File: AudioDeviceScanner.java View source code
/**
 * Scans every mixer's source lines and collects those that are supported
 * SourceDataLines, wrapped as Speaker devices.
 *
 * @return the set of discovered speakers (possibly empty)
 */
private Set<Speaker> discoverSpeakers() {
    final Set<Speaker> found = new HashSet<>();
    for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
        if (DEBUG)
            System.out.println("mixerinfo: " + mixerInfo);
        final Mixer mixer = AudioSystem.getMixer(mixerInfo);
        if (DEBUG)
            System.out.println("mixer:     " + mixer);
        if (DEBUG)
            System.out.println("mixerinfo: " + mixer.getLineInfo());
        for (Line.Info sourceInfo : mixer.getSourceLineInfo()) {
            try {
                final Line candidate = mixer.getLine(sourceInfo);
                if (candidate instanceof SourceDataLine) {
                    if (DEBUG)
                        System.out.println("    lineinfo:   " + sourceInfo);
                    if (DEBUG)
                        System.out.println("    line:       " + candidate);
                    if (DEBUG)
                        System.out.println("    lineinfo:   " + candidate.getLineInfo());
                    if (mixer.isLineSupported(sourceInfo)) {
                        found.add(new Speaker(new Speaker.Info(mixerInfo, mixer, sourceInfo, (SourceDataLine) candidate)));
                    } else {
                        if (DEBUG)
                            System.out.println("    NOT SUPPORTED!");
                    }
                }
            } catch (LineUnavailableException e) {
                e.printStackTrace();
            }
        }
    }
    return found;
}
Example 21
Project: Bonsai-Game-Library-master  File: GameSound.java View source code
/**
 * Probes whether the audio system can open a 16-bit stereo output line at one
 * of several common sample rates (44.1 kHz, 22.05 kHz, 11.05 kHz).
 *
 * @return true as soon as one format opens and starts successfully, false if none do
 */
public final boolean init() {
    final AudioFormat[] formats = new AudioFormat[] { new AudioFormat(44100.0f, 16, 2, true, false), new AudioFormat(22050.0f, 16, 2, true, false), new AudioFormat(11050.0f, 16, 2, true, false) };
    for (AudioFormat format : formats) {
        SourceDataLine line = null;
        try {
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
            line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(format);
            line.start();
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close in finally so the probe line is released even if open() or
            // start() throws (the original leaked it in that case).
            if (line != null)
                line.close();
        }
    }
    return false;
}
Example 22
Project: ClothoBiofabEdition-master  File: AePlayWave.java View source code
/**
 * Plays the bundled "bull.wav" resource, panning hard left or right according
 * to {@code curPosition} when the output line supports panning.
 */
@Override
public void run() {
    final FileObject fo = FileUtil.getConfigFile("org/clothocad/tool/bulltrowell/bull.wav");
    InputStream istream;
    try {
        istream = fo.getInputStream();
    } catch (FileNotFoundException ex) {
        ex.printStackTrace();
        return;
    }
    AudioInputStream audioInputStream;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(istream);
    } catch (UnsupportedAudioFileException e1) {
        e1.printStackTrace();
        return;
    } catch (IOException e1) {
        e1.printStackTrace();
        return;
    }
    final AudioFormat format = audioInputStream.getFormat();
    final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine auline;
    try {
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }
    // Pan hard left/right when requested and supported.
    if (auline.isControlSupported(FloatControl.Type.PAN)) {
        final FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
        if (curPosition == Position.RIGHT)
            pan.setValue(1.0f);
        else if (curPosition == Position.LEFT)
            pan.setValue(-1.0f);
    }
    auline.start();
    final byte[] chunk = new byte[EXTERNAL_BUFFER_SIZE];
    try {
        // Pump the stream through the line until EOF.
        for (int count = 0; count != -1; ) {
            count = audioInputStream.read(chunk, 0, chunk.length);
            if (count >= 0)
                auline.write(chunk, 0, count);
        }
    } catch (IOException e) {
        e.printStackTrace();
        return;
    } finally {
        auline.drain();
        auline.close();
    }
}
Example 23
Project: Izou-master  File: MixerAspect.java View source code
/**
 * Creates the appropriate IzouSoundLine wrapper if the request originates from
 * an AddOn; system-obtained lines are returned unwrapped.
 *
 * @param line the line to (possibly) wrap
 * @return the IzouSoundLine wrapper when an addon requested the line, otherwise the line itself
 */
static Line getAndRegisterLine(Line line) {
    Optional<AddOnModel> addOnModelForClassLoader = main.getSecurityManager().getAddOnModelForClassLoader();
    if (!addOnModelForClassLoader.isPresent()) {
        logger.debug("the SoundManager will not manage this line, obtained by system");
        return line;
    }
    AddOnModel addOnModel = addOnModelForClassLoader.get();
    // Wrap in the most specific Izou wrapper available: SourceDataLine
    // (combined with Clip when both apply), then Clip, then DataLine,
    // then the generic base class.
    final IzouSoundLineBaseClass izouSoundLine;
    if (line instanceof SourceDataLine) {
        izouSoundLine = line instanceof Clip
                ? new IzouSoundLineClipAndSDLine((Clip) line, (SourceDataLine) line, main, false, addOnModel)
                : new IzouSoundSourceDataLine((SourceDataLine) line, main, false, addOnModel);
    } else if (line instanceof Clip) {
        izouSoundLine = new IzouSoundLineClip((Clip) line, main, false, addOnModel);
    } else if (line instanceof DataLine) {
        izouSoundLine = new IzouSoundDataLine((DataLine) line, main, false, addOnModel);
    } else {
        izouSoundLine = new IzouSoundLineBaseClass(line, main, false, addOnModel);
    }
    main.getSoundManager().addIzouSoundLine(addOnModel, izouSoundLine);
    return izouSoundLine;
}
Example 24
Project: JaC64-master  File: AudioDriverSE.java View source code
/**
 * Allocates and starts the audio output line used by the SID emulation.
 *
 * @param sampleRate playback sample rate in Hz
 * @param bufferSize line buffer size in bytes
 */
public void init(int sampleRate, int bufferSize) {
    // 16-bit mono signed little-endian PCM.
    final AudioFormat af = new AudioFormat(sampleRate, 16, 1, true, false);
    final DataLine.Info dli = new DataLine.Info(SourceDataLine.class, af, bufferSize);
    try {
        dataLine = (SourceDataLine) AudioSystem.getLine(dli);
        if (dataLine != null) {
            System.out.println("DataLine allocated: " + dataLine);
            dataLine.open(dataLine.getFormat(), bufferSize);
            volume = (FloatControl) dataLine.getControl(FloatControl.Type.MASTER_GAIN);
            setMasterVolume(100);
            // Begin playback on the freshly opened line.
            dataLine.start();
        } else {
            System.out.println("DataLine: not existing...");
        }
    } catch (Exception e) {
        System.out.println("Problem while getting data line ");
        e.printStackTrace();
        dataLine = null;
    }
}
Example 25
Project: jucy-master  File: AePlayWave.java View source code
/** Resolves the bundled sound file and streams it to a SourceDataLine,
 *  applying hard left/right panning according to {@code curPosition}. */
public void run() {
    // Locate the sound resource inside this plug-in's bundle.
    Bundle bundle = Platform.getBundle(Application.PLUGIN_ID);
    Path path = new Path(filename);
    URL url = FileLocator.find(bundle, path, Collections.EMPTY_MAP);
    AudioInputStream audioInputStream = null;
    SourceDataLine auline = null;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(new BufferedInputStream(url.openStream()));
        AudioFormat format = audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        // Silently skip playback when no line supports this format.
        if (!AudioSystem.isLineSupported(info)) {
            return;
        }
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
        // Pan hard right/left when requested and the control is available.
        if (auline.isControlSupported(FloatControl.Type.PAN)) {
            FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
            if (curPosition == Position.RIGHT) {
                pan.setValue(1.0f);
            } else if (curPosition == Position.LEFT) {
                pan.setValue(-1.0f);
            }
        }
        auline.start();
        // Copy the stream to the line until end-of-stream (read returns -1).
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        while (nBytesRead != -1) {
            nBytesRead = audioInputStream.read(abData, 0, abData.length);
            if (nBytesRead >= 0) {
                auline.write(abData, 0, nBytesRead);
            }
        }
    } catch (IOException e) {
        logger.warn(e, e);
        return;
    } catch (UnsupportedAudioFileException e) {
        logger.warn(e, e);
    } catch (LineUnavailableException e) {
        logger.warn(e, e);
    } finally {
        // Let buffered audio finish, then release the line (skipped if the
        // line was never obtained).
        if (auline != null) {
            auline.drain();
            auline.close();
        }
    }
}
Example 26
Project: marketcetera-master  File: PlayWave.java View source code
/** Plays the configured audio file ({@code mFilename}) synchronously,
 *  logging and returning on any failure along the way. */
@Override
public void run() {
    //$NON-NLS-1$
    SLF4JLoggerProxy.debug(this, "Playing audio file: \"{0}\"", mFilename);
    // Bail out early if the configured file is missing.
    File source = new File(mFilename);
    if (!source.exists()) {
        Messages.AUDIO_CANNOT_FIND_FILE.error(this, mFilename);
        return;
    }
    // Decode the file into a stream of raw PCM frames.
    AudioInputStream audio = null;
    try {
        audio = AudioSystem.getAudioInputStream(source);
    } catch (UnsupportedAudioFileException e) {
        SLF4JLoggerProxy.warn(this, e);
        return;
    } catch (IOException e) {
        SLF4JLoggerProxy.warn(this, e);
        return;
    }
    // Obtain and open a playback line matching the stream's format.
    SourceDataLine playback = null;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, audio.getFormat());
    try {
        playback = (SourceDataLine) AudioSystem.getLine(lineInfo);
        playback.open(audio.getFormat());
    } catch (LineUnavailableException e) {
        SLF4JLoggerProxy.warn(this, e);
        return;
    }
    playback.start();
    // Pump the stream into the line until end-of-stream, then drain and close.
    byte[] chunk = new byte[EXTERNAL_BUFFER_SIZE];
    try {
        int read;
        while ((read = audio.read(chunk, 0, chunk.length)) != -1) {
            playback.write(chunk, 0, read);
        }
    } catch (IOException e) {
        SLF4JLoggerProxy.warn(this, e);
        return;
    } finally {
        playback.drain();
        playback.close();
    }
}
Example 27
Project: NearInfinity-master  File: AudioPlayer.java View source code
/**
   * Starts playback of audio data associated with the specified audio buffer.
   * Blocks until playback finishes, the stream is exhausted, or
   * {@code isPlaying()} turns false; reuses the shared data line when the
   * audio format is unchanged.
   * @param audioBuffer AudioBuffer object containing audio data.
   * @throws Exception On error
   */
public void play(AudioBuffer audioBuffer) throws Exception {
    // Nothing to play without audio data.
    if (audioBuffer == null || audioBuffer.getAudioData() == null)
        return;
    setPlaying(true);
    setStopped(false);
    AudioInputStream ais = null;
    try {
        ais = AudioSystem.getAudioInputStream(new ByteArrayInputStream(audioBuffer.getAudioData()));
    } catch (UnsupportedAudioFileException e) {
        throw new Exception("Unsupported audio format");
    }
    // (Re)open the shared data line only when the format changed.
    if (dataLine == null || !ais.getFormat().matches(audioFormat)) {
        audioFormat = ais.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
        if (!AudioSystem.isLineSupported(info)) {
            throw new Exception("Unsupported audio format");
        }
        dataLine = (SourceDataLine) AudioSystem.getLine(info);
        // 16 KiB internal line buffer.
        dataLine.open(ais.getFormat(), 16384);
    }
    dataLine.start();
    // Feed the line until end-of-stream or playback is cancelled externally.
    while (isPlaying()) {
        int numBytesRead = ais.read(buffer, 0, buffer.length);
        if (numBytesRead < 0)
            break;
        dataLine.write(buffer, 0, numBytesRead);
    }
    ais.close();
    // Drains only when the loop exited because isPlaying() became false;
    // natural end-of-stream (break) leaves isPlaying() true and skips it.
    // NOTE(review): this condition looks inverted — confirm intended behavior.
    if (!isPlaying()) {
        dataLine.drain();
    }
    setStopped(true);
}
Example 28
Project: openjdk-master  File: PhantomMixers.java View source code
/** Enumerates every mixer and its source/target line infos, counting how
 *  many SourceDataLine and TargetDataLine formats the system exposes, then
 *  prints instructions for manually verifying "phantom" (disabled) mixers. */
public static void main(String args[]) throws Exception {
    // Running totals of playback/capture formats across all mixers.
    int SDLformats = 0;
    int TDLformats = 0;
    Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
    for (int i = 0; i < mixerInfo.length; i++) {
        Mixer.Info thisMixerInfo = mixerInfo[i];
        System.out.println("Mixer #" + i + ": " + thisMixerInfo.getName() + ": " + thisMixerInfo.getDescription());
        Mixer mixer = AudioSystem.getMixer(thisMixerInfo);
        Line.Info[] srcLineInfo = mixer.getSourceLineInfo();
        Line.Info[] dstLineInfo = mixer.getTargetLineInfo();
        int count = srcLineInfo.length + dstLineInfo.length;
        System.out.print(" -> " + (srcLineInfo.length + dstLineInfo.length) + " line");
        // Pluralize: 0 -> "lines", 1 -> "line", n -> "lines:".
        switch(count) {
            case 0:
                System.out.println("s");
                break;
            case 1:
                System.out.println("");
                break;
            default:
                System.out.println("s:");
                break;
        }
        int l;
        // Count formats offered by playback (source) lines.
        for (l = 0; l < srcLineInfo.length; l++) {
            System.out.println("    " + srcLineInfo[l].toString());
            if (srcLineInfo[l].getLineClass() == SourceDataLine.class && (srcLineInfo[l] instanceof DataLine.Info)) {
                SDLformats += ((DataLine.Info) srcLineInfo[l]).getFormats().length;
            }
        }
        // Count formats offered by capture (target) lines.
        for (l = 0; l < dstLineInfo.length; l++) {
            System.out.println("    " + dstLineInfo[l].toString());
            if (dstLineInfo[l].getLineClass() == TargetDataLine.class && (dstLineInfo[l] instanceof DataLine.Info)) {
                TDLformats += ((DataLine.Info) dstLineInfo[l]).getFormats().length;
            }
        }
    }
    if (mixerInfo.length == 0) {
        System.out.println("[no mixers present]");
    }
    System.out.println("" + SDLformats + " total formats for SourceDataLines");
    System.out.println("" + TDLformats + " total formats for TargetDataLines");
    System.out.println("");
    System.out.println("If there are audio devices correctly installed on your");
    System.out.println("system, you should see at least one Mixer, and in total");
    System.out.println("at least each one SourceDataLine and TargetDataLine, both");
    System.out.println("providing at least one format.");
    System.out.println("");
    System.out.println("Now disable your soundcard and repeat the test.");
    System.out.println("The corresponding mixer(s) should not provide any formats");
    System.out.println("anymore. If you disable all available soundcards");
    System.out.println("on your computer, the number of formats above should be");
    System.out.println("0 for both line types (although mixers are allowed to exist).");
}
Example 29
Project: QuickBuild-Tray-Monitor-master  File: WavePlayer.java View source code
/** Plays the audio from the {@code in} stream until end-of-stream or an
 *  external pause request, applying left/right panning per {@code curPosition}. */
@Override
public void run() {
    AudioInputStream audioIn = null;
    try {
        audioIn = AudioSystem.getAudioInputStream(in);
        AudioFormat format = audioIn.getFormat();
        SourceDataLine auline = null;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
        // Pan hard left/right when the control is supported.
        if (auline.isControlSupported(FloatControl.Type.PAN)) {
            FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
            if (curPosition == Position.RIGHT)
                pan.setValue(1.0f);
            else if (curPosition == Position.LEFT)
                pan.setValue(-1.0f);
        }
        auline.start();
        // Stream until end-of-stream (read returns -1) or 'paused' is set.
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        try {
            while (nBytesRead != -1 && !paused) {
                nBytesRead = audioIn.read(abData, 0, abData.length);
                if (nBytesRead >= 0)
                    auline.write(abData, 0, nBytesRead);
            }
        } catch (IOException e) {
            e.printStackTrace();
            return;
        } finally {
            // Flush remaining buffered audio and free the line.
            auline.drain();
            auline.close();
        }
    } catch (UnsupportedAudioFileException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    } finally {
        // Always release both the raw input and the decoded audio stream.
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(audioIn);
    }
}
Example 30
Project: sos-dendrogram-master  File: FlatAudioPlayThread.java View source code
/** Plays the current song's audio file synchronously.
 *  @return {@code true} when playback ended due to a missing file, an error,
 *          or natural end-of-stream; {@code false} only when an external
 *          {@code stopPlaying} request interrupted playback. */
@Override
public boolean doPlaying() {
    AudioInputStream inStream;
    File audioFile = song.getAudioFile();
    // Validate the file before involving the audio system.
    if (audioFile == null || !audioFile.exists() || !audioFile.isFile()) {
        System.err.println("No file given or file not found!");
        return true;
    }
    try {
        inStream = AudioSystem.getAudioInputStream(audioFile);
    } catch (UnsupportedAudioFileException e2) {
        e2.printStackTrace();
        return true;
    } catch (IOException e2) {
        e2.printStackTrace();
        return true;
    }
    // Obtain and open a playback line matching the stream's format.
    AudioFormat format = inStream.getFormat();
    SourceDataLine auline = null;
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    try {
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
    } catch (LineUnavailableException e1) {
        e1.printStackTrace();
        return true;
    }
    auline.start();
    // Stream until a stop request or end-of-stream (read returns -1).
    int nBytesRead = 0;
    byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    try {
        while (!stopPlaying && nBytesRead != -1) {
            nBytesRead = inStream.read(abData, 0, abData.length);
            if (nBytesRead >= 0) {
                auline.write(abData, 0, nBytesRead);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        return true;
    } finally {
        // Flush buffered audio and release the line in all cases.
        auline.drain();
        auline.close();
    }
    return !stopPlaying;
}
Example 31
Project: stupidwarriors-master  File: OggPlayer.java View source code
public void play(String filePath) {
    final File file = new File(filePath);
    try (final AudioInputStream in = getAudioInputStream(file)) {
        final AudioFormat outFormat = getOutFormat(in.getFormat());
        final Info info = new Info(SourceDataLine.class, outFormat);
        try (final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info)) {
            if (line != null) {
                line.open(outFormat);
                line.start();
                AudioInputStream inputMystream = AudioSystem.getAudioInputStream(outFormat, in);
                stream(inputMystream, line);
                line.drain();
                line.stop();
            }
        }
    } catch (UnsupportedAudioFileExceptionLineUnavailableException | IOException |  e) {
        throw new IllegalStateException(e);
    }
}
Example 32
Project: MDE-Web-Service-Front-End-master  File: MultiWavePlayer.java View source code
// end MultiWavePlayer
/**
     * Allocates <code>line</code>, a new <code>SourceDataLine</code> for
     * this <code>MultiWavePlayer</code>. opens and Starts <code>line</code>.
     * Starts the player thread.
     * 
     * @see gov.nasa.ial.mde.sound.MultiWavePlayer#line
     * @see javax.sound.sampled.SourceDataLine 
     */
public void initLine() {
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, MultiWavePlayer.J_BUFF_SIZE);
        // NOTE(review): getControl throws IllegalArgumentException when PAN
        // is unsupported, and that is not caught here — confirm PAN support
        // is guaranteed for this line.
        pan = (FloatControl) line.getControl(FloatControl.Type.PAN);
    }// end try
     catch (LineUnavailableException lue) {
        // Without a line there is nothing to start; leave silently.
        System.err.println("No audio line available; no sound will play.");
        return;
    }
    // end catch
    // starts the internal buffer-filling operation
    line.start();
    // starts my buffer-filling thread
    doPlay();
}
Example 33
Project: ChromisPOS-master  File: PlayWave.java View source code
/** Loads the configured wave file and plays it synchronously to completion,
 *  panning hard left/right according to {@code curPosition}. */
public void run() {
    // Validate the file before involving the audio system.
    File soundFile = new File(filename);
    if (!soundFile.exists()) {
        System.err.println("Wave file not found: " + filename);
        return;
    }
    AudioInputStream audioInputStream = null;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(soundFile);
    } catch (UnsupportedAudioFileException e1) {
        e1.printStackTrace();
        return;
    } catch (IOException e1) {
        e1.printStackTrace();
        return;
    }
    // Obtain and open a playback line matching the stream's format.
    AudioFormat format = audioInputStream.getFormat();
    SourceDataLine auline = null;
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    try {
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }
    // Pan hard left/right when the control is supported.
    if (auline.isControlSupported(FloatControl.Type.PAN)) {
        FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
        if (curPosition == Position.RIGHT)
            pan.setValue(1.0f);
        else if (curPosition == Position.LEFT)
            pan.setValue(-1.0f);
    }
    auline.start();
    // Stream until end-of-stream (read returns -1).
    int nBytesRead = 0;
    byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    try {
        while (nBytesRead != -1) {
            nBytesRead = audioInputStream.read(abData, 0, abData.length);
            if (nBytesRead >= 0)
                auline.write(abData, 0, nBytesRead);
        }
    } catch (IOException e) {
        e.printStackTrace();
        return;
    } finally {
        // Flush buffered audio and release the line in all cases.
        auline.drain();
        auline.close();
    }
}
Example 34
Project: classlib6-master  File: SoftAudioPusher.java View source code
/** Pump loop: forwards audio from the pusher's input stream to its output
 *  line until {@code active} is cleared or the stream ends. */
public void run() {
    // Snapshot the outer object's fields into locals for the loop.
    byte[] chunk = SoftAudioPusher.this.buffer;
    AudioInputStream source = SoftAudioPusher.this.ais;
    SourceDataLine sink = SoftAudioPusher.this.sourceDataLine;
    try {
        int count;
        // Stop when deactivated or when the source reports end-of-stream.
        while (active && (count = source.read(chunk)) >= 0) {
            sink.write(chunk, 0, count);
        }
    } catch (IOException e) {
        // Treat an I/O failure as end-of-stream and deactivate the pusher.
        active = false;
    }
}
Example 35
Project: etyllica-master  File: AudioHandler.java View source code
/** Plays the given raw audio bytes asynchronously in the supplied format;
 *  returns immediately after starting the background playback thread. */
public synchronized void playAudio(byte[] audio, final AudioFormat format) {
    try {
        InputStream input = new ByteArrayInputStream(audio);
        // Wrap the raw bytes as a frame-oriented audio stream.
        final AudioInputStream ais = new AudioInputStream(input, format, audio.length / format.getFrameSize());
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        Runnable runner = new Runnable() {

            // Roughly one second of audio per buffer.
            int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

            byte buffer[] = new byte[bufferSize];

            public void run() {
                try {
                    int count;
                    // Stream until end-of-stream (read returns -1).
                    while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                        if (count > 0) {
                            //PlayAudio
                            line.write(buffer, 0, count);
                        }
                    }
                    // Flush buffered audio, then release the line.
                    line.drain();
                    line.close();
                } catch (IOException e) {
                    System.err.println("I/O problems: " + e);
                }
            }
        };
        Thread playThread = new Thread(runner);
        playThread.start();
    } catch (LineUnavailableException e) {
        System.err.println("Line unavailable: " + e);
    }
}
Example 36
Project: gjtapi-master  File: PlaybackURLConnection.java View source code
/**
     * {@inheritDoc}
     */
/**
 * {@inheritDoc}
 *
 * Obtains, opens, and starts a SourceDataLine matching the playback format
 * so data written through this connection can be rendered. Idempotent:
 * returns immediately when already connected.
 *
 * @throws IOException if the required line is unsupported or unavailable
 */
public void connect() throws IOException {
    // Already connected: nothing to do.
    if (connected) {
        return;
    }
    // Get audio format that will open playback
    AudioFormat format = getAudioFormat();
    // Representation of the line that will be opened
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    // Checks if line is supported
    if (!AudioSystem.isLineSupported(info)) {
        throw new IOException("Cannot open the requested line: " + info.toString());
    }
    // Obtain, open and start the line.
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, AudioSystem.NOT_SPECIFIED);
        // Starts the line
        line.start();
    } catch (LineUnavailableException ex) {
        // FIX: preserve the underlying cause instead of discarding it.
        throw new IOException("Line is unavailable", ex);
    }
    // Marks this URLConnection as connected
    connected = true;
}
Example 37
Project: ikvm-openjdk-master  File: SoftAudioPusher.java View source code
/** Pump loop: copies audio from the input stream to the output line until
 *  {@code active} is cleared or the stream ends. */
public void run() {
    // Cache outer-instance fields locally for the duration of the loop.
    byte[] buffer = SoftAudioPusher.this.buffer;
    AudioInputStream ais = SoftAudioPusher.this.ais;
    SourceDataLine sourceDataLine = SoftAudioPusher.this.sourceDataLine;
    try {
        while (active) {
            // Read from audio source
            int count = ais.read(buffer);
            // A negative count signals end-of-stream.
            if (count < 0)
                break;
            // Write byte buffer to source output
            sourceDataLine.write(buffer, 0, count);
        }
    } catch (IOException e) {
        // Stop pushing on I/O failure.
        active = false;
    }
}
Example 38
Project: interval-music-compositor-master  File: ExtractMusicPlayer.java View source code
/**
 * Streams decoded PCM audio from the given stream to a SourceDataLine until
 * end-of-stream, a read error, or the {@code play} flag is cleared; then
 * stops and closes the line and clears the stream state.
 *
 * @param inputStream PCM audio stream to render
 */
private void playPCM(AudioInputStream inputStream) {
    try {
        AudioFormat audioFormat = inputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
        line.start();
        int nBytesRead = 0;
        byte[] abData = new byte[AUDIO_BUFFER_SIZE];
        while (nBytesRead != -1 && play) {
            try {
                nBytesRead = inputStream.read(abData, 0, abData.length);
            } catch (IOException e) {
                addDebugMessage("Unable to read data stream: " + e.getMessage());
                // FIX: stop on a read failure; previously nBytesRead kept its
                // old value, so the loop would spin forever replaying stale
                // buffer contents.
                break;
            }
            if (nBytesRead >= 0) {
                line.write(abData, 0, nBytesRead);
            }
        }
        line.stop();
        line.close();
        play = false;
        clearStream();
    } catch (LineUnavailableException e) {
        addDebugMessage("Unable to play music: " + e.getMessage());
    }
    addDebugMessage("Stopped playing.");
}
Example 39
Project: JavaX-master  File: AudioPlayer.java View source code
// playAudioFile
/**
 * Plays audio from the given audio input stream synchronously, maximizing
 * the output volume when a MASTER_GAIN control is available, and draining
 * and closing the line before returning.
 *
 * @param audioInputStream stream of sampled audio to play
 */
public static void playAudioStream(AudioInputStream audioInputStream) {
    // Audio format provides information like sample rate, size, channels.
    AudioFormat audioFormat = audioInputStream.getFormat();
    System.out.println("Play input audio format=" + audioFormat);
    // Open a data line to play our type of sampled audio.
    // Use SourceDataLine for play and TargetDataLine for record.
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Play.playAudioStream does not handle this type of audio on this system.");
        return;
    }
    try {
        // Create a SourceDataLine for play back (throws LineUnavailableException).
        SourceDataLine dataLine = (SourceDataLine) AudioSystem.getLine(info);
        // The line acquires system resources (throws LineUnavailableException).
        dataLine.open(audioFormat);
        // Maximize the output volume within the control's legal range.
        // FIX: the original set a fixed 100.0F, which exceeds MASTER_GAIN's
        // maximum (typically about +6 dB) and throws IllegalArgumentException.
        if (dataLine.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            FloatControl volume = (FloatControl) dataLine.getControl(FloatControl.Type.MASTER_GAIN);
            volume.setValue(Math.min(100.0F, volume.getMaximum()));
        }
        // Allows the line to move data in and out to a port.
        dataLine.start();
        // Buffer sized to roughly one second of audio.
        int bufferSize = (int) audioFormat.getSampleRate() * audioFormat.getFrameSize();
        byte[] buffer = new byte[bufferSize];
        // Move the data until done or there is an error.
        try {
            int bytesRead = 0;
            while (bytesRead >= 0) {
                bytesRead = audioInputStream.read(buffer, 0, buffer.length);
                if (bytesRead >= 0) {
                    // Odd sized sounds throw an exception if we don't write
                    // the same amount we read.
                    dataLine.write(buffer, 0, bytesRead);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("Play.playAudioStream draining line.");
        // Continues data line I/O until its buffer is drained.
        dataLine.drain();
        System.out.println("Play.playAudioStream closing line.");
        // Closes the data line, freeing any resources such as the audio device.
        dataLine.close();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    }
}
Example 40
Project: jdk7u-jdk-master  File: DataLine_ArrayIndexOutOfBounds.java View source code
/** Exercises a SourceDataLine from the given mixer with deliberately
 *  out-of-bounds write parameters, expecting ArrayIndexOutOfBoundsException
 *  rather than a crash; updates the global total/failed counters. */
static void testSDL(Mixer mixer, Scenario scenario) {
    log("  Testing SDL (scenario: " + scenario + ")...");
    Line.Info linfo = new Line.Info(SourceDataLine.class);
    SourceDataLine line = null;
    try {
        line = (SourceDataLine) mixer.getLine(linfo);
        log("    got line: " + line);
        log("    open...");
        line.open();
    } catch (IllegalArgumentException ex) {
        // The mixer offers no SourceDataLine: skip, not a failure.
        log("    unsupported (IllegalArgumentException)");
        return;
    } catch (LineUnavailableException ex) {
        log("    unavailable: " + ex);
        return;
    }
    total++;
    log("    start...");
    line.start();
    // Stop the line asynchronously so the blocking write below returns.
    AsyncLineStopper lineStopper = new AsyncLineStopper(line, STOPPER_DELAY);
    int offset = scenario.getBufferOffset(line);
    int len = scenario.getBufferLength(line);
    // ensure len represents integral number of frames
    len -= len % line.getFormat().getFrameSize();
    log("    write...");
    lineStopper.schedule();
    try {
        line.write(buffer, offset, len);
        // Reaching here means the bad bounds were not rejected.
        log("    ERROR: didn't get ArrayIndexOutOfBoundsException");
        failed++;
    } catch (ArrayIndexOutOfBoundsException ex) {
        log("    OK: got ArrayIndexOutOfBoundsException: " + ex);
    }
    lineStopper.force();
}
Example 41
Project: JVerge-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/** Obtains, opens, and starts the playback source line, applying the
 *  configured volume via MASTER_GAIN when supported.
 *  @throws JavaLayerException if no source line could be obtained */
protected void createSource() throws JavaLayerException {
    // Captures whichever failure occurred, for the exception thrown below.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            if (source.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
                /** Rafael: Changed this block to set volume properly */
                // Converts a 0-100 volume to decibels (20 * log10(v/100)).
                // NOTE(review): if 'volume' is an integer type, volume / 100
                // truncates (0 below 100), and log(0) yields -Infinity —
                // confirm 'volume' is a floating-point field.
                float db = (float) (Math.log(volume / 100) / Math.log(10.0) * 20.0);
                FloatControl c = (FloatControl) source.getControl(FloatControl.Type.MASTER_GAIN);
                c.setValue(db);
            }
            source.start();
        }
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    // 'source' also stays null when the obtained line was not a
    // SourceDataLine, in which case 't' is null.
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 42
Project: JWildfire-master  File: JWFAudioDevice.java View source code
/** Obtains, opens, and starts the playback source line.
 *  @throws JavaLayerException if no source line could be obtained */
protected void createSource() throws JavaLayerException {
    // Record the failure cause, if any, for the exception thrown below.
    Throwable failure = null;
    try {
        Line candidate = AudioSystem.getLine(getSourceLineInfo());
        if (candidate instanceof SourceDataLine) {
            source = (SourceDataLine) candidate;
            source.open(audioFormat);
            source.start();
        }
    } catch (Throwable t) {
        failure = t;
    }
    if (source == null) {
        throw new JavaLayerException("Cannot obtain source audio line", failure);
    }
}
Example 43
Project: ManagedRuntimeInitiative-master  File: SoftAudioPusher.java View source code
/** Pump loop: copies audio from the input stream to the output line until
 *  {@code active} is cleared or the stream ends. */
public void run() {
    // Cache outer-instance fields locally for the duration of the loop.
    byte[] buffer = SoftAudioPusher.this.buffer;
    AudioInputStream ais = SoftAudioPusher.this.ais;
    SourceDataLine sourceDataLine = SoftAudioPusher.this.sourceDataLine;
    try {
        while (active) {
            // Read from audio source
            int count = ais.read(buffer);
            // A negative count signals end-of-stream.
            if (count < 0)
                break;
            // Write byte buffer to source output
            sourceDataLine.write(buffer, 0, count);
        }
    } catch (IOException e) {
        // Stop pushing on I/O failure.
        active = false;
    }
}
Example 44
Project: mpc_tp2-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/** Obtains, opens, and starts the playback source line.
 *  @throws JavaLayerException if no source line could be obtained */
protected void createSource() throws JavaLayerException {
    // Captures whichever failure occurred, for the exception thrown below.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
                if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
                {
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
                    c.setValue(c.getMaximum());
                }*/
            source.start();
        }
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    // 'source' also stays null when the obtained line was not a
    // SourceDataLine, in which case 't' is null.
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 45
Project: Musicdroid-deprecated-master  File: JavaSoundThread.java View source code
/** Audio render loop: pulls sample blocks from libpd and writes them to a
 *  JavaSound line until {@code terminated} is set, then drains and closes.
 *  @throws LineUnavailableException if the playback line cannot be opened */
private void perform() throws LineUnavailableException {
    // JavaSound setup.
    // 16-bit signed big-endian PCM at the configured rate and channel count.
    int sampleSize = 2;
    AudioFormat audioFormat = new AudioFormat(sampleRate, 8 * sampleSize, outChans, true, true);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(info);
    sourceDataLine.open(audioFormat);
    sourceDataLine.start();
    // Buffer setup for exchanging samples between libpd and JavaSound.
    // Question: Is this the best possible solution?  It seems to involve too
    // much copying.
    int frames = PdBase.blockSize() * ticks;
    short[] dummy = new short[0];
    short[] samples = new short[frames * outChans];
    byte[] rawSamples = new byte[samples.length * sampleSize];
    // Short view over the byte buffer; ByteBuffer's default big-endian order
    // matches the big-endian AudioFormat above.
    ByteBuffer buf = ByteBuffer.wrap(rawSamples);
    ShortBuffer shortBuf = buf.asShortBuffer();
    while (// Note: sourceDataLine.write seems to clear the interrupted flag, and so Thread.interrupted() doesn't work here.
    !terminated) {
        PdBase.process(ticks, dummy, samples);
        shortBuf.rewind();
        shortBuf.put(samples);
        sourceDataLine.write(rawSamples, 0, rawSamples.length);
    }
    // Shutdown.
    sourceDataLine.drain();
    sourceDataLine.stop();
    sourceDataLine.close();
}
Example 46
Project: openjdk8-jdk-master  File: DataLine_ArrayIndexOutOfBounds.java View source code
/** Exercises a SourceDataLine from the given mixer with deliberately
 *  out-of-bounds write parameters, expecting ArrayIndexOutOfBoundsException
 *  rather than a crash; updates the global total/failed counters. */
static void testSDL(Mixer mixer, Scenario scenario) {
    log("  Testing SDL (scenario: " + scenario + ")...");
    // Ask the mixer for any SourceDataLine and try to open it.
    Line.Info request = new Line.Info(SourceDataLine.class);
    SourceDataLine sdl = null;
    try {
        sdl = (SourceDataLine) mixer.getLine(request);
        log("    got line: " + sdl);
        log("    open...");
        sdl.open();
    } catch (IllegalArgumentException ex) {
        // The mixer offers no SourceDataLine: skip, not a failure.
        log("    unsupported (IllegalArgumentException)");
        return;
    } catch (LineUnavailableException ex) {
        log("    unavailable: " + ex);
        return;
    }
    total++;
    log("    start...");
    sdl.start();
    // Schedule an asynchronous stop so the blocking write below returns,
    // then attempt a write with deliberately invalid bounds.
    AsyncLineStopper stopper = new AsyncLineStopper(sdl, STOPPER_DELAY);
    int off = scenario.getBufferOffset(sdl);
    int count = scenario.getBufferLength(sdl);
    // ensure len represents integral number of frames
    count -= count % sdl.getFormat().getFrameSize();
    log("    write...");
    stopper.schedule();
    try {
        sdl.write(buffer, off, count);
        log("    ERROR: didn't get ArrayIndexOutOfBoundsException");
        failed++;
    } catch (ArrayIndexOutOfBoundsException ex) {
        log("    OK: got ArrayIndexOutOfBoundsException: " + ex);
    }
    stopper.force();
}
Example 47
Project: partyplayer-master  File: XugglePlayer.java View source code
/**
 * Opens and starts a JavaSound playback line matching the decoder's sample
 * rate, bit depth, and channel count (signed, little-endian PCM).
 *
 * @param coder decoder describing the audio stream to be rendered
 * @throws RuntimeException if the playback line cannot be opened
 */
private void openJavaSound(IStreamCoder coder) {
    AudioFormat af = new AudioFormat(coder.getSampleRate(), (int) IAudioSamples.findSampleBitDepth(coder.getSampleFormat()), coder.getChannels(), true, false);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, af);
    try {
        mLine = (SourceDataLine) AudioSystem.getLine(info);
        mLine.open(af);
        mLine.start();
    } catch (LineUnavailableException e) {
        // FIX: keep the underlying exception as the cause for diagnosis.
        throw new RuntimeException("Could not open data line", e);
    }
}
Example 48
Project: rtty_modem-master  File: rtty.java View source code
/**
 * Captures roughly one buffer of audio from the default input line and then
 * plays the captured bytes back on an output line.
 *
 * @param args unused command-line arguments
 * @throws LineUnavailableException if a capture or playback line cannot be opened
 */
public static void main(String[] args) throws LineUnavailableException {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    // 8 kHz, 8-bit, mono, signed, big-endian PCM.
    float sampleRate = 8000;
    int sampleSizeInBits = 8;
    int channels = 1;
    boolean signed = true;
    boolean bigEndian = true;
    final AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    // Open and start the capture (microphone) line.
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format);
    line.start();
    Runnable runner = new Runnable() {

        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {
                // Capture a single buffer (about one second) of audio.
                int count = line.read(buffer, 0, buffer.length);
                if (count > 0) {
                    out.write(buffer, 0, count);
                }
                out.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-1);
            }
        }
    };
    Thread captureThread = new Thread(runner);
    captureThread.start();
    // FIX: wait for the capture thread to finish; previously toByteArray()
    // ran immediately and almost always observed an empty buffer.
    try {
        captureThread.join();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return;
    }
    byte audio[] = out.toByteArray();
    InputStream input = new ByteArrayInputStream(audio);
    // FIX: request a SourceDataLine explicitly; the original reused the
    // TargetDataLine info, which yields a TargetDataLine and caused a
    // ClassCastException at the cast below.
    DataLine.Info playbackInfo = new DataLine.Info(SourceDataLine.class, format);
    final SourceDataLine line1 = (SourceDataLine) AudioSystem.getLine(playbackInfo);
    final AudioInputStream ais = new AudioInputStream(input, format, audio.length / format.getFrameSize());
    line1.open(format);
    line1.start();
    runner = new Runnable() {

        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {
                int count;
                // Play the captured bytes back until end-of-stream.
                while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                    if (count > 0) {
                        line1.write(buffer, 0, count);
                    }
                }
                line1.drain();
                line1.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-3);
            }
        }
    };
    Thread playThread = new Thread(runner);
    playThread.start();
}
Example 49
Project: Soen6471Frinika-master  File: OutputBufferTest.java View source code
public void startAudioOutput() {
    // Acquire a SourceDataLine from the configured mixer, open it, and spawn
    // the high-priority rendering thread (this object is the Runnable).
    try {
        lineOut = (SourceDataLine) AudioSystem.getMixer(currentMixer).getLine(infoOut);
        // Standard latency: request our own buffer size; otherwise let the line decide.
        if (standardLatency) {
            lineOut.open(format, bufferSize);
        } else {
            lineOut.open(format);
        }
        lineOut.start();
        System.out.println("Buffersize: " + bufferSize + " / " + lineOut.getBufferSize());
    } catch (Exception e) {
        // No usable output device; leave lineOut null so callers can detect it.
        lineOut = null;
        System.out.println("No audio output available. Use Audio Devices dialog to reconfigure.");
    }
    // NOTE(review): the thread starts even when no line could be opened —
    // presumably run() tolerates a null lineOut; verify.
    Thread audioThread = new Thread(this);
    audioThread.setPriority(Thread.MAX_PRIORITY);
    audioThread.start();
}
Example 50
Project: swip-master  File: AdaptiveSpeakerOutput.java View source code
// AudioOutputStream stream;
private void init(AudioFormat format) {
    // Acquires and opens a SourceDataLine for the given format.
    // Here
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(lineInfo)) {
        System.err.println("ERROR: AudioLine not supported by this System.");
    }
    try {
        line = (SourceDataLine) AudioSystem.getLine(lineInfo);
        // A buffer size could be passed to open() here if lower latency is needed.
        line.open(format);
        Logger.debug("Line opened");
    } catch (LineUnavailableException e) {
        System.err.println("ERROR: LineUnavailableException at AudioReceiver()");
        e.printStackTrace();
    }
    // Fix: 'line' stays null when getLine()/open() failed above, so the
    // original unconditional line.isOpen() call could throw a NullPointerException.
    if (line == null || !line.isOpen()) {
        Logger.error("Linja on kiinni");
    }
}
Example 51
Project: TarsosTranscoder-master  File: StreamerTester.java View source code
/**
	 * Plays an http MP3 stream, transcoded on the fly to PCM.
	 */
public void playStream() throws EncoderException, LineUnavailableException, IOException, UnsupportedAudioFileException, InterruptedException {
    // Streams an http MP3 source, transcoded on the fly to PCM, to the default audio line,
    // asserting that every chunk read is fully written.
    String source;
    SourceDataLine line;
    DataLine.Info info;
    //The source stream
    source = "http://mp3.streampower.be/stubru-high.mp3";
    source = "http://mp3.streampower.be/klara-high.mp3";
    //Set the transcoding to WAV PCM, 16bits LE, 16789Hz (to make sure resampling is done). 
    Attributes attributes = DefaultAttributes.WAV_PCM_S16LE_STEREO_44KHZ.getAttributes();
    attributes.setSamplingRate(16789);
    //Stream the same file with on the fly decoding:		
    AudioInputStream streamedAudioInputStream = Streamer.stream(source, attributes);
    AudioFormat audioFormat = Streamer.streamAudioFormat(attributes);
    byte[] streamBuffer = new byte[1024];
    info = new DataLine.Info(SourceDataLine.class, audioFormat);
    line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(audioFormat);
    line.start();
    while (streamedAudioInputStream.available() > streamBuffer.length) {
        int bytesRead = streamedAudioInputStream.read(streamBuffer);
        if (bytesRead < 0) {
            // Unexpected EOF despite available() — stop instead of writing stale data.
            break;
        }
        // Fix: write only the bytes actually read; read() may return a short count,
        // and the original always wrote the full buffer (including stale bytes).
        int bytesWrote = line.write(streamBuffer, 0, bytesRead);
        assertEquals("The number of bytes read should match the number of bytes written to the dataline", bytesRead, bytesWrote);
    }
    // Fix: let queued audio finish playing before tearing the line down.
    line.drain();
    line.close();
    streamedAudioInputStream.close();
}
Example 52
Project: Tira-Teima-master  File: Som.java View source code
private void playLocal() {
    // Plays the wave file named by the 'filename' field through a SourceDataLine,
    // applying a hard left/right pan based on 'curPosition' when supported.
    File soundFile = new File(filename);
    if (!soundFile.exists()) {
        System.err.println("Wave file not found: " + filename);
        return;
    }
    AudioInputStream audioInputStream;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(soundFile);
    } catch (UnsupportedAudioFileException | IOException e1) {
        e1.printStackTrace();
        return;
    }
    // Fix: ensure the decoded stream is closed on every exit path
    // (it was previously leaked on the early returns below).
    try {
        AudioFormat format = audioInputStream.getFormat();
        SourceDataLine auline;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        try {
            auline = (SourceDataLine) AudioSystem.getLine(info);
            auline.open(format);
        } catch (Exception e) {
            // Covers LineUnavailableException and any other acquisition failure,
            // matching the original catch-all behavior.
            e.printStackTrace();
            return;
        }
        // Hard pan fully right or left depending on the configured position.
        if (auline.isControlSupported(FloatControl.Type.PAN)) {
            FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
            if (curPosition == Position.RIGHT)
                pan.setValue(1.0f);
            else if (curPosition == Position.LEFT)
                pan.setValue(-1.0f);
        }
        auline.start();
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        try {
            // Pump until EOF; read() returning -1 ends the loop.
            while (nBytesRead != -1) {
                nBytesRead = audioInputStream.read(abData, 0, abData.length);
                if (nBytesRead >= 0)
                    auline.write(abData, 0, nBytesRead);
            }
        } catch (IOException e) {
            e.printStackTrace();
            return;
        } finally {
            // Let buffered audio play out, then release the line.
            auline.drain();
            auline.close();
        }
    } finally {
        try {
            audioInputStream.close();
        } catch (IOException ignored) {
            // best-effort close
        }
    }
}
Example 53
Project: Towel-master  File: PlayingStreamed.java View source code
/**
     * Plays the noise.
     */
public void run() {
    // Streams audio from 'stream' to a freshly created data line until EOF
    // or until the sound is marked finished; honours the paused flag.
    if (isFinished)
        throw new IllegalStateException("Sound already played.");
    SourceDataLine line = null;
    InputStream input = null;
    try {
        byte[] buffer = createStreamedBuffer();
        line = createDataLine(buffer.length);
        input = stream.newInputStream();
        line.start();
        int numBytesRead = 0;
        while (numBytesRead != -1 && !isFinished()) {
            if (isPaused()) {
                // Busy-wait while paused; yield so other threads can run.
                Thread.yield();
                continue;
            }
            numBytesRead = input.read(buffer, 0, buffer.length);
            if (numBytesRead != -1)
                line.write(buffer, 0, numBytesRead);
        }
    } catch (Exception e) {
        // Fix: was silently swallowed, hiding playback failures entirely.
        e.printStackTrace();
    } finally {
        stop();
        // Fix: the input stream was previously never closed.
        if (input != null) {
            try {
                input.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
        if (line != null) {
            line.drain();
            line.close();
        }
    }
}
Example 54
Project: VocabularyTrainerSuite-master  File: AudioFilePlayer.java View source code
public void playAudioFile(String urlString) {
    // Opens the audio resource at urlString and plays it synchronously
    // through a SourceDataLine.
    URL url;
    try {
        url = new URL(urlString);
    } catch (MalformedURLException ex) {
        ex.printStackTrace();
        return;
    }
    // Fix: removed a dead preliminary AudioSystem.getAudioInputStream(url) call
    // whose result was never used and never closed (resource leak); any failure
    // it would have logged is raised again by getAudioInputStream below.
    SourceDataLine line = null;
    try (AudioInputStream ais = getAudioInputStream(url)) {
        AudioFormat audioFormat = getAudioFormat(ais.getFormat());
        Info info = new Info(SourceDataLine.class, audioFormat);
        line = (SourceDataLine) AudioSystem.getLine(info);
        if (line != null) {
            line.open(audioFormat);
            line.start();
            // Convert to the target format on the fly and pump it to the line.
            stream(getAudioInputStream(audioFormat, ais), line);
            line.drain();
            line.stop();
            line.close();
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    } catch (UnsupportedAudioFileException ex) {
        ex.printStackTrace();
    } catch (LineUnavailableException ex) {
        ex.printStackTrace();
    } finally {
        // Safety net: ensures the line is released even if streaming threw.
        if (line != null)
            line.close();
    }
}
Example 55
Project: ydkjx-master  File: AudioPlayer.java View source code
/**
     * Plays the audio data held in the {@code audio} buffer, honouring the
     * pause/stop flags and the configured volume.
     *
     * @throws FileNotFoundException if the audio source cannot be found
     * @throws UnsupportedAudioFileException if the audio data format is not supported
     * @throws IOException if reading the audio stream fails
     * @throws LineUnavailableException if no playback line is available
     */
public void playSound() throws FileNotFoundException, UnsupportedAudioFileException, IOException, LineUnavailableException {
    // Plays the byte[] 'audio' buffer through a SourceDataLine, honouring the
    // pauseFlag/stopFlag fields and applying 'volume_dB' via MASTER_GAIN when supported.
    if (audio != null) {
        isPlayingFlag = true;
        InputStream snd = new ByteArrayInputStream(audio);
        audioStream = AudioSystem.getAudioInputStream(snd);
        audioFormat = audioStream.getFormat();
        final byte[] data = new byte[4096];
        try {
            SourceDataLine res = null;
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
            res = (SourceDataLine) AudioSystem.getLine(info);
            res.open(audioFormat);
            sourceLine = res;
            // Start
            onPlay();
            sourceLine.start();
            int nBytesRead = 0;
            // Pump loop: exits on EOF (read returns -1) or when stopFlag is raised.
            while ((nBytesRead != -1) && (!stopFlag)) {
                if (!pauseFlag) {
                    isPlayingFlag = true;
                    nBytesRead = audioStream.read(data, 0, data.length);
                    if (nBytesRead != -1) {
                        // Re-apply the gain each chunk so volume changes take effect mid-playback.
                        if (sourceLine.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
                            ((FloatControl) sourceLine.getControl(FloatControl.Type.MASTER_GAIN)).setValue(volume_dB);
                        }
                        sourceLine.write(data, 0, nBytesRead);
                    }
                } else {
                    // NOTE(review): the paused state busy-spins on this loop at full CPU;
                    // consider sleeping briefly here.
                    isPlayingFlag = false;
                }
            }
            // Stop
            sourceLine.drain();
            sourceLine.stop();
            sourceLine.close();
            audioStream.close();
        } catch (LineUnavailableException e) {
            e.printStackTrace();
        }
        isPlayingFlag = false;
        onStop();
    }
}
Example 56
Project: apes-master  File: PlayerHandler.java View source code
/**
   * Does some initialization such as fetching the line and getting volume
   * control. NOTE: {@link PlayerHandler#setInternalFormat setInternalFormat}
   * must be called before this.
   */
private void init() {
    // Opens (or reopens) the playback line matching the internal format,
    // starts it, and grabs the master-gain control for volume adjustment.
    // NOTE: setInternalFormat must have been called first.
    try {
        AudioFormat playbackFormat = new AudioFormat(internalFormat.getSampleRate(), internalFormat.bitsPerSample, internalFormat.getNumChannels(), true, false);
        DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
        // Release any previously opened line before acquiring a new one.
        if (line != null) {
            line.close();
        }
        line = (SourceDataLine) AudioSystem.getLine(lineInfo);
        line.open(playbackFormat);
        line.start();
        // Master gain control, used elsewhere for volume changes.
        gainControl = (FloatControl) line.getControl(FloatControl.Type.MASTER_GAIN);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 57
Project: cdt-master  File: SoundAction.java View source code
@Override
public void run() {
    // Decodes 'soundFile' and streams it synchronously to a SourceDataLine until EOF.
    AudioInputStream soundStream = null;
    try {
        soundStream = AudioSystem.getAudioInputStream(soundFile);
        AudioFormat audioFormat = soundStream.getFormat();
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
        SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        byte[] soundBuffer = new byte[5000];
        sourceDataLine.open(audioFormat);
        sourceDataLine.start();
        int dataCount = 0;
        while ((dataCount = soundStream.read(soundBuffer, 0, soundBuffer.length)) != -1) {
            if (dataCount > 0) {
                sourceDataLine.write(soundBuffer, 0, dataCount);
            }
        }
        // Let buffered audio finish before releasing the line.
        sourceDataLine.drain();
        sourceDataLine.close();
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
        e.printStackTrace();
    } finally {
        // Fix: the decoded stream was previously never closed.
        if (soundStream != null) {
            try {
                soundStream.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}
Example 58
Project: cdt-tests-runner-master  File: SoundAction.java View source code
public void run() {
    // Decodes 'soundFile' and streams it synchronously to a SourceDataLine until EOF.
    AudioInputStream soundStream = null;
    try {
        soundStream = AudioSystem.getAudioInputStream(soundFile);
        AudioFormat audioFormat = soundStream.getFormat();
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
        SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        byte[] soundBuffer = new byte[5000];
        sourceDataLine.open(audioFormat);
        sourceDataLine.start();
        int dataCount = 0;
        while ((dataCount = soundStream.read(soundBuffer, 0, soundBuffer.length)) != -1) {
            if (dataCount > 0) {
                sourceDataLine.write(soundBuffer, 0, dataCount);
            }
        }
        // Let buffered audio finish before releasing the line.
        sourceDataLine.drain();
        sourceDataLine.close();
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
        e.printStackTrace();
    } finally {
        // Fix: the decoded stream was previously never closed.
        if (soundStream != null) {
            try {
                soundStream.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}
Example 59
Project: CollectiveFramework-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource() throws JavaLayerException {
    // Obtains and starts a SourceDataLine for playback in format 'fmt';
    // wraps any failure in a JavaLayerException carrying the original cause.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            // Open with the line's default buffer size; an explicit
            // millisecondsToBytes(fmt, 2000) buffer was removed in the upstream fix.
            source.open(fmt);
            source.start();
        }
    } catch (RuntimeException | LinkageError | LineUnavailableException ex) {
        // Remember the cause so it is attached to the exception below.
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 60
Project: dota2-sound-editor-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource() throws JavaLayerException {
    // Obtains and starts a SourceDataLine for playback in format 'fmt';
    // wraps any failure in a JavaLayerException carrying the original cause.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            // Open with the line's default buffer size; an explicit
            // millisecondsToBytes(fmt, 2000) buffer was removed in the upstream fix.
            source.open(fmt);
            source.start();
        }
    } catch (RuntimeException | LinkageError | LineUnavailableException ex) {
        // Remember the cause so it is attached to the exception below.
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 61
Project: EDMHouse-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource() throws JavaLayerException {
    // Obtains and starts a SourceDataLine for playback in format 'fmt';
    // wraps any failure in a JavaLayerException carrying the original cause.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            // Open with the line's default buffer size; an explicit
            // millisecondsToBytes(fmt, 2000) buffer was removed in the upstream fix.
            source.open(fmt);
            source.start();
        }
    } catch (RuntimeException | LinkageError | LineUnavailableException ex) {
        // Remember the cause so it is attached to the exception below.
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 62
Project: emul-master  File: JavaSoundListener.java View source code
/**
	 * Starts audio output for the given sound format, spawning a daemon thread
	 * that drains the sound queue into the SourceDataLine.
	 */
public void started(SoundFormat format) {
    // Opens a SourceDataLine for the given sound format and spawns a daemon
    // thread that drains 'soundQueue' into the line.
    if (soundGeneratorLine != null) {
        // NOTE(review): soundFormat is an AudioFormat while 'format' is a SoundFormat,
        // so this equals() likely never matches — verify the intended comparison.
        if (soundFormat.equals(format))
            return;
        stopped();
    }
    // Bounded queue decouples the emulator tick from the actual audio writes.
    soundQueue = new LinkedBlockingQueue<AudioChunk>(20);
    soundFormat = toAudioFormat(format);
    Line.Info slInfo = new DataLine.Info(SourceDataLine.class, soundFormat);
    if (!AudioSystem.isLineSupported(slInfo)) {
        logger.error("Line not supported: " + soundFormat);
        return;
    }
    try {
        int soundFramesPerTick = (int) (soundFormat.getFrameRate() / ticksPerSec);
        soundGeneratorLine = (SourceDataLine) AudioSystem.getLine(slInfo);
        // Buffer sized as frames-per-tick * 20 * 4 — presumably 20 ticks of
        // headroom times the frame size; TODO confirm the factor of 4.
        soundGeneratorLine.open(soundFormat, soundFramesPerTick * 20 * 4);
        logger.debug("Sound format: " + soundFormat);
    } catch (LineUnavailableException e) {
        logger.error("Line not available");
        e.printStackTrace();
        return;
    }
    soundWritingThread = new Thread(new Runnable() {

        public void run() {
            // Blocks on the queue forever; exits on interrupt or once the line is gone.
            while (true) {
                AudioChunk chunk = null;
                try {
                    chunk = soundQueue.take();
                } catch (InterruptedException e2) {
                    return;
                }
                if (soundGeneratorLine == null)
                    return;
                if (chunk.soundData != null) {
                    soundGeneratorLine.write(chunk.soundData, 0, chunk.soundData.length);
                }
            }
        }
    }, "Sound Writing");
    soundWritingThread.setDaemon(true);
    soundWritingThread.start();
    soundGeneratorLine.start();
}
Example 63
Project: forplay-master  File: PlayerTest.java View source code
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException {
    // Streams the decoded audio 'din' to a SourceDataLine until EOF,
    // then releases both the line and the stream.
    byte[] data = new byte[4096];
    SourceDataLine line = getLine(targetFormat);
    try {
        if (line != null) {
            // Start
            line.start();
            int nBytesRead = 0;
            while (nBytesRead != -1) {
                nBytesRead = din.read(data, 0, data.length);
                if (nBytesRead != -1)
                    line.write(data, 0, nBytesRead);
            }
            // Stop
            line.drain();
            line.stop();
            line.close();
        }
    } finally {
        // Fix: 'din' leaked when getLine() returned null or streaming threw;
        // close it on every path. (Also removed the unused nBytesWritten local.)
        din.close();
    }
}
Example 64
Project: hypertron-velodyne-master  File: Experiment2.java View source code
public static void play(AudioInputStream ais, AudioFormat af, int nRepeats) {
    // Plays the stream nRepeats times, rewinding with mark()/reset() between passes.
    int nBytesRead = 0;
    int nBytesWritten = 0;
    // Mark the start so reset() can rewind; requires a mark-supporting stream.
    ais.mark(LARGE_NUMBER);
    SourceDataLine line = null;
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, af);
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(af);
    } catch (LineUnavailableException e) {
        // Fatal: no playback possible.
        e.printStackTrace();
        System.exit(1);
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
    byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    line.start();
    for (int i = 0; i < nRepeats; i++) {
        nBytesRead = 0;
        // Pump one full pass of the stream; read() returning -1 ends the pass.
        while (nBytesRead != -1) {
            try {
                nBytesRead = ais.read(abData, 0, abData.length);
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (nBytesRead >= 0) {
                nBytesWritten = line.write(abData, 0, nBytesRead);
            }
        }
        try {
            // Rewind to the mark for the next repetition.
            ais.reset();
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    // Let buffered audio finish, then release the line.
    line.drain();
    line.close();
}
Example 65
Project: javac-master  File: JavaFxPlayVideoAndAudio.java View source code
@Override
public void start(Stage primaryStage) throws Exception {
    // Builds a JavaFX scene holding a single ImageView, then spawns a thread that
    // grabs video/audio frames with FFmpegFrameGrabber: video frames go to the
    // ImageView on the FX thread, audio samples go to a SourceDataLine.
    StackPane root = new StackPane();
    ImageView imageView = new ImageView();
    root.getChildren().add(imageView);
    imageView.fitWidthProperty().bind(primaryStage.widthProperty());
    imageView.fitHeightProperty().bind(primaryStage.heightProperty());
    Scene scene = new Scene(root, 640, 480);
    primaryStage.setTitle("Video + audio");
    primaryStage.setScene(scene);
    primaryStage.show();
    playThread = new Thread(() -> {
        try {
            // NOTE(review): hard-coded absolute path — parameterize before reuse.
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("C:\\Users\\gda\\Desktop\\bunny_move\\1486430724718.mp4");
            grabber.start();
            // 44.1kHz, 16-bit, mono, signed, big-endian playback format.
            AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, true);
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
            SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
            soundLine.open(audioFormat);
            soundLine.start();
            // NOTE(review): 'converter' appears unused below.
            OpenCVFrameConverter converter = new OpenCVFrameConverter.ToIplImage();
            Java2DFrameConverter paintConverter = new Java2DFrameConverter();
            ExecutorService executor = Executors.newSingleThreadExecutor();
            while (!Thread.interrupted()) {
                Frame frame = grabber.grab();
                if (frame == null) {
                    break;
                }
                if (frame.image != null) {
                    // Video frame: convert and hand off to the FX thread for display.
                    Image image = SwingFXUtils.toFXImage(paintConverter.convert(frame), null);
                    Platform.runLater(() -> {
                        imageView.setImage(image);
                    });
                } else if (frame.samples != null) {
                    // Audio frame: first channel only, float samples.
                    FloatBuffer channelSamplesFloatBuffer = (FloatBuffer) frame.samples[0];
                    channelSamplesFloatBuffer.rewind();
                    ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesFloatBuffer.capacity() * 2);
                    for (int i = 0; i < channelSamplesFloatBuffer.capacity(); i++) {
                        /*
                         * Convert float samples to signed 16-bit PCM by scaling with
                         * SC16 (~32768): using a power-of-two factor keeps bit patterns
                         * intact. Roughly equivalent to (short)(sample * Short.MAX_VALUE).
                         */
                        short val = (short) ((double) channelSamplesFloatBuffer.get(i) * SC16);
                        outBuffer.putShort(val);
                    }
                    /*
                     * Write through a single-thread executor and join on the Future,
                     * because soundLine.write ignores interruptions while writing.
                     */
                    try {
                        executor.submit(() -> {
                            soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
                            outBuffer.clear();
                        }).get();
                    } catch (InterruptedException interruptedException) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            // Shutdown: stop the audio writer, the grabber, and exit the FX platform.
            executor.shutdownNow();
            executor.awaitTermination(10, TimeUnit.SECONDS);
            soundLine.stop();
            grabber.stop();
            grabber.release();
            Platform.exit();
        } catch (Exception exception) {
            LOG.log(Level.SEVERE, null, exception);
            System.exit(1);
        }
    });
    playThread.start();
}
Example 66
Project: javacv-master  File: JavaFxPlayVideoAndAudio.java View source code
@Override
public void start(Stage primaryStage) throws Exception {
    // Builds a JavaFX scene holding a single ImageView, then spawns a thread that
    // grabs video/audio frames with FFmpegFrameGrabber: video frames go to the
    // ImageView on the FX thread, audio samples go to a SourceDataLine.
    StackPane root = new StackPane();
    ImageView imageView = new ImageView();
    root.getChildren().add(imageView);
    imageView.fitWidthProperty().bind(primaryStage.widthProperty());
    imageView.fitHeightProperty().bind(primaryStage.heightProperty());
    Scene scene = new Scene(root, 640, 480);
    primaryStage.setTitle("Video + audio");
    primaryStage.setScene(scene);
    primaryStage.show();
    playThread = new Thread(() -> {
        try {
            // NOTE(review): hard-coded absolute path — parameterize before reuse.
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("C:\\Users\\gda\\Desktop\\bunny_move\\1486430724718.mp4");
            grabber.start();
            // 44.1kHz, 16-bit, mono, signed, big-endian playback format.
            AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, true);
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
            SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
            soundLine.open(audioFormat);
            soundLine.start();
            // NOTE(review): 'converter' appears unused below.
            OpenCVFrameConverter converter = new OpenCVFrameConverter.ToIplImage();
            Java2DFrameConverter paintConverter = new Java2DFrameConverter();
            ExecutorService executor = Executors.newSingleThreadExecutor();
            while (!Thread.interrupted()) {
                Frame frame = grabber.grab();
                if (frame == null) {
                    break;
                }
                if (frame.image != null) {
                    // Video frame: convert and hand off to the FX thread for display.
                    Image image = SwingFXUtils.toFXImage(paintConverter.convert(frame), null);
                    Platform.runLater(() -> {
                        imageView.setImage(image);
                    });
                } else if (frame.samples != null) {
                    // Audio frame: first channel only, float samples.
                    FloatBuffer channelSamplesFloatBuffer = (FloatBuffer) frame.samples[0];
                    channelSamplesFloatBuffer.rewind();
                    ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesFloatBuffer.capacity() * 2);
                    for (int i = 0; i < channelSamplesFloatBuffer.capacity(); i++) {
                        /*
                         * Convert float samples to signed 16-bit PCM by scaling with
                         * SC16 (~32768): using a power-of-two factor keeps bit patterns
                         * intact. Roughly equivalent to (short)(sample * Short.MAX_VALUE).
                         */
                        short val = (short) ((double) channelSamplesFloatBuffer.get(i) * SC16);
                        outBuffer.putShort(val);
                    }
                    /*
                     * Write through a single-thread executor and join on the Future,
                     * because soundLine.write ignores interruptions while writing.
                     */
                    try {
                        executor.submit(() -> {
                            soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
                            outBuffer.clear();
                        }).get();
                    } catch (InterruptedException interruptedException) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            // Shutdown: stop the audio writer, the grabber, and exit the FX platform.
            executor.shutdownNow();
            executor.awaitTermination(10, TimeUnit.SECONDS);
            soundLine.stop();
            grabber.stop();
            grabber.release();
            Platform.exit();
        } catch (Exception exception) {
            LOG.log(Level.SEVERE, null, exception);
            System.exit(1);
        }
    });
    playThread.start();
}
Example 67
Project: JavaStuffs-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource() throws JavaLayerException {
    // Obtains and starts a SourceDataLine for playback in format 'fmt';
    // wraps any failure in a JavaLayerException carrying the original cause.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            // Open with the line's default buffer size; an explicit
            // millisecondsToBytes(fmt, 2000) buffer was removed in the upstream fix.
            source.open(fmt);
            source.start();
        }
    } catch (RuntimeException | LinkageError | LineUnavailableException ex) {
        // Remember the cause so it is attached to the exception below.
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 68
Project: jclic-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/**
 * Obtains a {@link SourceDataLine} from the AudioSystem, opens it with the
 * device format {@code fmt} (a field) and starts it so writes begin playing.
 * If anything fails, the failure is captured and rethrown as a
 * JavaLayerException with the original throwable as cause.
 *
 * @throws JavaLayerException if no source audio line could be obtained
 */
protected void createSource() throws JavaLayerException {
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
                if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
                {
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
                    c.setValue(c.getMaximum());
                }*/
            source.start();
        }
    // Three separate catches all funnel into the same cause variable; note
    // LinkageError is an Error, so a plain catch (Exception) would miss it.
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    // source is still null on failure OR when the line was not a SourceDataLine.
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 69
Project: jflac-dev-master  File: SeekTablePlayer.java View source code
/**
     * Process the StreamInfo metadata block: caches it, derives the playback
     * AudioFormat from it, then obtains, opens and starts a SourceDataLine so
     * subsequent decoded PCM can be written to the speaker.
     * @param streamInfo the StreamInfo block
     * @see org.kc7bfi.jflac.PCMProcessor#processStreamInfo(org.kc7bfi.jflac.metadata.StreamInfo)
     */
public void processStreamInfo(StreamInfo streamInfo) {
    this.streamInfo = streamInfo;
    try {
        fmt = streamInfo.getAudioFormat();
        // NOT_SPECIFIED lets the mixer pick its own buffer size.
        info = new DataLine.Info(SourceDataLine.class, fmt, AudioSystem.NOT_SPECIFIED);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(fmt, AudioSystem.NOT_SPECIFIED);
        line.start();
    } catch (LineUnavailableException e) {
        // NOTE(review): failure is only printed; `line` stays null and later
        // playback will presumably NPE — consider propagating the error.
        e.printStackTrace();
    }
}
Example 70
Project: JMediaPlayer-master  File: PlayerTest.java View source code
/**
 * Plays a decoded audio stream to completion on a freshly obtained line.
 *
 * @param targetFormat format the line should be opened with
 * @param din          decoded input stream; always closed before returning
 * @throws IOException              on read failure
 * @throws LineUnavailableException if a line cannot be obtained
 */
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException {
    byte[] data = new byte[4096];
    SourceDataLine line = getLine(targetFormat);
    if (line == null) {
        // Fix: the original returned without closing the stream when no line
        // was available, leaking the underlying file/stream handle.
        din.close();
        return;
    }
    try {
        line.start();
        int nBytesRead;
        while ((nBytesRead = din.read(data, 0, data.length)) != -1) {
            // write() blocks until the line has consumed the chunk
            line.write(data, 0, nBytesRead);
        }
        // Let buffered audio finish before stopping.
        line.drain();
    } finally {
        // Fix: release the line and the stream even if read/write throws.
        line.stop();
        line.close();
        din.close();
    }
}
Example 71
Project: JNekounter-master  File: Mp3Player.java View source code
@Override
/**
 * SwingWorker body: decodes the MP3 {@code song} (a field) to signed 16-bit
 * PCM and streams it to a SourceDataLine until EOF, then drains and closes
 * the line. Errors are reported to stdout in the project's bracket style.
 *
 * @return always null (Void worker)
 */
protected Void doInBackground() throws Exception {
    AudioInputStream decodedInputStream = null;
    if (song != null && song.length() > 0) {
        try {
            AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(song);
            AudioFormat baseFormat = audioInputStream.getFormat();
            // 16-bit signed PCM, same rate/channels as the source; frame size = channels * 2 bytes.
            AudioFormat decodedFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), baseFormat.isBigEndian());
            decodedInputStream = AudioSystem.getAudioInputStream(decodedFormat, audioInputStream);
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, decodedFormat);
            souceDataline = (SourceDataLine) AudioSystem.getLine(info);
            if (souceDataline != null) {
                souceDataline.open(decodedFormat);
                byte[] data = new byte[4096];
                souceDataline.start();
                trackPlaying = true;
                int nBytesRead;
                while ((nBytesRead = decodedInputStream.read(data, 0, data.length)) != -1) {
                    souceDataline.write(data, 0, nBytesRead);
                }
                souceDataline.drain();
                souceDataline.stop();
                souceDataline.close();
            }
        } catch (UnsupportedAudioFileException e) {
            System.out.println("[Mp3Player.playMp3:UnsupportedAudioFileException]" + e.getMessage());
        } catch (LineUnavailableException e) {
            System.out.println("[Mp3Player.playMp3:LineUnavailableException]" + e.getMessage());
        } catch (IOException e) {
            System.out.println("[Mp3Player.playMp3:IOException]" + e.getMessage());
        } finally {
            // Fix: the original called close() unconditionally, which threw an
            // NPE whenever decoding failed before the stream was assigned.
            if (decodedInputStream != null) {
                try {
                    decodedInputStream.close();
                } catch (IOException e) {
                    System.out.println("[Mp3Player.finally.IOException]" + e.getMessage());
                }
            }
        }
    }
    return null;
}
Example 72
Project: jogl-master  File: JavaSoundAudioSink.java View source code
@Override
/**
 * Initializes the JavaSound sink for the requested format: builds the
 * javax.sound AudioFormat, obtains/opens/starts a SourceDataLine, and clears
 * the sample buffer.
 *
 * @return true when the sink was successfully initialized, false otherwise
 */
public boolean init(final AudioSink.AudioFormat requestedFormat, final float frameDuration, final int initialQueueSize, final int queueGrowAmount, final int queueLimit) {
    if (!staticAvailable) {
        return false;
    }
    // Create the audio format we wish to use
    format = new javax.sound.sampled.AudioFormat(requestedFormat.sampleRate, requestedFormat.sampleSize, requestedFormat.channelCount, requestedFormat.signed, !requestedFormat.littleEndian);
    // Create dataline info object describing line format
    info = new DataLine.Info(SourceDataLine.class, format);
    // Clear buffer initially
    Arrays.fill(sampleData, (byte) 0);
    try {
        // Get line to write data to
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
        auline.start();
        System.out.println("JavaSound audio sink");
        initialized = true;
        chosenFormat = requestedFormat;
    } catch (final Exception e) {
        initialized = false;
    }
    // Fix: the original returned `true` unconditionally, reporting success
    // even when line setup failed and `initialized` was false.
    return initialized;
}
Example 73
Project: josm-plugins-master  File: EngineSound.java View source code
/**
 * (Re)starts the engine-sound generator: resets the synthesis state, stops any
 * previous output line, then opens a mono 16-bit big-endian line at S_RATE and
 * schedules the first buffer fill via reschedule(0).
 */
public void start() {
    // Reset synthesis state before opening a new line.
    rpm = 0.3;
    speed = 0.0;
    n = 0;
    if (output != null)
        stop();
    AudioFormat output_format = new AudioFormat(S_RATE, 16, 1, true, true);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, output_format);
    /* Get the data line, open it and initialise the device */
    try {
        output = (SourceDataLine) AudioSystem.getLine(info);
        output.open(output_format);
        output.start();
        frames_written = 0;
        reschedule(0);
    } catch (Exception e) {
        // Audio is optional here: mark the line unusable and report the class
        // of the failure rather than crashing the caller.
        output = null;
        System.out.println("Audio not available: " + e.getClass().getSimpleName());
    }
}
Example 74
Project: jpcsp-master  File: UmdBrowserSound.java View source code
/**
 * Prepares playback of an ATRAC stream: instantiates and initializes the
 * codec for the given type, opens a 44.1 kHz 16-bit little-endian line, and
 * caches the stream geometry (offset, bytes per frame, channel count).
 *
 * @param codecType     codec identifier passed to CodecFactory
 * @param atracFileInfo parsed ATRAC header describing the stream
 * @return true when codec and audio line are ready, false on any failure
 */
private boolean read(int codecType, AtracFileInfo atracFileInfo) {
    codec = CodecFactory.getCodec(codecType);
    if (codec == null) {
        return false;
    }
    int result = codec.init(atracFileInfo.atracBytesPerFrame, atracFileInfo.atracChannels, atracFileInfo.atracChannels, atracFileInfo.atracCodingMode);
    if (result < 0) {
        // Negative result is the codec's error convention.
        return false;
    }
    AudioFormat audioFormat = new AudioFormat(44100, 16, atracFileInfo.atracChannels, true, false);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    try {
        mLine = (SourceDataLine) AudioSystem.getLine(info);
        mLine.open(audioFormat);
    } catch (LineUnavailableException e) {
        return false;
    }
    mLine.start();
    // Remember where the audio data starts and how to step through it.
    inputOffset = atracFileInfo.inputFileDataOffset;
    inputPosition = inputOffset;
    inputBytesPerFrame = atracFileInfo.atracBytesPerFrame;
    channels = atracFileInfo.atracChannels;
    return true;
}
Example 75
Project: jukefox-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/**
 * Obtains a {@link SourceDataLine} matching this device's source line info,
 * opens it with the stored format {@code fmt} and starts it. Any failure is
 * wrapped in a JavaLayerException carrying the original throwable as cause.
 *
 * @throws JavaLayerException if no source audio line could be obtained
 */
protected void createSource() throws JavaLayerException {
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
				if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
				{
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
				    c.setValue(c.getMaximum());
				}*/
            source.start();
        }
    // LinkageError is caught separately because it is an Error, not an Exception.
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    // Null source means either a failure above or a non-SourceDataLine line.
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 76
Project: lol-jclient-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/**
 * Acquires, opens (with format {@code fmt}) and starts the playback line for
 * this audio device. On failure the captured throwable becomes the cause of
 * the thrown JavaLayerException.
 *
 * @throws JavaLayerException if no source audio line could be obtained
 */
protected void createSource() throws JavaLayerException {
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
                if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
                {
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
                    c.setValue(c.getMaximum());
                }*/
            source.start();
        }
    // All three failure modes feed the same cause variable for the rethrow below.
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 77
Project: mage-master  File: LinePool.java View source code
/**
 * Plays a clip on a pooled line. Under the pool lock, a line is borrowed; if
 * none is free the clip is queued and played later by the STOP listener of a
 * finishing line. The actual open/start/write happens on the shared thread
 * pool so this method never blocks on audio I/O.
 *
 * @param mageClip the sound clip (filename + PCM buffer) to play
 */
public void playSound(final MageClip mageClip) {
    final SourceDataLine line;
    synchronized (LinePool.this) {
        log.debug("Playing {}", mageClip.getFilename());
        logLineStats();
        line = borrowLine();
        if (line == null) {
            // no lines available, queue sound to play it when a line is available
            queue.add(mageClip);
            log.debug("Sound {} queued.", mageClip.getFilename());
            return;
        }
        logLineStats();
    }
    ThreadUtils.threadPool.submit(() -> {
        synchronized (LinePool.this) {
            try {
                if (!line.isOpen()) {
                    line.open();
                    // Listener registered once per line (only on first open):
                    // on STOP it returns the line to the pool and dequeues the
                    // next pending clip, recursing into playSound.
                    line.addLineListener( event -> {
                        log.debug("Event: {}", event);
                        if (event.getType() != Type.STOP) {
                            return;
                        }
                        synchronized (LinePool.this) {
                            log.debug("Before stop on line {}", line);
                            logLineStats();
                            returnLine(line);
                            log.debug("After stop on line {}", line);
                            logLineStats();
                            MageClip queuedSound = queue.poll();
                            if (queuedSound != null) {
                                log.debug("Playing queued sound {}", queuedSound);
                                playSound(queuedSound);
                            }
                        }
                    });
                }
                line.start();
            } catch (LineUnavailableException e) {
                log.warn("Failed to open line", e);
            }
        }
        // Blocking write/drain happen outside the pool lock so other clips
        // can be scheduled while this one plays.
        byte[] buffer = mageClip.getBuffer();
        log.debug("Before write to line {}", line);
        line.write(buffer, 0, buffer.length);
        line.drain();
        line.stop();
        log.debug("Line completed: {}", line);
    });
}
Example 78
Project: mediaserver-master  File: SoundCard.java View source code
@Override
/**
 * Receives a media frame and writes its payload to the sound card. On the
 * first frame, a javax.sound AudioFormat is derived from the frame's format
 * and a SourceDataLine is opened and started; subsequent frames are written
 * directly.
 *
 * @param frame the media frame to render
 * @throws IOException declared for the Media interface; not thrown here
 */
public void onMediaTransfer(Frame frame) throws IOException {
    System.out.println("Receive " + frame.getFormat() + ", len=" + frame.getLength() + ", header=" + frame.getHeader());
    if (first) {
        first = false;
        AudioFormat fmt = (AudioFormat) frame.getFormat();
        if (fmt == null) {
            return;
        }
        float sampleRate = (float) fmt.getSampleRate();
        int sampleSizeInBits = fmt.getSampleSize();
        int channels = fmt.getChannels();
        int frameSize = (fmt.getSampleSize() / 8);
        //float frameRate = 1;
        boolean bigEndian = false;
        Encoding encoding = getEncoding(fmt.getName().toString());
        // Recompute frame size from channels * bytes-per-sample, propagating NOT_SPECIFIED.
        frameSize = (channels == AudioSystem.NOT_SPECIFIED || sampleSizeInBits == AudioSystem.NOT_SPECIFIED) ? AudioSystem.NOT_SPECIFIED : ((sampleSizeInBits + 7) / 8) * channels;
        audioFormat = new javax.sound.sampled.AudioFormat(encoding, sampleRate, sampleSizeInBits, channels, frameSize, sampleRate, bigEndian);
        // FIXME : Need a configuration to select the specific hardware
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
        // beforehand.
        try {
            sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();
        } catch (Exception e) {
            this.stop();
            logger.error(e);
        }
    }
    // Fix: if first-frame initialization failed, sourceDataLine is null and the
    // original logged an NPE for every subsequent frame; skip writing instead.
    if (sourceDataLine == null) {
        return;
    }
    // FIXME : write() will block till all bytes are written. Need async operation here.
    byte[] data = frame.getData();
    try {
        sourceDataLine.write(data, frame.getOffset(), frame.getLength());
    } catch (RuntimeException e) {
        logger.error(e);
    }
}
Example 79
Project: MineTunes-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/**
 * Obtains, opens and starts the playback line for this device, then applies
 * the player's current gain. Failures become the cause of the thrown
 * JavaLayerException.
 *
 * @throws JavaLayerException if no source audio line could be obtained
 */
protected void createSource() throws JavaLayerException {
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
                if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
                {
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
                    c.setValue(c.getMaximum());
                }*/
            source.start();
            // XXX ~Vazkii
            // Re-apply the player-wide gain each time a fresh line is created.
            setGain(ThreadMusicPlayer.gain);
        }
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 80
Project: Minim-master  File: JSStreamingSampleRecorder.java View source code
/**
   * Finishes the recording process by closing the output stream, then reopens
   * the just-written file as a playable recording stream backed by a fresh
   * SourceDataLine (buffer size 1024 frames).
   *
   * @return an AudioRecordingStream positioned at the start of the recording
   */
public AudioRecordingStream save() {
    try {
        aos.close();
    } catch (IOException e) {
        Minim.error("AudioRecorder.save: An error occurred when trying to save the file:\n" + e.getMessage());
    }
    String filePath = filePath();
    AudioInputStream ais = system.getAudioInputStream(filePath);
    SourceDataLine sdl = system.getSourceDataLine(ais.getFormat(), 1024);
    // this is fine because the recording will always be 
    // in a raw format (WAV, AU, etc).
    long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
    BasicMetaData meta = new BasicMetaData(filePath, length, ais.getFrameLength());
    JSPCMAudioRecordingStream recording = new JSPCMAudioRecordingStream(system, meta, ais, sdl, 1024);
    return recording;
}
Example 81
Project: mobicents-master  File: PlayerImpl.java View source code
@Override
/**
 * Renders an incoming media buffer to the sound card. On the first buffer, a
 * javax.sound AudioFormat is derived from the buffer's format and a
 * SourceDataLine is opened and started; every buffer's payload is then
 * written to that line.
 *
 * @param buffer the media buffer to play
 * @throws IOException declared for the media interface; not thrown here
 */
public void onMediaTransfer(Buffer buffer) throws IOException {
    if (first) {
        first = false;
        AudioFormat fmt = (AudioFormat) buffer.getFormat();
        float sampleRate = (float) fmt.getSampleRate();
        int sampleSizeInBits = fmt.getSampleSizeInBits();
        int channels = fmt.getChannels();
        int frameSize = (fmt.getFrameSizeInBits() / 8);
        float frameRate = (float) fmt.getFrameRate();
        boolean bigEndian = fmt.getEndian() == 1;
        Encoding encoding = getEncoding(fmt.getEncoding());
        // Recompute frame size as channels * bytes-per-sample, keeping NOT_SPECIFIED sticky.
        frameSize = (channels == AudioSystem.NOT_SPECIFIED || sampleSizeInBits == AudioSystem.NOT_SPECIFIED) ? AudioSystem.NOT_SPECIFIED : ((sampleSizeInBits + 7) / 8) * channels;
        audioFormat = new javax.sound.sampled.AudioFormat(encoding, sampleRate, sampleSizeInBits, channels, frameSize, sampleRate, bigEndian);
        // FIXME : Need a configuration to select the specific hardware
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
        // beforehand.
        try {
            sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();
        } catch (LineUnavailableException e) {
            logger.error(e);
            this.failed(NotifyEvent.RX_FAILED, e);
            this.stop();
        } catch (IllegalArgumentException e) {
            logger.error(e);
            this.failed(NotifyEvent.RX_FAILED, e);
            this.stop();
        }
    }
    // FIXME : write() will block till all bytes are written. Need async operation here.
    // NOTE(review): if the initialization above failed, sourceDataLine may be
    // null here and write() would NPE — presumably stop() halts delivery first; verify.
    try {
        sourceDataLine.write((byte[]) buffer.getData(), buffer.getOffset(), buffer.getLength());
    } catch (IllegalArgumentException e) {
        logger.error(e);
    } catch (ArrayIndexOutOfBoundsException e) {
        logger.error(e);
    }
}
Example 82
Project: openblocks-master  File: BeepGenerator.java View source code
/**
 * Starts the beep generator if it is not already running: resets the phase
 * accumulators, opens a mono signed big-endian line (buffer of one second of
 * samples) and hands it to a new writer thread.
 */
public synchronized void start() {
    // Reset oscillator state regardless of whether a new line is opened.
    wavePhase = 0;
    beepPhase = 0;
    if (!isRunning()) {
        final AudioFormat af = new AudioFormat(SAMPLE_RATE, 8 * BYTES_PER_SAMPLE, 1, true, true);
        try {
            SourceDataLine line = AudioSystem.getSourceDataLine(af);
            // Buffer sized to SAMPLE_RATE frames, i.e. one second of audio.
            line.open(af, SAMPLE_RATE);
            writerThread = new WriterThread(line);
            writerThread.start();
        } catch (LineUnavailableException e) {
            Log.warn(e, "Failed to initialize beeper");
            // NOTE(review): this shuts down a writer thread left over from a
            // previous start, if any — the new one was never created here.
            if (writerThread != null)
                writerThread.shutdown();
        }
    }
}
Example 83
Project: orcc-master  File: Audio.java View source code
/**
 * Initializes the static audio output state: builds a little-endian PCM
 * AudioFormat (8-bit samples are unsigned, larger sizes signed), then obtains
 * and opens a matching SourceDataLine. Exits the JVM if the line cannot be
 * opened; prints a message if the format is unsupported.
 *
 * @param SampleRate       sample rate in Hz
 * @param SampleSizeInBits bits per sample (8 or 16 supported)
 * @param Channels         channel count
 */
public static void audio_initAudioFormat(BigInteger SampleRate, BigInteger SampleSizeInBits, BigInteger Channels) {
    if (SampleSizeInBits.intValue() == 8) {
        // 8-bit PCM is conventionally unsigned; wider samples are signed.
        audioFormat = new AudioFormat(SampleRate.floatValue(), SampleSizeInBits.intValue(), Channels.intValue(), false, false);
    } else {
        audioFormat = new AudioFormat(SampleRate.floatValue(), SampleSizeInBits.intValue(), Channels.intValue(), true, false);
    }
    info = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (AudioSystem.isLineSupported(info)) {
        try {
            line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(audioFormat);
        } catch (Exception e) {
            // Fix: the original had two byte-identical handlers (LineUnavailableException
            // and Exception); a single catch of the broader type is equivalent.
            e.printStackTrace();
            System.exit(1);
        }
    } else {
        System.out.println("Format of the file is incorrect (only 8 or 16 bits per sample are supported).");
    }
}
Example 84
Project: Picklr-master  File: RadioUtils.java View source code
/*
	 * This takes a decoded buffered audio input stream and format, and plays it
	 */
/*
 * This takes a decoded buffered audio input stream and format, and plays it
 */
/**
 * Plays a decoded stream to completion at a fixed gain of -1.0 dB.
 *
 * @param targetFormat format to open the output line with
 * @param decodedInput decoded PCM stream; closed before returning
 * @throws IOException              on read failure
 * @throws LineUnavailableException if no line matching the format exists
 */
public static void rawplay(AudioFormat targetFormat, AudioInputStream decodedInput) throws IOException, LineUnavailableException {
    byte[] data = new byte[4096];
    // Create the source data line
    SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(new DataLine.Info(SourceDataLine.class, targetFormat));
    // Open the line with the specified format.
    // Fix: the original null-checked sourceDataLine AFTER dereferencing it in
    // open(); getLine never returns null (it throws), so the check was dead.
    sourceDataLine.open(targetFormat);
    try {
        sourceDataLine.start();
        // This is used to control the volume/master gain
        gainControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
        // Set the gain to the saved default
        gainControl.setValue(-1.0F);
        int bytesRead;
        // Stream the input to the line until EOF.
        while ((bytesRead = decodedInput.read(data, 0, data.length)) != -1) {
            sourceDataLine.write(data, 0, bytesRead);
        }
        // Let buffered audio play out, then stop.
        sourceDataLine.drain();
        sourceDataLine.stop();
    } finally {
        // Fix: close the line and stream even when read/write throws,
        // preventing resource leaks on I/O errors.
        sourceDataLine.close();
        decodedInput.close();
    }
}
Example 85
Project: PixelUtilities-master  File: SoundChip.java View source code
/** Initialize sound hardware if available */
/**
 * Initializes sound hardware if available: opens and starts a stereo 8-bit
 * signed big-endian line at the configured sample rate, sizing its buffer
 * from bufferLengthMsec. Sets soundEnabled accordingly.
 *
 * @return the started line, or null when unsupported or unavailable
 */
public SourceDataLine initSoundHardware() {
    try {
        AudioFormat pcmFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleRate, 8, 2, 2, sampleRate, true);
        DataLine.Info outputInfo = new DataLine.Info(SourceDataLine.class, pcmFormat);
        if (AudioSystem.isLineSupported(outputInfo)) {
            SourceDataLine audioLine = (SourceDataLine) AudioSystem.getLine(outputInfo);
            // Buffer length in frames: (frames per ms) * desired milliseconds.
            int bufferFrames = (sampleRate / 1000) * bufferLengthMsec;
            audioLine.open(pcmFormat, bufferFrames);
            audioLine.start();
            soundEnabled = true;
            return audioLine;
        }
        System.out.println("Error: Can't find audio output system!");
        soundEnabled = false;
    } catch (Exception e) {
        System.out.println("Error: Audio system busy!");
        soundEnabled = false;
    }
    return null;
}
Example 86
Project: reaper--rest-in-peace-master  File: BasicPlayerApplet.java View source code
/**
 * Lazily creates the playback line: converts the current input stream to
 * 16-bit signed little-endian PCM via the applet SPI workarounds, obtains a
 * matching SourceDataLine, and caches gain/pan controls when supported.
 * Does nothing if the line already exists.
 *
 * @throws LineUnavailableException if no matching line can be obtained
 */
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        // Target: 16-bit signed PCM, source rate/channels, little-endian.
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(), sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on encoded stream to progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Applet UGLY workaround.
        // Chooses the Vorbis or MPEG SPI workaround depending on stream type.
        if ((isOgg == true) || (forceOgg == true))
            m_audioInputStream = AppletVorbisSPIWorkaround.getAudioInputStream(targetFormat, m_audioInputStream);
        else
            m_audioInputStream = AppletMpegSPIWorkaround.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);
        log.debug("Line AudioFormat: " + m_line.getFormat().toString());
        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            log.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            log.info("Master Gain Control : [" + m_gainControl.getMinimum() + "," + m_gainControl.getMaximum() + "] " + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            log.info("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "] " + m_panControl.getPrecision());
        }
    }
}
Example 87
Project: settlers-remake-master  File: SwingSoundPlayer.java View source code
@Override
/**
 * Sound playback loop: opens a 22.05 kHz stereo line once, then forever takes
 * queued sounds, applies volume/balance (via line controls when supported,
 * otherwise by scaling the sample data) and writes them synchronously.
 * Terminates when interrupted while waiting on the queue.
 */
public void run() {
    AudioFormat format = new AudioFormat(22050, 16, 2, true, false);
    Line.Info info = new Line.Info(SourceDataLine.class);
    try {
        SourceDataLine dataLine = (SourceDataLine) AudioSystem.getMixer(null).getLine(info);
        dataLine.open(format, BUFFER_SIZE);
        while (true) {
            try {
                // start sound playing
                dataLine.start();
                Sound<Integer> sound = queue.take();
                byte[] buffer;
                if (dataLine.isControlSupported(FloatControl.Type.VOLUME) && dataLine.isControlSupported(FloatControl.Type.BALANCE)) {
                    buffer = transformData(soundDataRetriever.getSoundData(sound.getData()));
                    FloatControl volumeControl = (FloatControl) dataLine.getControl(FloatControl.Type.VOLUME);
                    volumeControl.setValue(sound.getVolume() * volumeControl.getMaximum());
                    ((FloatControl) dataLine.getControl(FloatControl.Type.BALANCE)).setValue(sound.getBalance());
                } else {
                    // No hardware controls: bake left/right volume into the samples.
                    buffer = transformData(soundDataRetriever.getSoundData(sound.getData()), sound.getLvolume(), sound.getRvolume());
                }
                dataLine.write(buffer, 0, buffer.length);
                // stop playing
                dataLine.drain();
                dataLine.stop();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    } catch (InterruptedException e) {
        // Fix: the original swallowed the interrupt; restore the flag so the
        // thread's interrupted status remains visible to callers/executors.
        Thread.currentThread().interrupt();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    }
}
Example 88
Project: SFXR-Plus-Plus-master  File: SFXRSound.java View source code
/**
 * Plays the generated PCM byte array once: opens a mono 8-bit signed
 * big-endian line at the sound thread's sample rate, writes the whole buffer
 * in one blocking call, then drains, flushes and closes the line.
 */
public void run() {
    try {
        final AudioFormat audioFormat = new AudioFormat(soundThread.sampleRate, 8, 1, true, true);
        // Fix: the original called AudioSystem.getSourceDataLine() twice and
        // discarded the first line, leaking it; one call suffices.
        SourceDataLine line = AudioSystem.getSourceDataLine(audioFormat);
        line.open(audioFormat);
        line.start();
        // play the byteArray: write(byte[] b, int off, int len)
        line.write(soundThread.getPcm(), 0, soundThread.getPcm().length);
        line.drain();
        line.flush();
        line.close();
    } catch (LineUnavailableException e) {
        System.err.println("Audio Error:\n\t" + e.getMessage() + "\nExiting.");
    }
}
Example 89
Project: SmartHome-master  File: AudioPlayer.java View source code
/**
     * This method plays the contained AudioSource: converts its format to a
     * javax.sound AudioFormat, opens a matching SourceDataLine and streams the
     * audio in 65532-byte chunks until EOF, draining and closing everything at
     * the end. When no line is found, the available mixer lines are logged to
     * help diagnose the configuration.
     */
@Override
public void run() {
    SourceDataLine line;
    AudioFormat audioFormat = convertAudioFormat(this.audioStream.getFormat());
    if (audioFormat == null) {
        logger.warn("Audio format is unsupported or does not have enough details in order to be played");
        return;
    }
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
    } catch (Exception e) {
        logger.warn("No line found: {}", e.getMessage());
        logger.info("Available lines are:");
        // Diagnostic dump: list every source line of every mixer.
        Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
        Mixer mixer = null;
        for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
            mixer = AudioSystem.getMixer(mixerInfo[cnt]);
            Line.Info[] lineInfos = mixer.getSourceLineInfo();
            for (Info lineInfo : lineInfos) {
                logger.info(lineInfo.toString());
            }
        }
        return;
    }
    line.start();
    int nRead = 0;
    // needs to be a multiple of 4 and 6, to support both 16 and 24 bit stereo
    byte[] abData = new byte[65532];
    try {
        while (-1 != nRead) {
            nRead = audioStream.read(abData, 0, abData.length);
            if (nRead >= 0) {
                line.write(abData, 0, nRead);
            }
        }
    } catch (IOException e) {
        logger.error("Error while playing audio: {}", e.getMessage());
        return;
    } finally {
        // Always let buffered audio finish, then release line and stream.
        line.drain();
        line.close();
        try {
            audioStream.close();
        } catch (IOException e) {
        }
    }
}
Example 90
Project: speechalyzer-master  File: PlayWave.java View source code
/**
 * Plays the wave file named by the {@code filename} field: opens the file as
 * an AudioInputStream, picks the playback format based on the configured
 * {@code audioFormat} mode, optionally pans left/right, then streams the file
 * to a SourceDataLine until EOF or until {@code _playing} is cleared.
 */
public void run() {
    File soundFile = new File(filename);
    if (!soundFile.exists()) {
        System.err.println("Wave file not found: " + filename);
        return;
    }
    AudioInputStream audioInputStream = null;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(soundFile);
    } catch (UnsupportedAudioFileException e1) {
        e1.printStackTrace();
        return;
    } catch (IOException e1) {
        e1.printStackTrace();
        return;
    }
    // Choose the playback format: the file's own format for WAV mode, or the
    // fixed 22.05 kHz PCM format for raw-PCM mode.
    AudioFormat format = null;
    if (audioFormat.compareTo(AUDIOFORMAT_WAV) == 0) {
        format = audioInputStream.getFormat();
    } else if (audioFormat.compareTo(AUDIOFORMAT_PCM_22050) == 0) {
        format = AudioUtil.FORMAT_PCM_22KHZ;
    } else {
        // Fix: corrected typo in the error message ("autio" -> "audio").
        System.err.println("undefined audio format: " + audioFormat);
        return;
    }
    SourceDataLine auline = null;
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    try {
        auline = (SourceDataLine) AudioSystem.getLine(info);
        auline.open(format);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }
    // Apply stereo panning when the line supports it.
    if (auline.isControlSupported(FloatControl.Type.PAN)) {
        FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
        if (curPosition == Position.RIGHT)
            pan.setValue(1.0f);
        else if (curPosition == Position.LEFT)
            pan.setValue(-1.0f);
    }
    auline.start();
    int nBytesRead = 0;
    byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    _playing = true;
    try {
        // Stream until EOF; _playing doubles as an external stop flag.
        while (nBytesRead != -1) {
            nBytesRead = audioInputStream.read(abData, 0, abData.length);
            if (nBytesRead >= 0)
                auline.write(abData, 0, nBytesRead);
            if (!_playing)
                break;
        }
        _playing = false;
        audioInputStream.close();
    } catch (IOException e) {
        e.printStackTrace();
        return;
    } finally {
        auline.drain();
        auline.close();
        auline = null;
        audioInputStream = null;
        soundFile = null;
    }
}
Example 91
Project: swing-minizoo-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
/**
 * Obtains a SourceDataLine for this device's line info, opens it with the
 * stored format {@code fmt}, and starts playback. On any failure the captured
 * throwable is rethrown wrapped in a JavaLayerException.
 *
 * @throws JavaLayerException if no source audio line could be obtained
 */
protected void createSource() throws JavaLayerException {
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            //source.open(fmt, millisecondsToBytes(fmt, 2000));
            source.open(fmt);
            /*
                if (source.isControlSupported(FloatControl.Type.MASTER_GAIN))
                {
					FloatControl c = (FloatControl)source.getControl(FloatControl.Type.MASTER_GAIN);
                    c.setValue(c.getMaximum());
                }*/
            source.start();
        }
    // All three handlers are identical: record the cause for the rethrow below.
    } catch (RuntimeException ex) {
        t = ex;
    } catch (LinkageError ex) {
        t = ex;
    } catch (LineUnavailableException ex) {
        t = ex;
    }
    if (source == null)
        throw new JavaLayerException("cannot obtain source audio line", t);
}
Example 92
Project: TaiWebDeployUtils-master  File: MAudioSample.java View source code
public void run() {
    // Audio rendering loop: mixes every active sample trigger into the
    // output buffer and streams the result to the SourceDataLine until
    // `finished` is set by the owner.
    try {
        line.open(format, bufferSize() * format.getFrameSize() * 4);
    } catch (LineUnavailableException e) {
        Minim.error("Error opening SourceDataLine: " + e.getMessage());
        // FIX: without an open line, start()/write() below would throw
        // IllegalStateException — bail out instead of falling through.
        return;
    }
    line.start();
    while (!finished) {
        // clear the buffer
        buffer.makeSilence();
        // build our signal from all the marks (one mark per active trigger)
        for (int i = 0; i < marks.length; i++) {
            int begin = marks[i];
            if (begin == -1)
                continue; // slot not triggered
            int j, k;
            for (j = begin, k = 0; j < samples.getSampleCount() && k < buffer.getSampleCount(); j++, k++) {
                if (type() == Minim.MONO) {
                    buffer.getChannel(0)[k] += samples.getChannel(0)[j];
                } else {
                    buffer.getChannel(0)[k] += samples.getChannel(0)[j];
                    buffer.getChannel(1)[k] += samples.getChannel(1)[j];
                }
            }
            if (j < samples.getSampleCount()) {
                // sample not finished: remember where to resume next buffer
                marks[i] = j;
            } else {
                // sample trigger ended
                marks[i] = -1;
            }
        }
        // apply effects and broadcast samples to our listeners
        if (type() == Minim.MONO) {
            if (effects.hasEnabled()) {
                effects.process(buffer.getChannel(0));
            }
            splitter.samples(buffer.getChannel(0));
        } else {
            if (effects.hasEnabled()) {
                effects.process(buffer.getChannel(0), buffer.getChannel(1));
            }
            splitter.samples(buffer.getChannel(0), buffer.getChannel(1));
        }
        // write to the line
        int wrote = buffer.convertToByteArray(bytes, 0, format);
        line.write(bytes, 0, wrote);
    }
    // flush remaining audio, then release the line
    line.drain();
    line.stop();
    line.close();
    line = null;
}
Example 93
Project: unifrog-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource() throws JavaLayerException {
    // Obtain a playback line from JavaSound; if anything goes wrong the
    // failure is remembered and attached to the JavaLayerException below.
    Throwable failure = null;
    try {
        Line candidate = AudioSystem.getLine(getSourceLineInfo());
        if (candidate instanceof SourceDataLine) {
            source = (SourceDataLine) candidate;
            // Open with the device's default buffer size and begin playback.
            source.open(fmt);
            source.start();
        }
    } catch (RuntimeException rte) {
        failure = rte;
    } catch (LinkageError le) {
        failure = le;
    } catch (LineUnavailableException lue) {
        failure = lue;
    }
    if (source == null) {
        throw new JavaLayerException("cannot obtain source audio line", failure);
    }
}
Example 94
Project: Amber-IDE-master  File: AudioIO.java View source code
/**
 * Finds a mixer that can supply a SourceDataLine for the given format
 * and still has capacity for one more such line.
 *
 * @param format the audio format the line must support
 * @return a usable mixer, or {@code null} if none is available
 */
public static Mixer findMixer(AudioFormat format) {
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, format);
    for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
        Mixer candidate = AudioSystem.getMixer(mixerInfo);
        // Skip mixers that cannot supply our kind of line at all.
        if (!candidate.isLineSupported(lineInfo)) {
            continue;
        }
        int maxLines = candidate.getMaxLines(lineInfo);
        // NOT_SPECIFIED means the mixer claims an unlimited line count.
        if (maxLines == AudioSystem.NOT_SPECIFIED) {
            return candidate;
        }
        // Otherwise count the matching lines already in use.
        int inUse = 0;
        for (Line open : candidate.getSourceLines()) {
            if (open.getLineInfo().matches(lineInfo)) {
                inUse++;
            }
        }
        if (inUse < maxLines) {
            return candidate;
        }
    }
    // No mixer with spare capacity was found.
    return null;
}
Example 95
Project: anemone-master  File: Audio.java View source code
public void run() {
    // Playback thread main loop: mixes all active clips into one
    // SourceDataLine until this thread is interrupted.
    SourceDataLine line = null;
    try {
        try {
            line = (SourceDataLine) AudioSystem.getLine(new DataLine.Info(SourceDataLine.class, fmt));
            line.open(fmt, bufsize);
            line.start();
        } catch (Exception e) {
            // Could not acquire or open an output line; abandon playback.
            e.printStackTrace();
            return;
        }
        byte[] buf = new byte[1024];
        while (true) {
            // Interruption is the normal shutdown signal for this thread.
            if (Thread.interrupted())
                throw (new InterruptedException());
            // Run queued actions; swap in a fresh queue so producers can
            // keep enqueueing without blocking on this iteration.
            synchronized (queuemon) {
                Collection<Runnable> queue = Audio.queue;
                Audio.queue = new LinkedList<Runnable>();
                for (Runnable r : queue) r.run();
            }
            // Adopt newly registered clips into the active set.
            synchronized (ncl) {
                for (CS cs : ncl) clips.add(cs);
                ncl.clear();
            }
            // Mix the next 1 KiB chunk and write it in full; write() may
            // accept fewer bytes than requested, hence the offset loop.
            fillbuf(buf, 0, 1024);
            for (int off = 0; off < buf.length; off += line.write(buf, off, buf.length - off)) ;
        }
    } catch (InterruptedException e) {
        // Expected on shutdown; fall through to cleanup.
    } finally {
        // Clear the singleton player reference and release the line.
        synchronized (Audio.class) {
            player = null;
        }
        if (line != null)
            line.close();
    }
}
Example 96
Project: common-chicken-runtime-engine-master  File: Beeper.java View source code
/**
 * Start playing the specified beep type.
 *
 * @param bt the beep type to play.
 * @see BeepType
 */
public static void beep(BeepType bt) {
    System.out.println("Beeping: " + bt);
    try {
        // 8-bit, mono, unsigned=false, big-endian PCM at the beep's rate.
        AudioFormat format = new AudioFormat(bt.rate(), 8, 1, false, true);
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        try {
            line.open(format);
            line.start();
            long time = 0;
            boolean done = false;
            // Generate samples chunk by chunk until the beep type signals
            // completion by throwing CompletedException.
            while (!done) {
                byte[] chunk = new byte[1024];
                int filled = 0;
                try {
                    while (filled < chunk.length) {
                        chunk[filled] = bt.generateOne(time++);
                        filled++;
                    }
                } catch (CompletedException ex) {
                    done = true;
                }
                // Write whatever was generated, full chunk or final partial.
                line.write(chunk, 0, filled);
            }
            line.drain();
        } finally {
            line.close();
        }
    } catch (LineUnavailableException ex) {
        ex.printStackTrace();
    }
}
Example 97
Project: epic-inventor-master  File: JavaSoundAudioDevice.java View source code
// createSource fix.
protected void createSource2(boolean start) throws JavaLayerException {
    // Acquire a SourceDataLine for playback, optionally starting it.
    // Every failure mode (runtime error, missing JavaSound classes, busy
    // device) is captured and re-thrown as the cause of a single
    // JavaLayerException.
    Throwable t = null;
    try {
        Line line = AudioSystem.getLine(getSourceLineInfo());
        if (line instanceof SourceDataLine) {
            source = (SourceDataLine) line;
            // Open with the device's default buffer size.
            source.open(fmt);
            if (start) {
                source.start();
            }
        }
    } catch (RuntimeException | LinkageError | LineUnavailableException ex) {
        // The three handlers were identical; multi-catch collapses them.
        t = ex;
    }
    if (source == null) {
        throw new JavaLayerException("cannot obtain source audio line", t);
    }
}
Example 98
Project: freecol-android-master  File: SoundPlayer.java View source code
private void setVolume(SourceDataLine line, int vol) {
    // Translate a 0-100 volume percentage into a decibel gain on the
    // line's MASTER_GAIN control. Any failure is logged, never thrown.
    try {
        FloatControl control = (FloatControl) line.getControl(FloatControl.Type.MASTER_GAIN);
        if (control == null) {
            logger.warning("No master gain control," + " unable to change the volume.");
        } else {
            // The gain (dB) and volume (percent) are log related.
            //   50% volume  = -6dB
            //   10% volume  = -20dB
            //   1% volume   = -40dB
            // Use max/min for 100,0%.
            float gain;
            if (vol <= 0) {
                gain = control.getMinimum();
            } else if (vol >= 100) {
                gain = control.getMaximum();
            } else {
                gain = 20.0f * (float) Math.log10(0.01f * vol);
            }
            control.setValue(gain);
            logger.finest("Using volume " + vol + "%, gain = " + gain);
        }
    } catch (Exception e) {
        logger.log(Level.WARNING, "Could not set volume", e);
    }
}
Example 99
Project: geogebra-master  File: Decoder.java View source code
public void play(String name, InputStream in) throws IOException {
    // Decodes an MP3 stream frame by frame and plays it on a
    // SourceDataLine, honoring the pause/stop flags polled between frames.
    stop = false;
    int frameCount = Integer.MAX_VALUE;
    Decoder decoder = new Decoder();
    Bitstream stream = new Bitstream(in);
    SourceDataLine line1 = null;
    int error = 0;
    for (int frame = 0; !stop && frame < frameCount; frame++) {
        if (pause) {
            // FIX: the line is only created once the first frame has been
            // decoded, so it may still be null when pause is requested
            // immediately — guard against the NullPointerException.
            if (line1 != null) {
                line1.stop();
            }
            while (pause && !stop) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    // keep waiting; the pause/stop flags control this loop
                }
            }
            if (line1 != null) {
                line1.flush();
                line1.start();
            }
        }
        try {
            Header header = stream.readFrame();
            if (header == null) {
                break;
            }
            if (decoder.channels == 0) {
                // First decoded frame: derive the PCM output format from
                // the MP3 header and open the playback line.
                int channels1 = (header.mode() == Header.MODE_SINGLE_CHANNEL) ? 1 : 2;
                float sampleRate = header.frequency();
                int sampleSize = 16;
                AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleRate, sampleSize, channels1, channels1 * (sampleSize / 8), sampleRate, true);
                // big endian
                SourceDataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
                line1 = (SourceDataLine) AudioSystem.getLine(info);
                if (BENCHMARK) {
                    decoder.initOutputBuffer(null, channels1);
                } else {
                    decoder.initOutputBuffer(line1, channels1);
                }
                // TODO sometimes the line can not be opened (maybe not
                // enough system resources?): display error message
                line1.open(format);
                line1.start();
            }
            // Throttle decoding until the line has drained some buffer.
            while (line1.available() < 100) {
                Thread.yield();
                Thread.sleep(200);
            }
            decoder.decodeFrame(header, stream);
        } catch (Exception e) {
            // Tolerate corrupt frames, but bail out after too many errors.
            if (error++ > 1000) {
                break;
            }
            Log.debug("Error at: " + name + " Frame: " + frame + " Error: " + e.toString());
        } finally {
            stream.closeFrame();
        }
    }
    if (error > 0) {
        Log.debug("errors: " + error);
    }
    in.close();
    if (line1 != null) {
        line1.stop();
        line1.close();
        line1 = null;
    }
}
Example 100
Project: Grimja-master  File: VideoViewer.java View source code
public void run() {
    // Playback loop for the video viewer. When the clip carries an audio
    // track, video frames are slaved to the audio write position; otherwise
    // frames are timed against the wall clock.
    final int framePeriod = (int) (1000f / data.fps);
    int last_frame = -1;
    boolean valid = true;
    final WritableRaster raster = surface.getRaster();
    try {
        // Todo: sync is WAY off
        if (data.audio.stream != null) {
            AudioTrack audio = data.audio;
            // Signed big-endian PCM, per the track's stored parameters.
            AudioFormat af = new AudioFormat(audio.sampleRate, audio.bits, audio.channels, true, true);
            SourceDataLine sdl = (SourceDataLine) AudioSystem.getLine(new DataLine.Info(SourceDataLine.class, af));
            sdl.open();
            sdl.start();
            byte[] buf = new byte[sdl.getBufferSize()];
            audio.stream.seek(0);
            // Total audio bytes written so far; drives the video clock.
            int pos = 0;
            final int bytesPerSample = (audio.bits / 8) * audio.channels;
            final int bytesPerSec = bytesPerSample * audio.sampleRate;
            final int bytesPerMs = bytesPerSec / 1000;
            // VIMA is 50 frames ahead according to residual...
            final int frameOff = 500;
            while (valid) {
                // Convert the audio position to a video frame index.
                int ms = pos / bytesPerMs;
                ms -= frameOff;
                if (ms < 0)
                    ms = 0;
                int frame = ms / framePeriod;
                if (frame != last_frame && frame != last_frame + 1)
                    // Interpolate, we can't drop frames!
                    frame = last_frame + 1;
                last_frame = frame;
                data.stream.setFrame(frame);
                valid = playing && data.stream.readFrame(raster, data.width, data.height);
                if (!valid)
                    break;
                viewer.repaint();
                // Feed the line only as much as it can take without blocking.
                int read = audio.stream.read(buf, 0, Math.min(buf.length, sdl.available()));
                // NOTE(review): when read == -1 (EOF) pos is decremented by
                // one before the break below — harmless, but looks accidental.
                pos += read;
                if (read == -1)
                    break;
                sdl.write(buf, 0, read);
            }
            if (playing)
                // Finished normally: let the buffered audio play out.
                sdl.drain();
            else
                // Stopped by the user: discard whatever is still buffered.
                sdl.flush();
            sdl.stop();
            sdl.close();
        } else {
            // Otherwise just try and keep the FPS
            long start = System.currentTimeMillis();
            while (valid) {
                long elapsed = System.currentTimeMillis() - start;
                int frame = (int) (elapsed / framePeriod);
                if (frame != last_frame && frame != last_frame + 1)
                    // Interpolate, can't drop...
                    frame = last_frame + 1;
                last_frame = frame;
                data.stream.setFrame(frame);
                valid = playing && data.stream.readFrame(raster, data.width, data.height);
                viewer.repaint();
            }
        }
    } catch (Exception e) {
        MainWindow.getInstance().handleException(e);
    }
    // Playback is over (or failed): restore the UI action states.
    stopAction.setEnabled(false);
    playAction.setEnabled(true);
}
Example 101
Project: Haven-and-Hearth-client-modified-by-Ender-master  File: Audio.java View source code
public void run() {
    // Mixer thread: pulls queued actions and new clips, then streams mixed
    // audio to a SourceDataLine until interrupted.
    SourceDataLine line = null;
    try {
        try {
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, fmt);
            line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(fmt, bufsize);
            line.start();
        } catch (Exception e) {
            // No usable output line; abandon audio playback entirely.
            e.printStackTrace();
            return;
        }
        byte[] mixbuf = new byte[1024];
        // Interruption is the normal shutdown signal for this thread.
        while (!Thread.interrupted()) {
            // Drain the action queue, swapping in a fresh list so producers
            // never wait on the work being executed here.
            synchronized (queuemon) {
                Collection<Runnable> pending = Audio.queue;
                Audio.queue = new LinkedList<Runnable>();
                for (Runnable task : pending) {
                    task.run();
                }
            }
            // Move newly registered clips into the active set.
            synchronized (ncl) {
                for (CS cs : ncl) {
                    clips.add(cs);
                }
                ncl.clear();
            }
            // Mix one chunk and push it out completely; write() may accept
            // fewer bytes than asked for.
            fillbuf(mixbuf, 0, 1024);
            int off = 0;
            while (off < mixbuf.length) {
                off += line.write(mixbuf, off, mixbuf.length - off);
            }
        }
        throw new InterruptedException();
    } catch (InterruptedException e) {
        // Expected on shutdown; fall through to cleanup.
    } finally {
        synchronized (Audio.class) {
            player = null;
        }
        if (line != null)
            line.close();
    }
}