hammasaidi - 16 Feb 2010 at 12:34 (last reply 22 Feb 2010 at 13:23)
Hello,
I have this code and I'm stuck: instead of displaying the waveform of the signal recorded by the microphone, I want to display the waveform of a sound file that already exists on my PC, i.e. load it with a JFileChooser.
Thanks for your help.
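A rough sketch of what the loading step could look like (not part of the original code): the openFile name is just for illustration, it reuses the fields and helpers visible in the code below (file, audioInputStream, reportStatus, samplingGraph), and it assumes the chosen file is in a format Java Sound reads directly, e.g. WAV/AU/AIFF.

import java.io.File;
import javax.sound.sampled.AudioSystem;
import javax.swing.JFileChooser;

public void openFile() {
    JFileChooser fc = new JFileChooser();
    if (fc.showOpenDialog(null) != JFileChooser.APPROVE_OPTION) {
        return; // user cancelled the dialog
    }
    file = fc.getSelectedFile();            // remember the file for later playback/saving
    try {
        // build the stream straight from the chosen file instead of the microphone line
        audioInputStream = AudioSystem.getAudioInputStream(file);
    } catch (Exception ex) {
        reportStatus("Unable to open " + file + ": " + ex);
        return;
    }
    samplingGraph.createWaveForm(null);     // null => createWaveForm reads audioInputStream itself
    samplingGraph.repaint();
}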
public void saveToFile(String name, AudioFileFormat.Type fileType) {
if (audioInputStream == null) {
reportStatus("No loaded audio to save");
return;
} else if (file != null) {
createAudioInputStream(file, false);
}
// reset to the beginning of the captured data
try {
audioInputStream.reset();
} catch (Exception e) {
reportStatus("Unable to reset stream " + e);
return;
}
File file = new File(fileName = name);
try {
if (AudioSystem.write(audioInputStream, fileType, file) == -1) {
throw new IOException("Problems writing to file");
}
} catch (Exception ex) { reportStatus(ex.toString()); }
samplingGraph.repaint();
}
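// A possible way to pick the destination (sketch, not part of the original code):
// use a JFileChooser in save mode and pass the chosen path to saveToFile, e.g.
//   JFileChooser fc = new JFileChooser();
//   if (fc.showSaveDialog(null) == JFileChooser.APPROVE_OPTION) {
//       saveToFile(fc.getSelectedFile().getAbsolutePath(), AudioFileFormat.Type.WAVE);
//   }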
public void run() {
// reload the file if loaded by file
if (file != null) {
createAudioInputStream(file, false);
}
// make sure we have something to play
if (audioInputStream == null) {
shutDown("No loaded audio to play back");
return;
}
// reset to the beginning of the stream
try {
audioInputStream.reset();
} catch (Exception e) {
shutDown("Unable to reset the stream\n" + e);
return;
}
// get an AudioInputStream of the desired format for playback
AudioFormat format = formatControls.getFormat();
AudioInputStream playbackInputStream = AudioSystem.getAudioInputStream(format, audioInputStream);
if (playbackInputStream == null) {
shutDown("Unable to convert stream of format " + audioInputStream + " to format " + format);
return;
}
// define the required attributes for our line,
// and make sure a compatible line is supported.
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
format);
if (!AudioSystem.isLineSupported(info)) {
shutDown("Line matching " + info + " not supported.");
return;
}
// get and open the source data line for playback.
try {
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(format, bufSize);
} catch (LineUnavailableException ex) {
shutDown("Unable to open the line: " + ex);
return;
}
// play back the captured audio data
int frameSizeInBytes = format.getFrameSize();
int bufferLengthInFrames = line.getBufferSize() / 8;
int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
byte[] data = new byte[bufferLengthInBytes];
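// note: this buffer is only a fraction of the line's own buffer, so each write
// in the loop below returns quickly and the loop can keep checking the thread
// field regularly (a common Java Sound pattern)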
int numBytesRead = 0;
// start the source data line
line.start();
while (thread != null) {
try {
if ((numBytesRead = playbackInputStream.read(data)) == -1) {
break;
}
int numBytesRemaining = numBytesRead;
while (numBytesRemaining > 0 ) {
numBytesRemaining -= line.write(data, 0, numBytesRemaining);
}
} catch (Exception e) {
shutDown("Error during playback: " + e);
break;
}
}
// we reached the end of the stream. let the data play out, then
// stop and close the line.
if (thread != null) {
line.drain();
}
line.stop();
line.close();
line = null;
shutDown(null);
}
} // End class Playback
/**
* Reads data from the input channel and writes to the output stream
*/
class Capture implements Runnable {
TargetDataLine line;
Thread thread;
public void start() {
errStr = null;
thread = new Thread(this);
thread.setName("Capture");
thread.start();
}
public void run() {
// define the required attributes for our line,
// and make sure a compatible line is supported.
AudioFormat format = formatControls.getFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class,
format);
if (!AudioSystem.isLineSupported(info)) {
shutDown("Line matching " + info + " not supported.");
return;
}
// get and open the target data line for capture.
try {
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format, line.getBufferSize());
} catch (LineUnavailableException ex) {
shutDown("Unable to open the line: " + ex);
return;
} catch (SecurityException ex) {
shutDown(ex.toString());
// JavaSound.showInfoDialog();
return;
} catch (Exception ex) {
shutDown(ex.toString());
return;
}
// buffer to hold the captured audio data
ByteArrayOutputStream out = new ByteArrayOutputStream();
int frameSizeInBytes = format.getFrameSize();
int bufferLengthInFrames = line.getBufferSize() / 8;
int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
byte[] data = new byte[bufferLengthInBytes];
int numBytesRead;
Vector v = new Vector(groups.size());
for (int i = 0; i < groups.size(); i++) {
ButtonGroup g = (ButtonGroup) groups.get(i);
for (Enumeration e = g.getElements();e.hasMoreElements();) {
AbstractButton b = (AbstractButton) e.nextElement();
if (b.isSelected()) {
v.add(b.getText());
break;
}
}
}
if (encString.equals("linear")) {
if (signedString.equals("signed")) {
encoding = AudioFormat.Encoding.PCM_SIGNED;
} else {
encoding = AudioFormat.Encoding.PCM_UNSIGNED;
}
} else if (encString.equals("alaw")) {
encoding = AudioFormat.Encoding.ALAW;
}
return new AudioFormat(encoding, rate, sampleSize,
channels, (sampleSize/8)*channels, rate, bigEndian);
}
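// Illustration only: 16-bit signed PCM, stereo, 44100 Hz gives a frame size of
// (16/8)*2 = 4 bytes, i.e. the call above builds
// new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100f, 16, 2, 4, 44100f, false)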
public void setFormat(AudioFormat format) {
AudioFormat.Encoding type = format.getEncoding();
if (type == AudioFormat.Encoding.ULAW) {
ulawB.doClick();
} else if (type == AudioFormat.Encoding.ALAW) {
alawB.doClick();
} else if (type == AudioFormat.Encoding.PCM_SIGNED) {
linrB.doClick(); signB.doClick();
} else if (type == AudioFormat.Encoding.PCM_UNSIGNED) {
linrB.doClick(); unsignB.doClick();
}
float rate = format.getFrameRate();
}
} // End class FormatControls
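// Note for loading from a file (relates to the question above): the format does
// not have to come from these radio buttons; the stream built from the file
// already carries it, and createWaveForm() below reads it with
// audioInputStream.getFormat().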
/**
* Render a WaveForm.
*/
class SamplingGraph extends JPanel implements Runnable {
private Thread thread;
private Font font10 = new Font("serif", Font.PLAIN, 10);
private Font font12 = new Font("serif", Font.PLAIN, 12);
Color jfcBlue = new Color(204, 204, 255);
Color pink = new Color(255, 175, 175);
public SamplingGraph() {
setBackground(new Color(20, 20, 20));
}
public void createWaveForm(byte[] audioBytes) {
lines.removeAllElements(); // clear the old vector
AudioFormat format = audioInputStream.getFormat();
if (audioBytes == null) {
try {
audioBytes = new byte[
(int) (audioInputStream.getFrameLength()
* format.getFrameSize())];
audioInputStream.read(audioBytes);
} catch (Exception ex) {
reportStatus(ex.toString());
return;
}
}
Dimension d = getSize();
int w = d.width;
int h = d.height-15;
int[] audioData = null;
if (format.getSampleSizeInBits() == 16) {
int nlengthInSamples = audioBytes.length / 2;
audioData = new int[nlengthInSamples];
if (format.isBigEndian()) {
for (int i = 0; i < nlengthInSamples; i++) {
/* First byte is MSB (high order) */
int MSB = (int) audioBytes[2*i];
/* Second byte is LSB (low order) */
int LSB = (int) audioBytes[2*i+1];
audioData[i] = MSB << 8 | (255 & LSB);
}
} else {
for (int i = 0; i < nlengthInSamples; i++) {
/* First byte is LSB (low order) */
int LSB = (int) audioBytes[2*i];
/* Second byte is MSB (high order) */
int MSB = (int) audioBytes[2*i+1];
audioData[i] = MSB << 8 | (255 & LSB);
}
}
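// Worked example (illustration): with little-endian data the byte pair
// {0x34, 0x12} becomes 0x1200 | 0x34 = 0x1234 (4660), and {0x00, 0x80} becomes
// -32768 because the MSB byte is sign-extended before the shift.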
} else if (format.getSampleSizeInBits() == 8) {
int nlengthInSamples = audioBytes.length;
audioData = new int[nlengthInSamples];
if (format.getEncoding().toString().startsWith("PCM_SIGN")) {
for (int i = 0; i < audioBytes.length; i++) {
audioData[i] = audioBytes[i];
}
} else {
for (int i = 0; i < audioBytes.length; i++) {
audioData[i] = audioBytes[i] - 128;
}
}
}