/*
 * @(#)DataLine.java 1.33 04/07/14
 *
 * Copyright 2004 Sun Microsystems, Inc. All rights reserved.
 * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package javax.sound.sampled;

/**
 * <code>DataLine</code> adds media-related functionality to its
 * superinterface, <code>{@link Line}</code>. This functionality includes
 * transport-control methods that start, stop, drain, and flush
 * the audio data that passes through the line. A data line can also
 * report the current position, volume, and audio format of the media.
 * Data lines are used for output of audio by means of the
 * subinterfaces <code>{@link SourceDataLine}</code> or
 * <code>{@link Clip}</code>, which allow an application program to write data. Similarly,
 * audio input is handled by the subinterface <code>{@link TargetDataLine}</code>,
 * which allows data to be read.
 * <p>
 * A data line has an internal buffer in which
 * the incoming or outgoing audio data is queued. The
 * <code>{@link #drain()}</code> method blocks until this internal buffer
 * becomes empty, usually because all queued data has been processed. The
 * <code>{@link #flush()}</code> method discards any available queued data
 * from the internal buffer.
 * <p>
 * A data line produces <code>{@link LineEvent.Type#START START}</code> and
 * <code>{@link LineEvent.Type#STOP STOP}</code> events whenever
 * it begins or ceases active presentation or capture of data. These events
 * can be generated in response to specific requests, or as a result of
 * less direct state changes. For example, if <code>{@link #start()}</code> is called
 * on an inactive data line, and data is available for capture or playback, a
 * <code>START</code> event will be generated shortly, when data playback
 * or capture actually begins. Or, if the flow of data to an active data
 * line is constricted so that a gap occurs in the presentation of data,
 * a <code>STOP</code> event is generated.
 * <p>
 * Mixers often support synchronized control of multiple data lines.
 * Synchronization can be established through the Mixer interface's
 * <code>{@link Mixer#synchronize synchronize}</code> method.
 * See the description of the <code>{@link Mixer Mixer}</code> interface
 * for more details.
 *
 * @author Kara Kytle
 * @version 1.33, 04/07/14
 * @see LineEvent
 * @since 1.3
 */

public interface DataLine extends Line {

    /**
     * Drains queued data from the line by continuing data I/O until the
     * data line's internal buffer has been emptied.
     * This method blocks until the draining is complete. Because this is a
     * blocking method, it should be used with care. If <code>drain()</code>
     * is invoked on a stopped line that has data in its queue, the method will
     * block until the line is running and the data queue becomes empty. If
     * <code>drain()</code> is invoked by one thread, and another continues to
     * fill the data queue, the operation will not complete.
     * This method always returns when the data line is closed.
     *
     * @see #flush()
     */
    public void drain();
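
    // Illustrative sketch, not part of the Sun source: a typical end-of-playback
    // sequence on a SourceDataLine, where drain() is called after the last write so
    // the remaining queued audio is heard before the line is stopped. Assumes 'line',
    // 'lastChunk', and 'lastChunkLength' come from the application's own playback loop:
    //
    //     line.write(lastChunk, 0, lastChunkLength);  // queue the final audio data
    //     line.drain();                               // block until the internal buffer empties
    //     line.stop();
    //     line.close();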

    /**
     * Flushes queued data from the line. The flushed data is discarded.
     * In some cases, not all queued data can be discarded. For example, a
     * mixer can flush data from the buffer for a specific input line, but any
     * unplayed data already in the output buffer (the result of the mix) will
     * still be played. You can invoke this method after pausing a line (the
     * normal case) if you want to skip the "stale" data when you restart
     * playback or capture. (It is legal to flush a line that is not stopped,
     * but doing so on an active line is likely to cause a discontinuity in the
     * data, resulting in a perceptible click.)
     *
     * @see #stop()
     * @see #drain()
     */
    public void flush();

    /**
     * Allows a line to engage in data I/O. If invoked on a line
     * that is already running, this method does nothing. Unless the data in
     * the buffer has been flushed, the line resumes I/O starting
     * with the first frame that was unprocessed at the time the line was
     * stopped. When audio capture or playback starts, a
     * <code>{@link LineEvent.Type#START START}</code> event is generated.
     *
     * @see #stop()
     * @see #isRunning()
     * @see LineEvent
     */
    public void start();

    /**
     * Stops the line. A stopped line should cease I/O activity.
     * If the line is open and running, however, it should retain the resources required
     * to resume activity. A stopped line should retain any audio data in its buffer
     * instead of discarding it, so that upon resumption the I/O can continue where it left off,
     * if possible. (This doesn't guarantee that there will never be discontinuities beyond the
     * current buffer, of course; if the stopped condition continues
     * for too long, input or output samples might be dropped.) If desired, the retained data can be
     * discarded by invoking the <code>flush</code> method.
     * When audio capture or playback stops, a <code>{@link LineEvent.Type#STOP STOP}</code> event is generated.
     *
     * @see #start()
     * @see #isRunning()
     * @see #flush()
     * @see LineEvent
     */
    public void stop();
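
    // Illustrative sketch, not part of the Sun source: pausing a line and discarding
    // stale data before resuming, as described in the flush() documentation above.
    // Assumes 'line' is an open DataLine the application is writing to or reading from:
    //
    //     line.stop();    // pause I/O; buffered data is retained
    //     line.flush();   // optionally discard the retained ("stale") data
    //     line.start();   // resume; a START event follows once data flows again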

    /**
     * Indicates whether the line is running. The default is <code>false</code>.
     * An open line begins running when the first data is presented in response to an
     * invocation of the <code>start</code> method, and continues
     * until presentation ceases in response to a call to <code>stop</code> or
     * because playback completes.
     * @return <code>true</code> if the line is running, otherwise <code>false</code>
     * @see #start()
     * @see #stop()
     */
    public boolean isRunning();

    /**
     * Indicates whether the line is engaging in active I/O (such as playback
     * or capture). When an inactive line becomes active, it sends a
     * <code>{@link LineEvent.Type#START START}</code> event to its listeners. Similarly, when
     * an active line becomes inactive, it sends a
     * <code>{@link LineEvent.Type#STOP STOP}</code> event.
     * @return <code>true</code> if the line is actively capturing or rendering
     * sound, otherwise <code>false</code>
     * @see #isOpen
     * @see #addLineListener
     * @see #removeLineListener
     * @see LineEvent
     * @see LineListener
     */
    public boolean isActive();

    /**
     * Obtains the current format (encoding, sample rate, number of channels,
     * etc.) of the data line's audio data.
     *
     * <p>If the line is not open and has never been opened, it returns
     * the default format. The default format is an implementation-specific
     * audio format, or, if the <code>DataLine.Info</code> object that was
     * used to retrieve this <code>DataLine</code> specifies at least one
     * fully qualified audio format, the last of those formats is used as
     * the default. Opening the line with a specific audio format (e.g.
     * {@link SourceDataLine#open(AudioFormat)}) will override the
     * default format.
     *
     * @return current audio data format
     * @see AudioFormat
     */
    public AudioFormat getFormat();

    /**
     * Obtains the maximum number of bytes of data that will fit in the data line's
     * internal buffer. For a source data line, this is the size of the buffer to
     * which data can be written. For a target data line, it is the size of
     * the buffer from which data can be read. Note that
     * the units used are bytes, but the buffer size will always correspond to an
     * integral number of sample frames of audio data.
     *
     * @return the size of the buffer in bytes
     */
    public int getBufferSize();

    /**
     * Obtains the number of bytes of data currently available to the
     * application for processing in the data line's internal buffer. For a
     * source data line, this is the amount of data that can be written to the
     * buffer without blocking. For a target data line, this is the amount of data
     * available to be read by the application. For a clip, this value is always
     * 0 because the audio data is loaded into the buffer when the clip is opened,
     * and persists without modification until the clip is closed.
     * <p>
     * Note that the units used are bytes, but the value will always
     * correspond to an integral number of sample frames of audio data.
     * <p>
     * An application is guaranteed that a read or
     * write operation of up to the number of bytes returned from
     * <code>available()</code> will not block; however, there is no guarantee
     * that attempts to read or write more data will block.
     *
     * @return the amount of data available, in bytes
     */
    public int available();
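
    // Illustrative sketch, not part of the Sun source: using available() to size a write
    // that will not block, for example when feeding a SourceDataLine from a thread that
    // must not stall. Assumes 'line' is an open, started SourceDataLine and 'data'/'offset'
    // track the application's own output buffer (a real implementation would also keep
    // the requested length aligned to whole sample frames):
    //
    //     int writable = Math.min(line.available(), data.length - offset);
    //     if (writable > 0) {
    //         offset += line.write(data, offset, writable);  // guaranteed not to block
    //     }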

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     * This return value will wrap around after 2^31 frames. It is recommended
     * to use <code>getLongFramePosition</code> instead.
     *
     * @return the number of frames already processed since the line was opened
     * @see #getLongFramePosition()
     */
    public int getFramePosition();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     *
     * @return the number of frames already processed since the line was opened
     * @since 1.5
     */
    public long getLongFramePosition();

    /**
     * Obtains the current position in the audio data, in microseconds.
     * The microsecond position measures the time corresponding to the number
     * of sample frames captured by, or rendered from, the line since it was opened.
     * The level of precision is not guaranteed. For example, an implementation
     * might calculate the microsecond position from the current frame position
     * and the audio sample frame rate. The precision in microseconds would
     * then be limited to the number of microseconds per sample frame.
     *
     * @return the number of microseconds of data processed since the line was opened
     */
    public long getMicrosecondPosition();
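
    // Illustrative sketch, not part of the Sun source: deriving an elapsed-time value from
    // the frame position and the format's frame rate, essentially the calculation the
    // documentation above says an implementation may use. Assumes 'line' is an open
    // DataLine whose format has a fully specified frame rate:
    //
    //     long frames = line.getLongFramePosition();
    //     double seconds = frames / (double) line.getFormat().getFrameRate();
    //     long micros = line.getMicrosecondPosition();  // comparable value, in microseconds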

    /**
     * Obtains the current volume level for the line. This level is a measure
     * of the signal's current amplitude, and should not be confused with the
     * current setting of a gain control. The range is from 0.0 (silence) to
     * 1.0 (maximum possible amplitude for the sound waveform). The units
     * measure linear amplitude, not decibels.
     *
     * @return the current amplitude of the signal in this line, or
     * <code>{@link AudioSystem#NOT_SPECIFIED}</code>
     */
    public float getLevel();

    /**
     * Besides the class information inherited from its superclass,
     * <code>DataLine.Info</code> provides additional information specific to data lines.
     * This information includes:
     * <ul>
     * <li> the audio formats supported by the data line
     * <li> the minimum and maximum sizes of its internal buffer
     * </ul>
     * Because a <code>Line.Info</code> knows the class of the line it describes, a
     * <code>DataLine.Info</code> object can describe <code>DataLine</code>
     * subinterfaces such as <code>{@link SourceDataLine}</code>,
     * <code>{@link TargetDataLine}</code>, and <code>{@link Clip}</code>.
     * You can query a mixer for lines of any of these types, passing an appropriate
     * instance of <code>DataLine.Info</code> as the argument to a method such as
     * <code>{@link Mixer#getLine Mixer.getLine(Line.Info)}</code>.
     *
     * @see Line.Info
     * @author Kara Kytle
     * @version 1.33, 04/07/14
     * @since 1.3
     */
    public static class Info extends Line.Info {

        private AudioFormat[] formats;
        private int minBufferSize;
        private int maxBufferSize;

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a set of supported audio formats and a range for the buffer size.
         * This constructor is typically used by mixer implementations
         * when returning information about a supported line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param formats set of formats supported
         * @param minBufferSize minimum buffer size supported by the data line, in bytes
         * @param maxBufferSize maximum buffer size supported by the data line, in bytes
         */
        public Info(Class<?> lineClass, AudioFormat[] formats, int minBufferSize, int maxBufferSize) {

            super(lineClass);

            if (formats == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = formats;
            }

            this.minBufferSize = minBufferSize;
            this.maxBufferSize = maxBufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format and a desired buffer size.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         * @param bufferSize desired buffer size in bytes
         */
        public Info(Class<?> lineClass, AudioFormat format, int bufferSize) {

            super(lineClass);

            if (format == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = new AudioFormat[] { format };
            }

            this.minBufferSize = bufferSize;
            this.maxBufferSize = bufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         */
        public Info(Class<?> lineClass, AudioFormat format) {
            this(lineClass, format, AudioSystem.NOT_SPECIFIED);
        }
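
        // Illustrative sketch, not part of the Sun source: how an application typically
        // uses these constructors to describe a desired line and then obtain it. The
        // 44.1 kHz, 16-bit stereo format is an arbitrary example value, and handling of
        // LineUnavailableException is omitted:
        //
        //     AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
        //     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        //     if (AudioSystem.isLineSupported(info)) {
        //         SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        //         line.open(format);
        //     }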

        /**
         * Obtains a set of audio formats supported by the data line.
         * Note that <code>isFormatSupported(AudioFormat)</code> might return
         * <code>true</code> for certain additional formats that are missing from
         * the set returned by <code>getFormats()</code>. The reverse is not
         * the case: <code>isFormatSupported(AudioFormat)</code> is guaranteed to return
         * <code>true</code> for all formats returned by <code>getFormats()</code>.
         *
         * Some fields in the AudioFormat instances can be set to
         * {@link javax.sound.sampled.AudioSystem#NOT_SPECIFIED NOT_SPECIFIED}
         * if that field does not apply to the format,
         * or if the format supports a wide range of values for that field.
         * For example, a multi-channel device supporting up to
         * 64 channels could set the channel field in the
         * <code>AudioFormat</code> instances returned by this
         * method to <code>NOT_SPECIFIED</code>.
         *
         * @return a set of supported audio formats.
         * @see #isFormatSupported(AudioFormat)
         */
        public AudioFormat[] getFormats() {

            AudioFormat[] returnedArray = new AudioFormat[formats.length];
            System.arraycopy(formats, 0, returnedArray, 0, formats.length);
            return returnedArray;
        }

        /**
         * Indicates whether this data line supports a particular audio format.
         * The default implementation of this method simply returns <code>true</code> if
         * the specified format matches any of the supported formats.
         *
         * @param format the audio format for which support is queried.
         * @return <code>true</code> if the format is supported, otherwise <code>false</code>
         * @see #getFormats
         * @see AudioFormat#matches
         */
        public boolean isFormatSupported(AudioFormat format) {

            for (int i = 0; i < formats.length; i++) {
                if (format.matches(formats[i])) {
                    return true;
                }
            }

            return false;
        }

        /**
         * Obtains the minimum buffer size supported by the data line.
         * @return minimum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMinBufferSize() {
            return minBufferSize;
        }


        /**
         * Obtains the maximum buffer size supported by the data line.
         * @return maximum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMaxBufferSize() {
            return maxBufferSize;
        }

        /**
         * Determines whether the specified info object matches this one.
         * To match, the superclass match requirements must be met. In
         * addition, this object's minimum buffer size must be at least as
         * large as that of the object specified, its maximum buffer size must
         * be at most as large as that of the object specified, and all of its
         * formats must match formats supported by the object specified.
         * @return <code>true</code> if this object matches the one specified,
         * otherwise <code>false</code>.
         */
        public boolean matches(Line.Info info) {

            if (! (super.matches(info)) ) {
                return false;
            }

            Info dataLineInfo = (Info)info;

            // treat anything < 0 as NOT_SPECIFIED
            // demo code in old Java Sound Demo used a wrong buffer calculation
            // that would lead to arbitrary negative values
            if ((getMaxBufferSize() >= 0) && (dataLineInfo.getMaxBufferSize() >= 0)) {
                if (getMaxBufferSize() > dataLineInfo.getMaxBufferSize()) {
                    return false;
                }
            }

            if ((getMinBufferSize() >= 0) && (dataLineInfo.getMinBufferSize() >= 0)) {
                if (getMinBufferSize() < dataLineInfo.getMinBufferSize()) {
                    return false;
                }
            }

            AudioFormat[] localFormats = getFormats();

            if (localFormats != null) {

                for (int i = 0; i < localFormats.length; i++) {
                    if (localFormats[i] != null) {
                        if (! dataLineInfo.isFormatSupported(localFormats[i])) {
                            return false;
                        }
                    }
                }
            }

            return true;
        }
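
        // Illustrative sketch, not part of the Sun source: matches() is directional. An
        // application-side Info describing one specific request is typically tested
        // against a mixer's broader capability Info; here 'mixerLineInfo' is a
        // hypothetical value assumed to come from Mixer.getSourceLineInfo():
        //
        //     DataLine.Info requested = new DataLine.Info(SourceDataLine.class,
        //             new AudioFormat(44100f, 16, 2, true, false));
        //     boolean usable = requested.matches(mixerLineInfo);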

        /**
         * Obtains a textual description of the data line info.
         * @return a string description
         */
        public String toString() {

            StringBuffer buf = new StringBuffer();

            if ( (formats.length == 1) && (formats[0] != null) ) {
                buf.append(" supporting format " + formats[0]);
            } else if (getFormats().length > 1) {
                buf.append(" supporting " + getFormats().length + " audio formats");
            }

            if ( (minBufferSize != AudioSystem.NOT_SPECIFIED) && (maxBufferSize != AudioSystem.NOT_SPECIFIED) ) {
                buf.append(", and buffers of " + minBufferSize + " to " + maxBufferSize + " bytes");
            } else if ( (minBufferSize != AudioSystem.NOT_SPECIFIED) && (minBufferSize > 0) ) {
                buf.append(", and buffers of at least " + minBufferSize + " bytes");
            } else if (maxBufferSize != AudioSystem.NOT_SPECIFIED) {
                buf.append(", and buffers of up to " + maxBufferSize + " bytes");
            }

            return super.toString() + buf;
        }
    } // class Info

} // interface DataLine
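
// Illustrative sketch, not part of the Sun source: a minimal example of the DataLine
// transport methods used together on a SourceDataLine. In a real application this class
// (its name and play() method are hypothetical) would live in the application's own
// package and import javax.sound.sampled.*; the stream passed in is assumed to already
// be in a PCM format the default mixer supports.
class DataLinePlaybackExample {

    /** Plays an audio stream to completion, then drains and closes the line. */
    static void play(AudioInputStream stream)
            throws LineUnavailableException, java.io.IOException {

        AudioFormat format = stream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();                      // enable I/O; a START event follows once data flows

        byte[] chunk = new byte[4096];     // AudioInputStream.read returns whole sample frames
        int n;
        while ((n = stream.read(chunk, 0, chunk.length)) != -1) {
            line.write(chunk, 0, n);       // blocks while the line's internal buffer is full
        }

        line.drain();                      // block until all queued data has been played
        line.stop();
        line.close();
    }
}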