Add AudioFormat.getBytesPerSample and use it

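The per-sample size for PCM data was computed ad hoc in several places:
BlockingAudioTrack had a private getBytesPerFrame() that actually returned
bytes per sample and fell back to -1 for unknown formats, while
FileSynthesisCallback, AudioRecord, AudioTrack and JetPlayer each assumed
"anything that is not 8-bit PCM is 16-bit". Centralize this in a hidden
AudioFormat.getBytesPerSample() helper that maps ENCODING_PCM_8BIT to 1 byte,
ENCODING_PCM_16BIT and ENCODING_DEFAULT to 2 bytes, and throws
IllegalArgumentException for any other encoding, then switch the callers
over to it.

Illustrative usage from framework code (the channel count and values below
are example assumptions, not part of this change):

    // Frame size for a hypothetical stereo 16-bit PCM buffer.
    int channelCount = 2;  // assumed for this example
    int bytesPerFrame =
            AudioFormat.getBytesPerSample(AudioFormat.ENCODING_PCM_16BIT) * channelCount;
    // bytesPerFrame == 4. Unsupported encodings (e.g. ENCODING_INVALID) now
    // fail fast with IllegalArgumentException instead of returning -1 or
    // silently being treated as 16-bit.
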
Change-Id: I90a40987ea99006af4a6e31136078c04976eb54d
diff --git a/core/java/android/speech/tts/BlockingAudioTrack.java b/core/java/android/speech/tts/BlockingAudioTrack.java
index 186cb49..92bb0ac 100644
--- a/core/java/android/speech/tts/BlockingAudioTrack.java
+++ b/core/java/android/speech/tts/BlockingAudioTrack.java
@@ -83,7 +83,7 @@
         mVolume = volume;
         mPan = pan;
 
-        mBytesPerFrame = getBytesPerFrame(mAudioFormat) * mChannelCount;
+        mBytesPerFrame = AudioFormat.getBytesPerSample(mAudioFormat) * mChannelCount;
         mIsShortUtterance = false;
         mAudioBufferSize = 0;
         mBytesWritten = 0;
@@ -229,17 +229,6 @@
         return audioTrack;
     }
 
-    private static int getBytesPerFrame(int audioFormat) {
-        if (audioFormat == AudioFormat.ENCODING_PCM_8BIT) {
-            return 1;
-        } else if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
-            return 2;
-        }
-
-        return -1;
-    }
-
-
     private void blockUntilDone(AudioTrack audioTrack) {
         if (mBytesWritten <= 0) {
             return;
diff --git a/core/java/android/speech/tts/FileSynthesisCallback.java b/core/java/android/speech/tts/FileSynthesisCallback.java
index 717aeb6..d84f7f0 100644
--- a/core/java/android/speech/tts/FileSynthesisCallback.java
+++ b/core/java/android/speech/tts/FileSynthesisCallback.java
@@ -278,8 +278,7 @@
 
     private ByteBuffer makeWavHeader(int sampleRateInHz, int audioFormat, int channelCount,
             int dataLength) {
-        // TODO: is AudioFormat.ENCODING_DEFAULT always the same as ENCODING_PCM_16BIT?
-        int sampleSizeInBytes = (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
+        int sampleSizeInBytes = AudioFormat.getBytesPerSample(audioFormat);
         int byteRate = sampleRateInHz * sampleSizeInBytes * channelCount;
         short blockAlign = (short) (sampleSizeInBytes * channelCount);
         short bitsPerSample = (short) (sampleSizeInBytes * 8);
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index ad0d459..6b524b5a 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -139,4 +139,19 @@
     public static final int CHANNEL_IN_FRONT_BACK = CHANNEL_IN_FRONT | CHANNEL_IN_BACK;
     // CHANNEL_IN_ALL is not yet defined; if added then it should match AUDIO_CHANNEL_IN_ALL
 
+    /** @hide */
+    public static int getBytesPerSample(int audioFormat)
+    {
+        switch (audioFormat) {
+        case ENCODING_PCM_8BIT:
+            return 1;
+        case ENCODING_PCM_16BIT:
+        case ENCODING_DEFAULT:
+            return 2;
+        case ENCODING_INVALID:
+        default:
+            throw new IllegalArgumentException("Bad audio format " + audioFormat);
+        }
+    }
+
 }
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index a4891f8..384e120 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -319,7 +319,7 @@
         // NB: this section is only valid with PCM data.
         // To update when supporting compressed formats
         int frameSizeInBytes = mChannelCount
-            * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
+            * (AudioFormat.getBytesPerSample(mAudioFormat));
         if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
             throw new IllegalArgumentException("Invalid audio buffer size.");
         }
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 17840f2..1899685 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -518,7 +518,7 @@
         // NB: this section is only valid with PCM data.
         //     To update when supporting compressed formats
         int frameSizeInBytes = mChannelCount
-                * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
+                * (AudioFormat.getBytesPerSample(mAudioFormat));
         if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
             throw new IllegalArgumentException("Invalid audio buffer size.");
         }
diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java
index bd91fc5..7735e78 100644
--- a/media/java/android/media/JetPlayer.java
+++ b/media/java/android/media/JetPlayer.java
@@ -169,9 +169,11 @@
                             
             native_setup(new WeakReference<JetPlayer>(this),
                     JetPlayer.getMaxTracks(),
-                    // bytes to frame conversion: sample format is ENCODING_PCM_16BIT, 2 channels
+                    // bytes to frame conversion:
                     // 1200 == minimum buffer size in frames on generation 1 hardware
-                    Math.max(1200, buffSizeInBytes / 4));
+                    Math.max(1200, buffSizeInBytes /
+                            (AudioFormat.getBytesPerSample(AudioFormat.ENCODING_PCM_16BIT) *
+                            2 /*channels*/)));
         }
     }