media/base/android/webaudio_media_codec_bridge.cc

// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/android/webaudio_media_codec_bridge.h"

#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <vector>

#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/android/jni_string.h"
#include "base/basictypes.h"
#include "base/logging.h"
#include "base/posix/eintr_wrapper.h"
#include "base/stl_util.h"
#include "jni/WebAudioMediaCodecBridge_jni.h"
#include "media/base/android/webaudio_media_codec_info.h"

using base::android::AttachCurrentThread;

namespace media {

void WebAudioMediaCodecBridge::RunWebAudioMediaCodec(
    base::SharedMemoryHandle encoded_audio_handle,
    base::FileDescriptor pcm_output,
    uint32_t data_size) {
  WebAudioMediaCodecBridge bridge(encoded_audio_handle, pcm_output, data_size);

  bridge.DecodeInMemoryAudioFile();
}

WebAudioMediaCodecBridge::WebAudioMediaCodecBridge(
    base::SharedMemoryHandle encoded_audio_handle,
    base::FileDescriptor pcm_output,
    uint32_t data_size)
    : encoded_audio_handle_(encoded_audio_handle),
      pcm_output_(pcm_output.fd),
      data_size_(data_size) {
  DVLOG(1) << "WebAudioMediaCodecBridge start **********************"
           << " output fd = " << pcm_output.fd;
}

WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() {
  if (close(pcm_output_)) {
    DVLOG(1) << "Couldn't close output fd " << pcm_output_
             << ": " << strerror(errno);
  }
}

int WebAudioMediaCodecBridge::SaveEncodedAudioToFile(
    JNIEnv* env,
    jobject context) {
  // Create a temporary file where we can save the encoded audio data.
  std::string temporaryFile =
      base::android::ConvertJavaStringToUTF8(
          env,
          Java_WebAudioMediaCodecBridge_CreateTempFile(env, context).obj());

  // Open the file and unlink it, so that it will be actually removed
  // when we close the file.
  int fd = open(temporaryFile.c_str(), O_RDWR);
  if (unlink(temporaryFile.c_str())) {
    VLOG(0) << "Couldn't unlink temp file " << temporaryFile
            << ": " << strerror(errno);
  }

  if (fd < 0)
    return -1;

  // Create a local mapping of the shared memory containing the
  // encoded audio data, and save the contents to the temporary file.
  base::SharedMemory encoded_data(encoded_audio_handle_, true);

  if (!encoded_data.Map(data_size_)) {
    VLOG(0) << "Unable to map shared memory!";
    return -1;
  }

  if (static_cast<uint32_t>(write(fd, encoded_data.memory(), data_size_))
      != data_size_) {
    VLOG(0) << "Failed to write all audio data to temp file!";
    return -1;
  }

  lseek(fd, 0, SEEK_SET);

  return fd;
}
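
// Illustrative sketch, not part of the original file: the open-then-unlink
// idiom used above, in isolation. Once unlink() removes the directory entry,
// the kernel keeps the inode alive only while a descriptor still refers to
// it, so the scratch data is reclaimed automatically when the fd is closed
// (or the process dies). The helper name is hypothetical and it assumes the
// file at |path| already exists.
static int OpenAndUnlinkScratchFile(const std::string& path) {
  int fd = open(path.c_str(), O_RDWR);
  if (fd < 0)
    return -1;
  if (unlink(path.c_str())) {
    DVLOG(1) << "Couldn't unlink " << path << ": " << strerror(errno);
  }
  return fd;  // Caller close()s the fd when finished; that frees the data.
}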

// Copies the encoded data into an unlinked temporary file and hands that
// file descriptor to the Java MediaCodec bridge for decoding; the decoded
// PCM comes back through the InitializeDestination() and OnChunkDecoded()
// callbacks below.
bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() {
  JNIEnv* env = AttachCurrentThread();
  CHECK(env);

  jobject context = base::android::GetApplicationContext();

  int sourceFd = SaveEncodedAudioToFile(env, context);

  if (sourceFd < 0)
    return false;

  jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile(
      env,
      context,
      reinterpret_cast<intptr_t>(this),
      sourceFd,
      data_size_);

  close(sourceFd);

  DVLOG(1) << "decoded = " << (decoded ? "true" : "false");

  return decoded;
}

void WebAudioMediaCodecBridge::InitializeDestination(
    JNIEnv* env,
    jobject /*java object*/,
    jint channel_count,
    jint sample_rate,
    jlong duration_microsec) {
  // Send information about this audio file: number of channels,
  // sample rate (Hz), and the number of frames.
  struct WebAudioMediaCodecInfo info = {
    static_cast<unsigned long>(channel_count),
    static_cast<unsigned long>(sample_rate),
    // The number of frames is the duration of the file
    // (in microseconds) times the sample rate.
    static_cast<unsigned long>(
        0.5 + (duration_microsec * 0.000001 *
               sample_rate))
  };

  DVLOG(1) << "InitializeDestination:"
           << " channel count = " << channel_count
           << " rate = " << sample_rate
           << " duration = " << duration_microsec << " microsec";

  HANDLE_EINTR(write(pcm_output_, &info, sizeof(info)));
}
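
// Worked example (illustration only, not from the original source): a 2.5
// second file sampled at 44100 Hz arrives here as duration_microsec =
// 2500000, so the frame count computed above is
//   round(2500000 * 0.000001 * 44100) = round(110250.0) = 110250,
// and, for a two-channel file, the struct written to the pipe is
// initialized as { 2, 44100, 110250 } in the field order used above.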

void WebAudioMediaCodecBridge::OnChunkDecoded(
    JNIEnv* env,
    jobject /*java object*/,
    jobject buf,
    jint buf_size,
    jint input_channel_count,
    jint output_channel_count) {

  if (buf_size <= 0 || !buf)
    return;

  int8_t* buffer =
      static_cast<int8_t*>(env->GetDirectBufferAddress(buf));
  size_t count = static_cast<size_t>(buf_size);
  std::vector<int16_t> decoded_data;

  if (input_channel_count == 1 && output_channel_count == 2) {
    // See crbug.com/266006. The file has one channel, but the
    // decoder decided to return two channels. To be consistent with
    // the number of channels in the file, only send one channel (the
    // first).
    int16_t* data = static_cast<int16_t*>(env->GetDirectBufferAddress(buf));
    int frame_count = buf_size / sizeof(*data) / 2;

    decoded_data.resize(frame_count);
    for (int k = 0; k < frame_count; ++k) {
      decoded_data[k] = *data;
      data += 2;
    }
    buffer = reinterpret_cast<int8_t*>(vector_as_array(&decoded_data));
    DCHECK(buffer);
    count = frame_count * sizeof(*data);
  }

  // Write out the data to the pipe in small chunks if necessary.
  while (count > 0) {
    int bytes_to_write = (count >= PIPE_BUF) ? PIPE_BUF : count;
    ssize_t bytes_written = HANDLE_EINTR(write(pcm_output_,
                                               buffer,
                                               bytes_to_write));
    if (bytes_written == -1)
      break;
    count -= bytes_written;
    buffer += bytes_written;
  }
}
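
// Illustrative sketch, not part of the original file: the PIPE_BUF-sized
// chunked-write loop above, in isolation. POSIX guarantees that writes of at
// most PIPE_BUF bytes to a pipe are atomic, and capping each write() keeps a
// single large decoded chunk from monopolizing the pipe buffer. The helper
// name is hypothetical.
static void WriteAllToPipe(int fd, const int8_t* data, size_t size) {
  while (size > 0) {
    size_t chunk = (size >= PIPE_BUF) ? PIPE_BUF : size;
    ssize_t written = HANDLE_EINTR(write(fd, data, chunk));
    if (written == -1)
      break;  // The read end was closed or an unrecoverable error occurred.
    size -= static_cast<size_t>(written);
    data += written;
  }
}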

bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) {
  return RegisterNativesImpl(env);
}

}  // namespace media