media/base/android/webaudio_media_codec_bridge.cc

// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/android/webaudio_media_codec_bridge.h"

#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <vector>

#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/android/jni_string.h"
#include "base/basictypes.h"
#include "base/files/scoped_file.h"
#include "base/logging.h"
#include "base/posix/eintr_wrapper.h"
#include "base/stl_util.h"
#include "jni/WebAudioMediaCodecBridge_jni.h"
#include "media/base/android/webaudio_media_codec_info.h"

using base::android::AttachCurrentThread;

namespace media {
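
// Decoding flow: RunWebAudioMediaCodec() wraps the shared-memory block of
// encoded audio and the pipe fd for PCM output in a bridge object.
// DecodeInMemoryAudioFile() copies the encoded bytes into an unlinked
// temporary file and hands that file descriptor to the Java side
// (WebAudioMediaCodecBridge.decodeAudioFile), which drives the Android
// MediaCodec decoder. The Java side calls back into InitializeDestination()
// once with the stream parameters and into OnChunkDecoded() for every
// decoded buffer; both write their output to pcm_output_.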
void WebAudioMediaCodecBridge::RunWebAudioMediaCodec(
    base::SharedMemoryHandle encoded_audio_handle,
    base::FileDescriptor pcm_output,
    uint32_t data_size) {
  WebAudioMediaCodecBridge bridge(encoded_audio_handle, pcm_output, data_size);

  bridge.DecodeInMemoryAudioFile();
}

WebAudioMediaCodecBridge::WebAudioMediaCodecBridge(
    base::SharedMemoryHandle encoded_audio_handle,
    base::FileDescriptor pcm_output,
    uint32_t data_size)
    : encoded_audio_handle_(encoded_audio_handle),
      pcm_output_(pcm_output.fd),
      data_size_(data_size) {
  DVLOG(1) << "WebAudioMediaCodecBridge start **********************"
           << " output fd = " << pcm_output.fd;
}

WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() {
  if (close(pcm_output_)) {
    DVLOG(1) << "Couldn't close output fd " << pcm_output_
             << ": " << strerror(errno);
  }
}
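
// Copies the encoded audio from the shared-memory region into an unlinked
// temporary file and returns a file descriptor positioned at the start of
// the data, or -1 on failure. Ownership of the returned fd passes to the
// caller. Because the file is unlinked right after being opened, the kernel
// reclaims it as soon as the last descriptor is closed.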
int WebAudioMediaCodecBridge::SaveEncodedAudioToFile(
    JNIEnv* env,
    jobject context) {
  // Create a temporary file where we can save the encoded audio data.
  std::string temporaryFile =
      base::android::ConvertJavaStringToUTF8(
          env,
          Java_WebAudioMediaCodecBridge_createTempFile(env, context).obj());

  // Open the file and unlink it, so that it will be actually removed
  // when we close the file.
  base::ScopedFD fd(open(temporaryFile.c_str(), O_RDWR));
  if (unlink(temporaryFile.c_str())) {
    VLOG(0) << "Couldn't unlink temp file " << temporaryFile
            << ": " << strerror(errno);
  }

  if (!fd.is_valid()) {
    return -1;
  }

  // Create a local mapping of the shared memory containing the
  // encoded audio data, and save the contents to the temporary file.
  base::SharedMemory encoded_data(encoded_audio_handle_, true);

  if (!encoded_data.Map(data_size_)) {
    VLOG(0) << "Unable to map shared memory!";
    return -1;
  }

  if (static_cast<uint32_t>(write(fd.get(), encoded_data.memory(), data_size_))
      != data_size_) {
    VLOG(0) << "Failed to write all audio data to temp file!";
    return -1;
  }

  // Rewind so the decoder reads from the beginning of the file.
  lseek(fd.get(), 0, SEEK_SET);

  return fd.release();
}

bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() {
  JNIEnv* env = AttachCurrentThread();
  CHECK(env);

  jobject context = base::android::GetApplicationContext();

  int sourceFd = SaveEncodedAudioToFile(env, context);

  if (sourceFd < 0)
    return false;

  jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile(
      env,
      context,
      reinterpret_cast<intptr_t>(this),
      sourceFd,
      data_size_);

  close(sourceFd);

  DVLOG(1) << "decoded = " << (decoded ? "true" : "false");

  return decoded;
}
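
// Called back from Java once the decoder has determined the stream
// parameters. Writes a WebAudioMediaCodecInfo header (see
// media/base/android/webaudio_media_codec_info.h) down the pipe before any
// PCM data, so the reader on the other end knows the channel count, sample
// rate, and frame count to expect.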
void WebAudioMediaCodecBridge::InitializeDestination(
    JNIEnv* env,
    jobject /*java object*/,
    jint channel_count,
    jint sample_rate,
    jlong duration_microsec) {
  // Send information about this audio file: number of channels,
  // sample rate (Hz), and the number of frames.
  struct WebAudioMediaCodecInfo info = {
    static_cast<unsigned long>(channel_count),
    static_cast<unsigned long>(sample_rate),
    // The number of frames is the duration of the file
    // (in microseconds) times the sample rate, rounded to the
    // nearest integer.
    static_cast<unsigned long>(
        0.5 + (duration_microsec * 0.000001 *
               sample_rate))
  };

  DVLOG(1) << "InitializeDestination:"
           << " channel count = " << channel_count
           << " rate = " << sample_rate
           << " duration = " << duration_microsec << " microsec";

  HANDLE_EINTR(write(pcm_output_, &info, sizeof(info)));
}
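
// Called back from Java for each buffer of decoded audio. |buf| is a direct
// ByteBuffer of interleaved 16-bit PCM samples. The samples are forwarded
// down the pipe in chunks of at most PIPE_BUF bytes (the POSIX limit for an
// atomic pipe write); partial writes are handled by advancing |buffer| and
// continuing the loop.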
void WebAudioMediaCodecBridge::OnChunkDecoded(
    JNIEnv* env,
    jobject /*java object*/,
    jobject buf,
    jint buf_size,
    jint input_channel_count,
    jint output_channel_count) {

  if (buf_size <= 0 || !buf)
    return;

  int8_t* buffer =
      static_cast<int8_t*>(env->GetDirectBufferAddress(buf));
  size_t count = static_cast<size_t>(buf_size);
  std::vector<int16_t> decoded_data;

  if (input_channel_count == 1 && output_channel_count == 2) {
    // See crbug.com/266006. The file has one channel, but the
    // decoder decided to return two channels. To be consistent with
    // the number of channels in the file, only send one channel (the
    // first).
    int16_t* data = static_cast<int16_t*>(env->GetDirectBufferAddress(buf));
    int frame_count = buf_size / sizeof(*data) / 2;

    decoded_data.resize(frame_count);
    for (int k = 0; k < frame_count; ++k) {
      decoded_data[k] = *data;
      data += 2;
    }
    buffer = reinterpret_cast<int8_t*>(vector_as_array(&decoded_data));
    DCHECK(buffer);
    count = frame_count * sizeof(*data);
  }

  // Write out the data to the pipe in small chunks if necessary.
  while (count > 0) {
    int bytes_to_write = (count >= PIPE_BUF) ? PIPE_BUF : count;
    ssize_t bytes_written = HANDLE_EINTR(write(pcm_output_,
                                               buffer,
                                               bytes_to_write));
    if (bytes_written == -1)
      break;
    count -= bytes_written;
    buffer += bytes_written;
  }
}
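
// Registers the native methods declared in the generated
// jni/WebAudioMediaCodecBridge_jni.h header; RegisterNativesImpl() is
// produced by Chromium's JNI generator from WebAudioMediaCodecBridge.java.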
bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) {
  return RegisterNativesImpl(env);
}

}  // namespace media