1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 // For loading files, we make use of overlapped i/o to ensure that reading from
6 // the filesystem (e.g., a network filesystem) does not block the calling
7 // thread. An alternative approach would be to use a background thread or pool
8 // of threads, but it seems better to leverage the operating system's ability
9 // to do background file reads for us.
11 // Since overlapped reads require a 'static' buffer for the duration of the
12 // asynchronous read, the URLRequestFileJob keeps a buffer as a member var. In
13 // URLRequestFileJob::Read, data is simply copied from the object's buffer into
14 // the given buffer. If there is no data to copy, the URLRequestFileJob
15 // attempts to read more from the file to fill its buffer. If reading from the
16 // file does not complete synchronously, then the URLRequestFileJob waits for a
17 // signal from the OS that the overlapped read has completed. It does so by
18 // leveraging the MessageLoop::WatchObject API.
20 #include "net/url_request/url_request_file_job.h"
22 #include "base/bind.h"
23 #include "base/compiler_specific.h"
24 #include "base/files/file_util.h"
25 #include "base/message_loop/message_loop.h"
26 #include "base/profiler/scoped_tracker.h"
27 #include "base/strings/string_util.h"
28 #include "base/synchronization/lock.h"
29 #include "base/task_runner.h"
30 #include "base/threading/thread_restrictions.h"
31 #include "build/build_config.h"
32 #include "net/base/file_stream.h"
33 #include "net/base/filename_util.h"
34 #include "net/base/io_buffer.h"
35 #include "net/base/load_flags.h"
36 #include "net/base/mime_util.h"
37 #include "net/base/net_errors.h"
38 #include "net/filter/filter.h"
39 #include "net/http/http_util.h"
40 #include "net/url_request/url_request_error_job.h"
41 #include "net/url_request/url_request_file_dir_job.h"
45 #include "base/win/shortcut.h"
// Default-initializes the file-metadata holder filled in by FetchMetaInfo().
// NOTE(review): the source appears elided here — the init-list head and the
// initializers for the other members (file_exists, file_size, is_directory)
// plus the constructor body are missing; confirm against the upstream file.
URLRequestFileJob::FileMetaInfo::FileMetaInfo()
      mime_type_result(false),
// Constructs a file job: remembers the target path and creates the
// FileStream that performs the asynchronous file I/O on |file_task_runner|.
URLRequestFileJob::URLRequestFileJob(
    // NOTE(review): the first parameter appears elided in this extraction —
    // the init list below passes a |request| to URLRequestJob; confirm
    // against the upstream file.
    NetworkDelegate* network_delegate,
    const base::FilePath& file_path,
    const scoped_refptr<base::TaskRunner>& file_task_runner)
    : URLRequestJob(request, network_delegate),
      file_path_(file_path),
      stream_(new FileStream(file_task_runner)),
      file_task_runner_(file_task_runner),
      // NOTE(review): one initializer appears elided before this line
      // (presumably remaining_bytes_) — confirm against upstream.
      weak_ptr_factory_(this) {}
// Starts the request: fetches file metadata (existence, size, directory
// flag, MIME type) on |file_task_runner_| so the potentially blocking
// filesystem/registry work stays off this thread, then resumes here in
// DidFetchMetaInfo().
void URLRequestFileJob::Start() {
  // Owned by base::Owned() below; deleted after the reply task runs.
  FileMetaInfo* meta_info = new FileMetaInfo();
  file_task_runner_->PostTaskAndReply(
      // NOTE(review): the FROM_HERE argument line appears elided here —
      // confirm against the upstream file.
      base::Bind(&URLRequestFileJob::FetchMetaInfo, file_path_,
                 base::Unretained(meta_info)),
      // Bound with a weak pointer so the reply is dropped if this job is
      // killed before the metadata fetch completes.
      base::Bind(&URLRequestFileJob::DidFetchMetaInfo,
                 weak_ptr_factory_.GetWeakPtr(),
                 base::Owned(meta_info)));
  // (closing brace elided in this extraction)
// Cancels the job: invalidates weak pointers so any in-flight callbacks
// bound with GetWeakPtr() (metadata fetch reply, open/seek/read completions)
// are dropped, then defers to the base class for the rest of the teardown.
void URLRequestFileJob::Kill() {
  weak_ptr_factory_.InvalidateWeakPtrs();
  URLRequestJob::Kill();
  // (closing brace elided in this extraction)
// Reads up to |dest_size| bytes of the file into |dest|.  The read is
// clamped to |remaining_bytes_| (the unread portion of the requested byte
// range) and issued asynchronously on |stream_|; DidRead() runs when the
// read does not complete synchronously.
// NOTE(review): several lines are elided in this extraction (the remaining
// parameters, the zero-byte short-circuit body, and the synchronous
// success/failure returns) — confirm against the upstream file.
bool URLRequestFileJob::ReadRawData(IOBuffer* dest,
  // TODO(vadimt): Remove ScopedTracker below once crbug.com/423948 is fixed.
  tracked_objects::ScopedTracker tracking_profile(
      FROM_HERE_WITH_EXPLICIT_FUNCTION(
          "423948 URLRequestFileJob::ReadRawData"));

  DCHECK_NE(dest_size, 0);
  DCHECK_GE(remaining_bytes_, 0);

  // Never read past the end of the requested byte range.
  if (remaining_bytes_ < dest_size)
    dest_size = static_cast<int>(remaining_bytes_);

  // If we should copy zero bytes because |remaining_bytes_| is zero, short
  // circuit here.

  // make_scoped_refptr(dest) keeps the buffer alive until DidRead() runs.
  int rv = stream_->Read(dest,
                         base::Bind(&URLRequestFileJob::DidRead,
                                    weak_ptr_factory_.GetWeakPtr(),
                                    make_scoped_refptr(dest)));
  // TODO(vadimt): Remove ScopedTracker below once crbug.com/423948 is fixed.
  tracked_objects::ScopedTracker tracking_profile1(
      FROM_HERE_WITH_EXPLICIT_FUNCTION(
          "423948 URLRequestFileJob::ReadRawData1"));

  // Data is immediately available.
  remaining_bytes_ -= rv;
  DCHECK_GE(remaining_bytes_, 0);

  // Otherwise, a read error occurred. We may just need to wait...
  if (rv == ERR_IO_PENDING) {
    SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0));
  NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv));
// Decides whether this job answers with a redirect instead of file contents.
// Two visible cases: (1) the path turned out to be a directory, so redirect
// to the same URL with a trailing slash appended; (2) a Windows .lnk
// shortcut is resolved and we redirect to its target.
// NOTE(review): several lines are elided in this extraction (platform #if
// guards, the directory-case return, the |resolved| declaration and its
// failure check) — confirm against the upstream file.
bool URLRequestFileJob::IsRedirectResponse(GURL* location,
                                           int* http_status_code) {
  if (meta_info_.is_directory) {
    // This happens when we discovered the file is a directory, so needs a
    // slash at the end of the path.
    std::string new_path = request_->url().path();
    new_path.push_back('/');
    GURL::Replacements replacements;
    replacements.SetPathStr(new_path);

    *location = request_->url().ReplaceComponents(replacements);
    *http_status_code = 301;  // simulate a permanent redirect

  // Follow a Windows shortcut.
  // We just resolve .lnk file, ignore others.
  if (!LowerCaseEqualsASCII(file_path_.Extension(), ".lnk"))

  base::FilePath new_path = file_path_;
  resolved = base::win::ResolveShortcut(new_path, &new_path, NULL);

  // If shortcut is not resolved successfully, do not redirect.
  *location = FilePathToFileURL(new_path);
  *http_status_code = 301;
// Returns the content filter to apply to the response body, if any: .svgz
// files are gzip-compressed SVG, so they are routed through a gzip filter;
// every other file is served unfiltered (NULL).
Filter* URLRequestFileJob::SetupFilter() const {
  // Bug 9936 - .svgz files needs to be decompressed.
  return LowerCaseEqualsASCII(file_path_.Extension(), ".svgz")
      ? Filter::GZipFactory() : NULL;
  // (closing brace elided in this extraction)
180 bool URLRequestFileJob::GetMimeType(std::string
* mime_type
) const {
182 if (meta_info_
.mime_type_result
) {
183 *mime_type
= meta_info_
.mime_type
;
// Parses the Range request header, if present.  Exactly one byte range is
// supported: it is stored in |byte_range_| and later applied to the stream
// in DidOpen().  Multi-range requests fail the job with
// ERR_REQUEST_RANGE_NOT_SATISFIABLE because multipart encoding is not
// implemented.
void URLRequestFileJob::SetExtraRequestHeaders(
    const HttpRequestHeaders& headers) {
  std::string range_header;
  if (headers.GetHeader(HttpRequestHeaders::kRange, &range_header)) {
    // We only care about "Range" header here.
    std::vector<HttpByteRange> ranges;
    if (HttpUtil::ParseRangeHeader(range_header, &ranges)) {
      if (ranges.size() == 1) {
        byte_range_ = ranges[0];
        // NOTE(review): an "} else {" branch head appears elided here —
        // confirm against the upstream file.
        // We don't support multiple range requests in one single URL request,
        // because we need to do multipart encoding here.
        // TODO(hclam): decide whether we want to support multiple range
        // requests.
        NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
                                    ERR_REQUEST_RANGE_NOT_SATISFIABLE));
// Notification hook invoked from DidSeek() with the stream seek result.
// Body and closing brace are elided in this extraction.
void URLRequestFileJob::OnSeekComplete(int64 result) {
// Notification hook invoked from DidRead() with the buffer and read result.
// Body and closing brace are elided in this extraction.
void URLRequestFileJob::OnReadComplete(net::IOBuffer* buf, int result) {
// Destructor.  Body and closing brace are elided in this extraction.
URLRequestFileJob::~URLRequestFileJob() {
// Runs on |file_task_runner_| (posted from Start(); may block): stats
// |file_path| and determines its MIME type, storing the results into
// |meta_info| for DidFetchMetaInfo() to consume on the job's thread.
void URLRequestFileJob::FetchMetaInfo(const base::FilePath& file_path,
                                      FileMetaInfo* meta_info) {
  base::File::Info file_info;
  meta_info->file_exists = base::GetFileInfo(file_path, &file_info);
  if (meta_info->file_exists) {
    meta_info->file_size = file_info.size;
    meta_info->is_directory = file_info.is_directory;
  // NOTE(review): the closing brace of the if above appears elided here.

  // On Windows GetMimeTypeFromFile() goes to the registry. Thus it should be
  // done in WorkerPool.
  meta_info->mime_type_result = GetMimeTypeFromFile(file_path,
                                                    &meta_info->mime_type);
// Back on the job's thread with the fetched metadata: fails a missing file
// with ERR_FILE_NOT_FOUND, handles the directory case, and otherwise opens
// the file for asynchronous reading, completing in DidOpen().
// NOTE(review): several lines are elided in this extraction (the returns,
// the directory-case body, and the synchronous-completion call after the
// Open below) — confirm against the upstream file.
void URLRequestFileJob::DidFetchMetaInfo(const FileMetaInfo* meta_info) {
  meta_info_ = *meta_info;

  // We use URLRequestFileJob to handle files as well as directories without
  // trailing slash.
  // If a directory does not exist, we return ERR_FILE_NOT_FOUND. Otherwise,
  // we will append trailing slash and redirect to FileDirJob.
  // A special case is "\" on Windows. We should resolve as invalid.
  // However, Windows resolves "\" to "C:\", thus reports it as existent.
  // So what happens is we append it with trailing slash and redirect it to
  // FileDirJob where it is resolved as invalid.
  if (!meta_info_.file_exists) {
    DidOpen(ERR_FILE_NOT_FOUND);

  if (meta_info_.is_directory) {

  int flags = base::File::FLAG_OPEN |
              base::File::FLAG_READ |
              base::File::FLAG_ASYNC;
  int rv = stream_->Open(file_path_, flags,
                         base::Bind(&URLRequestFileJob::DidOpen,
                                    weak_ptr_factory_.GetWeakPtr()));
  if (rv != ERR_IO_PENDING)
// Completion of FileStream::Open (also called directly with an error code
// from DidFetchMetaInfo).  Clamps the requested byte range against the
// actual file size, computes |remaining_bytes_|, and seeks to the range
// start when it is non-zero; otherwise proceeds as if the seek already
// succeeded.
// NOTE(review): several lines are elided in this extraction (the failure
// guard around the first NotifyDone, returns, the else head before the
// final DidSeek call, and the seek-failure DidSeek invocation) — confirm
// against the upstream file.
void URLRequestFileJob::DidOpen(int result) {
  // TODO(vadimt): Remove ScopedTracker below once crbug.com/423948 is fixed.
  tracked_objects::ScopedTracker tracking_profile(
      FROM_HERE_WITH_EXPLICIT_FUNCTION("423948 URLRequestFileJob::DidOpen"));

  NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, result));

  // A range that cannot be bounded by the file size is unsatisfiable.
  if (!byte_range_.ComputeBounds(meta_info_.file_size)) {
    NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
                                ERR_REQUEST_RANGE_NOT_SATISFIABLE));

  remaining_bytes_ = byte_range_.last_byte_position() -
                     byte_range_.first_byte_position() + 1;
  DCHECK_GE(remaining_bytes_, 0);

  if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) {
    // TODO(vadimt): Remove ScopedTracker below once crbug.com/423948 is fixed.
    tracked_objects::ScopedTracker tracking_profile1(
        FROM_HERE_WITH_EXPLICIT_FUNCTION(
            "423948 URLRequestFileJob::DidOpen 1"));

    int rv = stream_->Seek(base::File::FROM_BEGIN,
                           byte_range_.first_byte_position(),
                           base::Bind(&URLRequestFileJob::DidSeek,
                                      weak_ptr_factory_.GetWeakPtr()));
    if (rv != ERR_IO_PENDING) {
      // stream_->Seek() failed, so pass an intentionally erroneous value

    // We didn't need to call stream_->Seek() at all, so we pass to DidSeek()
    // the value that would mean seek success. This way we skip the code
    // handling seek failure.
    DidSeek(byte_range_.first_byte_position());
// Completion of the range seek (or its synthetic invocation from DidOpen).
// A result that differs from the requested range start means the seek
// failed, making the range unsatisfiable; on success, publishes the expected
// content size and signals that headers are complete so reads may begin.
void URLRequestFileJob::DidSeek(int64 result) {
  OnSeekComplete(result);
  if (result != byte_range_.first_byte_position()) {
    NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
                                ERR_REQUEST_RANGE_NOT_SATISFIABLE));
  // NOTE(review): a return and closing brace appear elided here — confirm
  // against the upstream file.

  set_expected_content_size(remaining_bytes_);
  NotifyHeadersComplete();
// Completion of an asynchronous FileStream::Read.  Updates the remaining
// byte count, notifies subclasses via OnReadComplete(), and reports either
// end-of-data (NotifyDone with a success status), failure (result < 0), or
// the number of bytes delivered via NotifyReadComplete().
// NOTE(review): the surrounding if/else conditions on |result| are partially
// elided in this extraction — confirm against the upstream file.
void URLRequestFileJob::DidRead(scoped_refptr<net::IOBuffer> buf, int result) {
  SetStatus(URLRequestStatus());  // Clear the IO_PENDING status
  remaining_bytes_ -= result;
  DCHECK_GE(remaining_bytes_, 0);

  OnReadComplete(buf.get(), result);

    NotifyDone(URLRequestStatus());
  } else if (result < 0) {
    NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, result));

  NotifyReadComplete(result);