chrome/browser/extensions/updater/extension_downloader.cc

// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/extensions/updater/extension_downloader.h"

#include <utility>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_handle.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/platform_file.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/time/time.h"
#include "base/version.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/updater/request_queue_impl.h"
#include "chrome/browser/extensions/updater/safe_manifest_parser.h"
#include "chrome/browser/metrics/metrics_service.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/common/extensions/extension_constants.h"
#include "chrome/common/extensions/manifest_url_handler.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"

using base::Time;
using base::TimeDelta;
using content::BrowserThread;

namespace extensions {

const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";

namespace {

const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
  // Number of initial errors (in sequence) to ignore before applying
  // exponential back-off rules.
  0,

  // Initial delay for exponential back-off in ms.
  2000,

  // Factor by which the waiting time will be multiplied.
  2,

  // Fuzzing percentage. ex: 10% will spread requests randomly
  // between 90%-100% of the calculated time.
  0.1,

  // Maximum amount of time we are willing to delay our request in ms.
  -1,

  // Time to keep an entry from being discarded even when it
  // has no significant state, -1 to never discard.
  -1,

  // Don't use initial delay unless the last request was an error.
  false,
};

const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";

#define RETRY_HISTOGRAM(name, retry_count, url) \
    if ((url).DomainIs("google.com")) \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    else \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1)

bool ShouldRetryRequest(const net::URLRequestStatus& status,
                        int response_code) {
  // Retry if the response code is a server error, or the request failed
  // because of network errors as opposed to file errors.
  return (response_code >= 500 && status.is_success()) ||
         status.status() == net::URLRequestStatus::FAILED;
}

}  // namespace

UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}

ExtensionDownloader::ExtensionFetch::ExtensionFetch() : url() {}

ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const GURL& url,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
    : id(id), url(url), package_hash(package_hash), version(version),
      request_ids(request_ids) {}

ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}

ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
    : delegate_(delegate),
      request_context_(request_context),
      weak_ptr_factory_(this),
      manifests_queue_(&kDefaultBackoffPolicy,
          base::Bind(&ExtensionDownloader::CreateManifestFetcher,
                     base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
          base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
                     base::Unretained(this))) {
  DCHECK(delegate_);
  DCHECK(request_context_);
}

ExtensionDownloader::~ExtensionDownloader() {}

bool ExtensionDownloader::AddExtension(const Extension& extension,
                                       int request_id) {
  // Skip extensions with empty update URLs converted from user
  // scripts.
  if (extension.converted_from_user_script() &&
      ManifestURL::GetUpdateURL(&extension).is_empty()) {
    return false;
  }

  // If the extension updates itself from the gallery, ignore any update URL
  // data. At the moment there is no extra data that an extension can
  // communicate to the gallery update servers.
  std::string update_url_data;
  if (!ManifestURL::UpdatesFromGallery(&extension))
    update_url_data = delegate_->GetUpdateUrlData(extension.id());

  return AddExtensionData(extension.id(), *extension.version(),
                          extension.GetType(),
                          ManifestURL::GetUpdateURL(&extension),
                          update_url_data, request_id);
}

bool ExtensionDownloader::AddPendingExtension(const std::string& id,
                                              const GURL& update_url,
                                              int request_id) {
  // Use a zero version to ensure that a pending extension will always
  // be updated, and thus installed (assuming all extensions have
  // non-zero versions).
  Version version("0.0.0.0");
  DCHECK(version.IsValid());

  return AddExtensionData(id,
                          version,
                          Manifest::TYPE_UNKNOWN,
                          update_url,
                          std::string(),
                          request_id);
}

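// Reports the URL statistics gathered so far, then starts every manifest
// fetch accumulated in |fetches_preparing_| and clears the map.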
void ExtensionDownloader::StartAllPending() {
  ReportStats();
  url_stats_ = URLStats();

  for (FetchMap::iterator it = fetches_preparing_.begin();
       it != fetches_preparing_.end(); ++it) {
    std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
    for (size_t i = 0; i < list.size(); ++i) {
      StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
    }
  }
  fetches_preparing_.clear();
}

void ExtensionDownloader::StartBlacklistUpdate(
    const std::string& version,
    const ManifestFetchData::PingData& ping_data,
    int request_id) {
  // Note: it is very important that we use the https version of the update
  // url here to avoid DNS hijacking of the blacklist, which is not validated
  // by a public key signature like .crx files are.
  scoped_ptr<ManifestFetchData> blacklist_fetch(
      new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
                            request_id));
  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
  blacklist_fetch->AddExtension(kBlacklistAppID,
                                version,
                                &ping_data,
                                std::string(),
                                kDefaultInstallSource);
  StartUpdateCheck(blacklist_fetch.Pass());
}

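// Shared helper for AddExtension() and AddPendingExtension(): validates the
// update URL, records per-type statistics, and batches the extension into a
// ManifestFetchData keyed by (request_id, update URL).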
bool ExtensionDownloader::AddExtensionData(const std::string& id,
                                           const Version& version,
                                           Manifest::Type extension_type,
                                           const GURL& extension_update_url,
                                           const std::string& update_url_data,
                                           int request_id) {
  GURL update_url(extension_update_url);
  // Skip extensions with non-empty invalid update URLs.
  if (!update_url.is_empty() && !update_url.is_valid()) {
    LOG(WARNING) << "Extension " << id << " has invalid update url "
                 << update_url;
    return false;
  }

  // Make sure we use SSL for store-hosted extensions.
  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
      !update_url.SchemeIsSecure())
    update_url = extension_urls::GetWebstoreUpdateUrl();

  // Skip extensions with empty IDs.
  if (id.empty()) {
    LOG(WARNING) << "Found extension with empty ID";
    return false;
  }

  if (update_url.DomainIs("google.com")) {
    url_stats_.google_url_count++;
  } else if (update_url.is_empty()) {
    url_stats_.no_url_count++;
    // Fill in default update URL.
    update_url = extension_urls::GetWebstoreUpdateUrl();
  } else {
    url_stats_.other_url_count++;
  }

  switch (extension_type) {
    case Manifest::TYPE_THEME:
      ++url_stats_.theme_count;
      break;
    case Manifest::TYPE_EXTENSION:
    case Manifest::TYPE_USER_SCRIPT:
      ++url_stats_.extension_count;
      break;
    case Manifest::TYPE_HOSTED_APP:
    case Manifest::TYPE_LEGACY_PACKAGED_APP:
      ++url_stats_.app_count;
      break;
    case Manifest::TYPE_PLATFORM_APP:
      ++url_stats_.platform_app_count;
      break;
    case Manifest::TYPE_UNKNOWN:
    default:
      ++url_stats_.pending_count;
      break;
  }

  std::vector<GURL> update_urls;
  update_urls.push_back(update_url);
  // If UMA is enabled, also add to ManifestFetchData for the
  // webstore update URL.
  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
      MetricsServiceHelper::IsMetricsReportingEnabled()) {
    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
  }

  for (size_t i = 0; i < update_urls.size(); ++i) {
    DCHECK(!update_urls[i].is_empty());
    DCHECK(update_urls[i].is_valid());

    std::string install_source = i == 0 ?
        kDefaultInstallSource : kNotFromWebstoreInstallSource;

    ManifestFetchData::PingData ping_data;
    ManifestFetchData::PingData* optional_ping_data = NULL;
    if (delegate_->GetPingDataForExtension(id, &ping_data))
      optional_ping_data = &ping_data;

    // Find or create a ManifestFetchData to add this extension to.
    bool added = false;
    FetchMap::iterator existing_iter = fetches_preparing_.find(
        std::make_pair(request_id, update_urls[i]));
    if (existing_iter != fetches_preparing_.end() &&
        !existing_iter->second.empty()) {
      // Try to add to the ManifestFetchData at the end of the list.
      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
      if (existing_fetch->AddExtension(id, version.GetString(),
                                       optional_ping_data, update_url_data,
                                       install_source)) {
        added = true;
      }
    }
    if (!added) {
      // Otherwise add a new element to the list, if the list doesn't exist or
      // if its last element is already full.
      linked_ptr<ManifestFetchData> fetch(
          new ManifestFetchData(update_urls[i], request_id));
      fetches_preparing_[std::make_pair(request_id, update_urls[i])].
          push_back(fetch);
      added = fetch->AddExtension(id, version.GetString(),
                                  optional_ping_data,
                                  update_url_data,
                                  install_source);
      DCHECK(added);
    }
  }

  return true;
}

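// Emits UMA histograms for the counts accumulated in |url_stats_| during the
// current round of update checks.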
void ExtensionDownloader::ReportStats() const {
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
                           url_stats_.extension_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
                           url_stats_.theme_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
                           url_stats_.app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
                           url_stats_.platform_app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
                           url_stats_.pending_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
                           url_stats_.google_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
                           url_stats_.other_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
                           url_stats_.no_url_count);
}

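// Schedules the manifest fetch described by |fetch_data|, merging it into an
// identical queued or in-flight request when one exists. If background
// networking is disabled, the affected extensions are instead reported as
// failed with the DISABLED error.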
void ExtensionDownloader::StartUpdateCheck(
    scoped_ptr<ManifestFetchData> fetch_data) {
  const std::set<std::string>& id_set(fetch_data->extension_ids());

  if (CommandLine::ForCurrentProcess()->HasSwitch(
      switches::kDisableBackgroundNetworking)) {
    NotifyExtensionsDownloadFailed(id_set,
                                   fetch_data->request_ids(),
                                   ExtensionDownloaderDelegate::DISABLED);
    return;
  }

  RequestQueue<ManifestFetchData>::iterator i;
  for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
    if (fetch_data->full_url() == i->full_url()) {
      // This url is already scheduled to be fetched.
      i->Merge(*fetch_data);
      return;
    }
  }

  if (manifests_queue_.active_request() &&
      manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
    manifests_queue_.active_request()->Merge(*fetch_data);
  } else {
    UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
        fetch_data->full_url().possibly_invalid_spec().length());

    manifests_queue_.ScheduleRequest(fetch_data.Pass());
  }
}

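// Callback used by |manifests_queue_| to create and start the URLFetcher for
// the currently active manifest request.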
void ExtensionDownloader::CreateManifestFetcher() {
  if (VLOG_IS_ON(2)) {
    std::vector<std::string> id_vector(
        manifests_queue_.active_request()->extension_ids().begin(),
        manifests_queue_.active_request()->extension_ids().end());
    std::string id_list = JoinString(id_vector, ',');
    VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
            << " for " << id_list;
  }

  manifest_fetcher_.reset(net::URLFetcher::Create(
      kManifestFetcherId, manifests_queue_.active_request()->full_url(),
      net::URLFetcher::GET, this));
  manifest_fetcher_->SetRequestContext(request_context_);
  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                  net::LOAD_DO_NOT_SAVE_COOKIES |
                                  net::LOAD_DISABLE_CACHE);
  // Update checks can be interrupted if a network change is detected; this is
  // common for the retail mode AppPack on ChromeOS. Retrying once should be
  // enough to recover in those cases; let the fetcher retry up to 3 times
  // just in case. http://crosbug.com/130602
  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  manifest_fetcher_->Start();
}

void ExtensionDownloader::OnURLFetchComplete(
    const net::URLFetcher* source) {
  VLOG(2) << source->GetResponseCode() << " " << source->GetURL();

  if (source == manifest_fetcher_.get()) {
    std::string data;
    source->GetResponseAsString(&data);
    OnManifestFetchComplete(source->GetURL(),
                            source->GetStatus(),
                            source->GetResponseCode(),
                            source->GetBackoffDelay(),
                            data);
  } else if (source == extension_fetcher_.get()) {
    OnCRXFetchComplete(source,
                       source->GetURL(),
                       source->GetStatus(),
                       source->GetResponseCode(),
                       source->GetBackoffDelay());
  } else {
    NOTREACHED();
  }
}

void ExtensionDownloader::OnManifestFetchComplete(
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay,
    const std::string& data) {
  // We want to try parsing the manifest, and if it indicates updates are
  // available, we want to fire off requests to fetch those updates.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
    RETRY_HISTOGRAM("ManifestFetchSuccess",
                    manifests_queue_.active_request_failure_count(), url);
    VLOG(2) << "beginning manifest parse for " << url;
    scoped_refptr<SafeManifestParser> safe_parser(
        new SafeManifestParser(
            data,
            manifests_queue_.reset_active_request().release(),
            base::Bind(&ExtensionDownloader::HandleManifestResults,
                       weak_ptr_factory_.GetWeakPtr())));
    safe_parser->Start();
  } else {
    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        manifests_queue_.active_request_failure_count() < kMaxRetries) {
      manifests_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("ManifestFetchFailure",
                      manifests_queue_.active_request_failure_count(), url);
      NotifyExtensionsDownloadFailed(
          manifests_queue_.active_request()->extension_ids(),
          manifests_queue_.active_request()->request_ids(),
          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
    }
  }
  manifest_fetcher_.reset();
  manifests_queue_.reset_active_request();

  // If we have any pending manifest requests, fire off the next one.
  manifests_queue_.StartNextRequest();
}

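// Invoked by SafeManifestParser once the update manifest has been parsed (or
// has failed to parse): kicks off CRX fetches for available updates and
// records ping results derived from the <daystart> element.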
void ExtensionDownloader::HandleManifestResults(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results* results) {
  // Keep a list of extensions that will not be updated, so that the
  // |delegate_| can be notified once we're done here.
  std::set<std::string> not_updated(fetch_data.extension_ids());

  if (!results) {
    NotifyExtensionsDownloadFailed(
        not_updated,
        fetch_data.request_ids(),
        ExtensionDownloaderDelegate::MANIFEST_INVALID);
    return;
  }

  // Examine the parsed manifest and kick off fetches of any new crx files.
  std::vector<int> updates;
  DetermineUpdates(fetch_data, *results, &updates);
  for (size_t i = 0; i < updates.size(); i++) {
    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
    const std::string& id = update->extension_id;
    not_updated.erase(id);

    GURL crx_url = update->crx_url;
    if (id != kBlacklistAppID) {
      NotifyUpdateFound(update->extension_id, update->version);
    } else {
      // The URL of the blacklist file is returned by the server and we need to
      // be sure that we continue to be able to reliably detect whether a URL
      // references a blacklist file.
      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;

      // Force https (crbug.com/129587).
      if (!crx_url.SchemeIsSecure()) {
        url_canon::Replacements<char> replacements;
        std::string scheme("https");
        replacements.SetScheme(scheme.c_str(),
                               url_parse::Component(0, scheme.size()));
        crx_url = crx_url.ReplaceComponents(replacements);
      }
    }
    scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
        update->extension_id, crx_url, update->package_hash,
        update->version, fetch_data.request_ids()));
    FetchUpdatedExtension(fetch.Pass());
  }

  // If the manifest response included a <daystart> element, we want to save
  // that value for any extensions which had sent a ping in the request.
  if (fetch_data.base_url().DomainIs("google.com") &&
      results->daystart_elapsed_seconds >= 0) {
    Time day_start =
        Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);

    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
    std::set<std::string>::const_iterator i;
    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
      const std::string& id = *i;
      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
      result.day_start = day_start;
    }
  }

  NotifyExtensionsDownloadFailed(
      not_updated,
      fetch_data.request_ids(),
      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
}

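// Examines |possible_updates| and appends to |result| the indices of entries
// that represent genuine updates: the extension must be part of |fetch_data|,
// newer than the installed version (unless the install is still pending), and
// compatible with the current browser version.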
void ExtensionDownloader::DetermineUpdates(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results& possible_updates,
    std::vector<int>* result) {
  // This will only be valid if one of possible_updates specifies
  // browser_min_version.
  Version browser_version;

  for (size_t i = 0; i < possible_updates.list.size(); i++) {
    const UpdateManifest::Result* update = &possible_updates.list[i];
    const std::string& id = update->extension_id;

    if (!fetch_data.Includes(id)) {
      VLOG(2) << "Ignoring " << id << " from this manifest";
      continue;
    }

    if (VLOG_IS_ON(2)) {
      if (update->version.empty())
        VLOG(2) << "manifest indicates " << id << " has no update";
      else
        VLOG(2) << "manifest indicates " << id
                << " latest version is '" << update->version << "'";
    }

    if (!delegate_->IsExtensionPending(id)) {
      // If we're not installing a pending extension, and the update
      // version is the same or older than what's already installed,
      // we don't want it.
      std::string version;
      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
        VLOG(2) << id << " is not installed";
        continue;
      }

      VLOG(2) << id << " is at '" << version << "'";

      Version existing_version(version);
      Version update_version(update->version);

      if (!update_version.IsValid() ||
          update_version.CompareTo(existing_version) <= 0) {
        continue;
      }
    }

    // If the update specifies a browser minimum version, do we qualify?
    if (update->browser_min_version.length() > 0) {
      // First determine the browser version if we haven't already.
      if (!browser_version.IsValid()) {
        chrome::VersionInfo version_info;
        if (version_info.is_valid())
          browser_version = Version(version_info.Version());
      }
      Version browser_min_version(update->browser_min_version);
      if (browser_version.IsValid() && browser_min_version.IsValid() &&
          browser_min_version.CompareTo(browser_version) > 0) {
        // TODO(asargent) - We may want this to show up in the extensions UI
        // eventually. (http://crbug.com/12547).
        LOG(WARNING) << "Updated version of extension " << id
                     << " available, but requires chrome version "
                     << update->browser_min_version;
        continue;
      }
    }
    VLOG(2) << "will try to update " << id;
    result->push_back(i);
  }
}

// Begins (or queues up) download of an updated extension.
void ExtensionDownloader::FetchUpdatedExtension(
    scoped_ptr<ExtensionFetch> fetch_data) {
  if (!fetch_data->url.is_valid()) {
    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
               << "' for extension " << fetch_data->id;
    return;
  }

  for (RequestQueue<ExtensionFetch>::iterator iter =
           extensions_queue_.begin();
       iter != extensions_queue_.end(); ++iter) {
    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
      iter->request_ids.insert(fetch_data->request_ids.begin(),
                               fetch_data->request_ids.end());
      return;  // already scheduled
    }
  }

  if (extensions_queue_.active_request() &&
      extensions_queue_.active_request()->url == fetch_data->url) {
    extensions_queue_.active_request()->request_ids.insert(
        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
  } else {
    extensions_queue_.ScheduleRequest(fetch_data.Pass());
  }
}

void ExtensionDownloader::CreateExtensionFetcher() {
  extension_fetcher_.reset(net::URLFetcher::Create(
      kExtensionFetcherId, extensions_queue_.active_request()->url,
      net::URLFetcher::GET, this));
  extension_fetcher_->SetRequestContext(request_context_);
  extension_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                   net::LOAD_DO_NOT_SAVE_COOKIES |
                                   net::LOAD_DISABLE_CACHE);
  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  // Download CRX files to a temp file. The blacklist is small and will be
  // processed in memory, so it is fetched into a string.
  if (extensions_queue_.active_request()->id != kBlacklistAppID) {
    extension_fetcher_->SaveResponseToTemporaryFile(
        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
  }

  VLOG(2) << "Starting fetch of " << extensions_queue_.active_request()->url
          << " for " << extensions_queue_.active_request()->id;

  extension_fetcher_->Start();
}

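// Handles completion of a CRX download: on success, ownership of the
// downloaded file is passed to the delegate; on failure, the request is
// retried or reported to the delegate as a failed download.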
void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay) {
  const std::string& id = extensions_queue_.active_request()->id;
  const std::set<int>& request_ids =
      extensions_queue_.active_request()->request_ids;
  const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];

  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    delegate_->OnExtensionDownloadFinished(
        id, crx_path, url, extensions_queue_.active_request()->version,
        ping, request_ids);
  } else {
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    }
  }
  extension_fetcher_.reset();
  if (extensions_queue_.active_request())
    ping_results_.erase(id);
  extensions_queue_.reset_active_request();

  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
}

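// Reports a download failure for each extension in |extension_ids| to the
// delegate and drops the corresponding cached ping results.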
void ExtensionDownloader::NotifyExtensionsDownloadFailed(
    const std::set<std::string>& extension_ids,
    const std::set<int>& request_ids,
    ExtensionDownloaderDelegate::Error error) {
  for (std::set<std::string>::const_iterator it = extension_ids.begin();
       it != extension_ids.end(); ++it) {
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
    delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
    ping_results_.erase(*it);
  }
}

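// Broadcasts a NOTIFICATION_EXTENSION_UPDATE_FOUND notification carrying the
// extension id and the version offered by the update server.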
void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
                                            const std::string& version) {
  UpdateDetails updateInfo(id, Version(version));
  content::NotificationService::current()->Notify(
      chrome::NOTIFICATION_EXTENSION_UPDATE_FOUND,
      content::NotificationService::AllBrowserContextsAndSources(),
      content::Details<UpdateDetails>(&updateInfo));
}

}  // namespace extensions