1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#include "chrome/browser/extensions/updater/extension_downloader.h"

#include <set>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_handle.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/platform_file.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/time/time.h"
#include "base/version.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/updater/extension_cache.h"
#include "chrome/browser/extensions/updater/request_queue_impl.h"
#include "chrome/browser/extensions/updater/safe_manifest_parser.h"
#include "chrome/browser/metrics/metrics_service.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/common/extensions/extension_constants.h"
#include "chrome/common/extensions/manifest_url_handler.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"
42 using base::TimeDelta
;
43 using content::BrowserThread
;
45 namespace extensions
{
47 const char ExtensionDownloader::kBlacklistAppID
[] = "com.google.crx.blacklist";
51 const net::BackoffEntry::Policy kDefaultBackoffPolicy
= {
52 // Number of initial errors (in sequence) to ignore before applying
53 // exponential back-off rules.
56 // Initial delay for exponential back-off in ms.
59 // Factor by which the waiting time will be multiplied.
62 // Fuzzing percentage. ex: 10% will spread requests randomly
63 // between 90%-100% of the calculated time.
66 // Maximum amount of time we are willing to delay our request in ms.
69 // Time to keep an entry from being discarded even when it
70 // has no significant state, -1 to never discard.
73 // Don't use initial delay unless the last request was an error.
// Install-source labels reported to the update server: the webstore URL
// added for UMA gets tagged "notfromwebstore"; the primary URL gets the
// empty default source.
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";
// Records the retry count for a fetch into one of two histograms, keyed on
// whether the URL is a google.com domain. The missing `else` arm and closing
// brace were restored; the two UMA calls differ only in the histogram suffix.
#define RETRY_HISTOGRAM(name, retry_count, url) \
    if ((url).DomainIs("google.com")) { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    } else { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    }
91 bool ShouldRetryRequest(const net::URLRequestStatus
& status
,
93 // Retry if the response code is a server error, or the request failed because
94 // of network errors as opposed to file errors.
95 return (response_code
>= 500 && status
.is_success()) ||
96 status
.status() == net::URLRequestStatus::FAILED
;
101 UpdateDetails::UpdateDetails(const std::string
& id
, const Version
& version
)
102 : id(id
), version(version
) {}
104 UpdateDetails::~UpdateDetails() {}
106 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
107 : url(), is_protected(false) {}
109 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
110 const std::string
& id
,
112 const std::string
& package_hash
,
113 const std::string
& version
,
114 const std::set
<int>& request_ids
)
115 : id(id
), url(url
), package_hash(package_hash
), version(version
),
116 request_ids(request_ids
), is_protected(false) {}
118 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
120 ExtensionDownloader::ExtensionDownloader(
121 ExtensionDownloaderDelegate
* delegate
,
122 net::URLRequestContextGetter
* request_context
)
123 : delegate_(delegate
),
124 request_context_(request_context
),
125 weak_ptr_factory_(this),
126 manifests_queue_(&kDefaultBackoffPolicy
,
127 base::Bind(&ExtensionDownloader::CreateManifestFetcher
,
128 base::Unretained(this))),
129 extensions_queue_(&kDefaultBackoffPolicy
,
130 base::Bind(&ExtensionDownloader::CreateExtensionFetcher
,
131 base::Unretained(this))),
132 extension_cache_(NULL
) {
134 DCHECK(request_context_
);
137 ExtensionDownloader::~ExtensionDownloader() {}
139 bool ExtensionDownloader::AddExtension(const Extension
& extension
,
141 // Skip extensions with empty update URLs converted from user
143 if (extension
.converted_from_user_script() &&
144 ManifestURL::GetUpdateURL(&extension
).is_empty()) {
148 // If the extension updates itself from the gallery, ignore any update URL
149 // data. At the moment there is no extra data that an extension can
150 // communicate to the the gallery update servers.
151 std::string update_url_data
;
152 if (!ManifestURL::UpdatesFromGallery(&extension
))
153 update_url_data
= delegate_
->GetUpdateUrlData(extension
.id());
155 return AddExtensionData(extension
.id(), *extension
.version(),
157 ManifestURL::GetUpdateURL(&extension
),
158 update_url_data
, request_id
);
161 bool ExtensionDownloader::AddPendingExtension(const std::string
& id
,
162 const GURL
& update_url
,
164 // Use a zero version to ensure that a pending extension will always
165 // be updated, and thus installed (assuming all extensions have
166 // non-zero versions).
167 Version
version("0.0.0.0");
168 DCHECK(version
.IsValid());
170 return AddExtensionData(id
,
172 Manifest::TYPE_UNKNOWN
,
178 void ExtensionDownloader::StartAllPending(ExtensionCache
* cache
) {
180 extension_cache_
= cache
;
181 extension_cache_
->Start(base::Bind(
182 &ExtensionDownloader::DoStartAllPending
,
183 weak_ptr_factory_
.GetWeakPtr()));
189 void ExtensionDownloader::DoStartAllPending() {
191 url_stats_
= URLStats();
193 for (FetchMap::iterator it
= fetches_preparing_
.begin();
194 it
!= fetches_preparing_
.end(); ++it
) {
195 std::vector
<linked_ptr
<ManifestFetchData
> >& list
= it
->second
;
196 for (size_t i
= 0; i
< list
.size(); ++i
) {
197 StartUpdateCheck(scoped_ptr
<ManifestFetchData
>(list
[i
].release()));
200 fetches_preparing_
.clear();
203 void ExtensionDownloader::StartBlacklistUpdate(
204 const std::string
& version
,
205 const ManifestFetchData::PingData
& ping_data
,
207 // Note: it is very important that we use the https version of the update
208 // url here to avoid DNS hijacking of the blacklist, which is not validated
209 // by a public key signature like .crx files are.
210 scoped_ptr
<ManifestFetchData
> blacklist_fetch(
211 new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
213 DCHECK(blacklist_fetch
->base_url().SchemeIsSecure());
214 blacklist_fetch
->AddExtension(kBlacklistAppID
,
218 kDefaultInstallSource
);
219 StartUpdateCheck(blacklist_fetch
.Pass());
222 bool ExtensionDownloader::AddExtensionData(const std::string
& id
,
223 const Version
& version
,
224 Manifest::Type extension_type
,
225 const GURL
& extension_update_url
,
226 const std::string
& update_url_data
,
228 GURL
update_url(extension_update_url
);
229 // Skip extensions with non-empty invalid update URLs.
230 if (!update_url
.is_empty() && !update_url
.is_valid()) {
231 LOG(WARNING
) << "Extension " << id
<< " has invalid update url "
236 // Make sure we use SSL for store-hosted extensions.
237 if (extension_urls::IsWebstoreUpdateUrl(update_url
) &&
238 !update_url
.SchemeIsSecure())
239 update_url
= extension_urls::GetWebstoreUpdateUrl();
241 // Skip extensions with empty IDs.
243 LOG(WARNING
) << "Found extension with empty ID";
247 if (update_url
.DomainIs("google.com")) {
248 url_stats_
.google_url_count
++;
249 } else if (update_url
.is_empty()) {
250 url_stats_
.no_url_count
++;
251 // Fill in default update URL.
252 update_url
= extension_urls::GetWebstoreUpdateUrl();
254 url_stats_
.other_url_count
++;
257 switch (extension_type
) {
258 case Manifest::TYPE_THEME
:
259 ++url_stats_
.theme_count
;
261 case Manifest::TYPE_EXTENSION
:
262 case Manifest::TYPE_USER_SCRIPT
:
263 ++url_stats_
.extension_count
;
265 case Manifest::TYPE_HOSTED_APP
:
266 case Manifest::TYPE_LEGACY_PACKAGED_APP
:
267 ++url_stats_
.app_count
;
269 case Manifest::TYPE_PLATFORM_APP
:
270 ++url_stats_
.platform_app_count
;
272 case Manifest::TYPE_UNKNOWN
:
274 ++url_stats_
.pending_count
;
278 std::vector
<GURL
> update_urls
;
279 update_urls
.push_back(update_url
);
280 // If UMA is enabled, also add to ManifestFetchData for the
281 // webstore update URL.
282 if (!extension_urls::IsWebstoreUpdateUrl(update_url
) &&
283 MetricsServiceHelper::IsMetricsReportingEnabled()) {
284 update_urls
.push_back(extension_urls::GetWebstoreUpdateUrl());
287 for (size_t i
= 0; i
< update_urls
.size(); ++i
) {
288 DCHECK(!update_urls
[i
].is_empty());
289 DCHECK(update_urls
[i
].is_valid());
291 std::string install_source
= i
== 0 ?
292 kDefaultInstallSource
: kNotFromWebstoreInstallSource
;
294 ManifestFetchData::PingData ping_data
;
295 ManifestFetchData::PingData
* optional_ping_data
= NULL
;
296 if (delegate_
->GetPingDataForExtension(id
, &ping_data
))
297 optional_ping_data
= &ping_data
;
299 // Find or create a ManifestFetchData to add this extension to.
301 FetchMap::iterator existing_iter
= fetches_preparing_
.find(
302 std::make_pair(request_id
, update_urls
[i
]));
303 if (existing_iter
!= fetches_preparing_
.end() &&
304 !existing_iter
->second
.empty()) {
305 // Try to add to the ManifestFetchData at the end of the list.
306 ManifestFetchData
* existing_fetch
= existing_iter
->second
.back().get();
307 if (existing_fetch
->AddExtension(id
, version
.GetString(),
308 optional_ping_data
, update_url_data
,
314 // Otherwise add a new element to the list, if the list doesn't exist or
315 // if its last element is already full.
316 linked_ptr
<ManifestFetchData
> fetch(
317 new ManifestFetchData(update_urls
[i
], request_id
));
318 fetches_preparing_
[std::make_pair(request_id
, update_urls
[i
])].
320 added
= fetch
->AddExtension(id
, version
.GetString(),
331 void ExtensionDownloader::ReportStats() const {
332 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
333 url_stats_
.extension_count
);
334 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
335 url_stats_
.theme_count
);
336 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
337 url_stats_
.app_count
);
338 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
339 url_stats_
.platform_app_count
);
340 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
341 url_stats_
.pending_count
);
342 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
343 url_stats_
.google_url_count
);
344 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
345 url_stats_
.other_url_count
);
346 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
347 url_stats_
.no_url_count
);
350 void ExtensionDownloader::StartUpdateCheck(
351 scoped_ptr
<ManifestFetchData
> fetch_data
) {
352 const std::set
<std::string
>& id_set(fetch_data
->extension_ids());
354 if (CommandLine::ForCurrentProcess()->HasSwitch(
355 switches::kDisableBackgroundNetworking
)) {
356 NotifyExtensionsDownloadFailed(id_set
,
357 fetch_data
->request_ids(),
358 ExtensionDownloaderDelegate::DISABLED
);
362 RequestQueue
<ManifestFetchData
>::iterator i
;
363 for (i
= manifests_queue_
.begin(); i
!= manifests_queue_
.end(); ++i
) {
364 if (fetch_data
->full_url() == i
->full_url()) {
365 // This url is already scheduled to be fetched.
366 i
->Merge(*fetch_data
);
371 if (manifests_queue_
.active_request() &&
372 manifests_queue_
.active_request()->full_url() == fetch_data
->full_url()) {
373 manifests_queue_
.active_request()->Merge(*fetch_data
);
375 UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
376 fetch_data
->full_url().possibly_invalid_spec().length());
378 manifests_queue_
.ScheduleRequest(fetch_data
.Pass());
382 void ExtensionDownloader::CreateManifestFetcher() {
384 std::vector
<std::string
> id_vector(
385 manifests_queue_
.active_request()->extension_ids().begin(),
386 manifests_queue_
.active_request()->extension_ids().end());
387 std::string id_list
= JoinString(id_vector
, ',');
388 VLOG(2) << "Fetching " << manifests_queue_
.active_request()->full_url()
389 << " for " << id_list
;
392 manifest_fetcher_
.reset(net::URLFetcher::Create(
393 kManifestFetcherId
, manifests_queue_
.active_request()->full_url(),
394 net::URLFetcher::GET
, this));
395 manifest_fetcher_
->SetRequestContext(request_context_
);
396 manifest_fetcher_
->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES
|
397 net::LOAD_DO_NOT_SAVE_COOKIES
|
398 net::LOAD_DISABLE_CACHE
);
399 // Update checks can be interrupted if a network change is detected; this is
400 // common for the retail mode AppPack on ChromeOS. Retrying once should be
401 // enough to recover in those cases; let the fetcher retry up to 3 times
402 // just in case. http://crosbug.com/130602
403 manifest_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
404 manifest_fetcher_
->Start();
407 void ExtensionDownloader::OnURLFetchComplete(
408 const net::URLFetcher
* source
) {
409 VLOG(2) << source
->GetResponseCode() << " " << source
->GetURL();
411 if (source
== manifest_fetcher_
.get()) {
413 source
->GetResponseAsString(&data
);
414 OnManifestFetchComplete(source
->GetURL(),
416 source
->GetResponseCode(),
417 source
->GetBackoffDelay(),
419 } else if (source
== extension_fetcher_
.get()) {
420 OnCRXFetchComplete(source
,
423 source
->GetResponseCode(),
424 source
->GetBackoffDelay());
430 void ExtensionDownloader::OnManifestFetchComplete(
432 const net::URLRequestStatus
& status
,
434 const base::TimeDelta
& backoff_delay
,
435 const std::string
& data
) {
436 // We want to try parsing the manifest, and if it indicates updates are
437 // available, we want to fire off requests to fetch those updates.
438 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
439 (response_code
== 200 || (url
.SchemeIsFile() && data
.length() > 0))) {
440 RETRY_HISTOGRAM("ManifestFetchSuccess",
441 manifests_queue_
.active_request_failure_count(), url
);
442 VLOG(2) << "beginning manifest parse for " << url
;
443 scoped_refptr
<SafeManifestParser
> safe_parser(
444 new SafeManifestParser(
446 manifests_queue_
.reset_active_request().release(),
447 base::Bind(&ExtensionDownloader::HandleManifestResults
,
448 weak_ptr_factory_
.GetWeakPtr())));
449 safe_parser
->Start();
451 VLOG(1) << "Failed to fetch manifest '" << url
.possibly_invalid_spec()
452 << "' response code:" << response_code
;
453 if (ShouldRetryRequest(status
, response_code
) &&
454 manifests_queue_
.active_request_failure_count() < kMaxRetries
) {
455 manifests_queue_
.RetryRequest(backoff_delay
);
457 RETRY_HISTOGRAM("ManifestFetchFailure",
458 manifests_queue_
.active_request_failure_count(), url
);
459 NotifyExtensionsDownloadFailed(
460 manifests_queue_
.active_request()->extension_ids(),
461 manifests_queue_
.active_request()->request_ids(),
462 ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED
);
465 manifest_fetcher_
.reset();
466 manifests_queue_
.reset_active_request();
468 // If we have any pending manifest requests, fire off the next one.
469 manifests_queue_
.StartNextRequest();
472 void ExtensionDownloader::HandleManifestResults(
473 const ManifestFetchData
& fetch_data
,
474 const UpdateManifest::Results
* results
) {
475 // Keep a list of extensions that will not be updated, so that the |delegate_|
476 // can be notified once we're done here.
477 std::set
<std::string
> not_updated(fetch_data
.extension_ids());
480 NotifyExtensionsDownloadFailed(
482 fetch_data
.request_ids(),
483 ExtensionDownloaderDelegate::MANIFEST_INVALID
);
487 // Examine the parsed manifest and kick off fetches of any new crx files.
488 std::vector
<int> updates
;
489 DetermineUpdates(fetch_data
, *results
, &updates
);
490 for (size_t i
= 0; i
< updates
.size(); i
++) {
491 const UpdateManifest::Result
* update
= &(results
->list
.at(updates
[i
]));
492 const std::string
& id
= update
->extension_id
;
493 not_updated
.erase(id
);
495 GURL crx_url
= update
->crx_url
;
496 if (id
!= kBlacklistAppID
) {
497 NotifyUpdateFound(update
->extension_id
, update
->version
);
499 // The URL of the blacklist file is returned by the server and we need to
500 // be sure that we continue to be able to reliably detect whether a URL
501 // references a blacklist file.
502 DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url
)) << crx_url
;
504 // Force https (crbug.com/129587).
505 if (!crx_url
.SchemeIsSecure()) {
506 url_canon::Replacements
<char> replacements
;
507 std::string
scheme("https");
508 replacements
.SetScheme(scheme
.c_str(),
509 url_parse::Component(0, scheme
.size()));
510 crx_url
= crx_url
.ReplaceComponents(replacements
);
513 scoped_ptr
<ExtensionFetch
> fetch(new ExtensionFetch(
514 update
->extension_id
, crx_url
, update
->package_hash
,
515 update
->version
, fetch_data
.request_ids()));
516 FetchUpdatedExtension(fetch
.Pass());
519 // If the manifest response included a <daystart> element, we want to save
520 // that value for any extensions which had sent a ping in the request.
521 if (fetch_data
.base_url().DomainIs("google.com") &&
522 results
->daystart_elapsed_seconds
>= 0) {
524 Time::Now() - TimeDelta::FromSeconds(results
->daystart_elapsed_seconds
);
526 const std::set
<std::string
>& extension_ids
= fetch_data
.extension_ids();
527 std::set
<std::string
>::const_iterator i
;
528 for (i
= extension_ids
.begin(); i
!= extension_ids
.end(); i
++) {
529 const std::string
& id
= *i
;
530 ExtensionDownloaderDelegate::PingResult
& result
= ping_results_
[id
];
531 result
.did_ping
= fetch_data
.DidPing(id
, ManifestFetchData::ROLLCALL
);
532 result
.day_start
= day_start
;
536 NotifyExtensionsDownloadFailed(
538 fetch_data
.request_ids(),
539 ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE
);
542 void ExtensionDownloader::DetermineUpdates(
543 const ManifestFetchData
& fetch_data
,
544 const UpdateManifest::Results
& possible_updates
,
545 std::vector
<int>* result
) {
546 // This will only be valid if one of possible_updates specifies
547 // browser_min_version.
548 Version browser_version
;
550 for (size_t i
= 0; i
< possible_updates
.list
.size(); i
++) {
551 const UpdateManifest::Result
* update
= &possible_updates
.list
[i
];
552 const std::string
& id
= update
->extension_id
;
554 if (!fetch_data
.Includes(id
)) {
555 VLOG(2) << "Ignoring " << id
<< " from this manifest";
560 if (update
->version
.empty())
561 VLOG(2) << "manifest indicates " << id
<< " has no update";
563 VLOG(2) << "manifest indicates " << id
564 << " latest version is '" << update
->version
<< "'";
567 if (!delegate_
->IsExtensionPending(id
)) {
568 // If we're not installing pending extension, and the update
569 // version is the same or older than what's already installed,
572 if (!delegate_
->GetExtensionExistingVersion(id
, &version
)) {
573 VLOG(2) << id
<< " is not installed";
577 VLOG(2) << id
<< " is at '" << version
<< "'";
579 Version
existing_version(version
);
580 Version
update_version(update
->version
);
582 if (!update_version
.IsValid() ||
583 update_version
.CompareTo(existing_version
) <= 0) {
588 // If the update specifies a browser minimum version, do we qualify?
589 if (update
->browser_min_version
.length() > 0) {
590 // First determine the browser version if we haven't already.
591 if (!browser_version
.IsValid()) {
592 chrome::VersionInfo version_info
;
593 if (version_info
.is_valid())
594 browser_version
= Version(version_info
.Version());
596 Version
browser_min_version(update
->browser_min_version
);
597 if (browser_version
.IsValid() && browser_min_version
.IsValid() &&
598 browser_min_version
.CompareTo(browser_version
) > 0) {
599 // TODO(asargent) - We may want this to show up in the extensions UI
600 // eventually. (http://crbug.com/12547).
601 LOG(WARNING
) << "Updated version of extension " << id
602 << " available, but requires chrome version "
603 << update
->browser_min_version
;
607 VLOG(2) << "will try to update " << id
;
608 result
->push_back(i
);
612 // Begins (or queues up) download of an updated extension.
613 void ExtensionDownloader::FetchUpdatedExtension(
614 scoped_ptr
<ExtensionFetch
> fetch_data
) {
615 if (!fetch_data
->url
.is_valid()) {
616 // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
617 LOG(ERROR
) << "Invalid URL: '" << fetch_data
->url
.possibly_invalid_spec()
618 << "' for extension " << fetch_data
->id
;
622 for (RequestQueue
<ExtensionFetch
>::iterator iter
=
623 extensions_queue_
.begin();
624 iter
!= extensions_queue_
.end(); ++iter
) {
625 if (iter
->id
== fetch_data
->id
|| iter
->url
== fetch_data
->url
) {
626 iter
->request_ids
.insert(fetch_data
->request_ids
.begin(),
627 fetch_data
->request_ids
.end());
628 return; // already scheduled
632 if (extensions_queue_
.active_request() &&
633 extensions_queue_
.active_request()->url
== fetch_data
->url
) {
634 extensions_queue_
.active_request()->request_ids
.insert(
635 fetch_data
->request_ids
.begin(), fetch_data
->request_ids
.end());
638 if (extension_cache_
&&
639 extension_cache_
->GetExtension(fetch_data
->id
, NULL
, &version
) &&
640 version
== fetch_data
->version
) {
641 base::FilePath crx_path
;
642 // Now get .crx file path and mark extension as used.
643 extension_cache_
->GetExtension(fetch_data
->id
, &crx_path
, &version
);
644 NotifyDelegateDownloadFinished(fetch_data
.Pass(), crx_path
, false);
646 extensions_queue_
.ScheduleRequest(fetch_data
.Pass());
651 void ExtensionDownloader::NotifyDelegateDownloadFinished(
652 scoped_ptr
<ExtensionFetch
> fetch_data
,
653 const base::FilePath
& crx_path
,
654 bool file_ownership_passed
) {
655 delegate_
->OnExtensionDownloadFinished(fetch_data
->id
, crx_path
,
656 file_ownership_passed
, fetch_data
->url
, fetch_data
->version
,
657 ping_results_
[fetch_data
->id
], fetch_data
->request_ids
);
658 ping_results_
.erase(fetch_data
->id
);
661 void ExtensionDownloader::CreateExtensionFetcher() {
662 const ExtensionFetch
* fetch
= extensions_queue_
.active_request();
663 int load_flags
= net::LOAD_DISABLE_CACHE
;
664 if (!fetch
->is_protected
|| !fetch
->url
.SchemeIs("https")) {
665 load_flags
|= net::LOAD_DO_NOT_SEND_COOKIES
|
666 net::LOAD_DO_NOT_SAVE_COOKIES
;
668 extension_fetcher_
.reset(net::URLFetcher::Create(
669 kExtensionFetcherId
, fetch
->url
, net::URLFetcher::GET
, this));
670 extension_fetcher_
->SetRequestContext(request_context_
);
671 extension_fetcher_
->SetLoadFlags(load_flags
);
672 extension_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
673 // Download CRX files to a temp file. The blacklist is small and will be
674 // processed in memory, so it is fetched into a string.
675 if (fetch
->id
!= kBlacklistAppID
) {
676 extension_fetcher_
->SaveResponseToTemporaryFile(
677 BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
680 VLOG(2) << "Starting fetch of " << fetch
->url
<< " for " << fetch
->id
;
682 extension_fetcher_
->Start();
685 void ExtensionDownloader::OnCRXFetchComplete(
686 const net::URLFetcher
* source
,
688 const net::URLRequestStatus
& status
,
690 const base::TimeDelta
& backoff_delay
) {
691 const std::string
& id
= extensions_queue_
.active_request()->id
;
692 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
693 (response_code
== 200 || url
.SchemeIsFile())) {
694 RETRY_HISTOGRAM("CrxFetchSuccess",
695 extensions_queue_
.active_request_failure_count(), url
);
696 base::FilePath crx_path
;
697 // Take ownership of the file at |crx_path|.
698 CHECK(source
->GetResponseAsFilePath(true, &crx_path
));
699 scoped_ptr
<ExtensionFetch
> fetch_data
=
700 extensions_queue_
.reset_active_request();
701 if (extension_cache_
) {
702 const std::string
& version
= fetch_data
->version
;
703 extension_cache_
->PutExtension(id
, crx_path
, version
,
704 base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished
,
705 weak_ptr_factory_
.GetWeakPtr(),
706 base::Passed(&fetch_data
)));
708 NotifyDelegateDownloadFinished(fetch_data
.Pass(), crx_path
, true);
710 } else if (status
.status() == net::URLRequestStatus::SUCCESS
&&
711 (response_code
== 401 || response_code
== 403) &&
712 !extensions_queue_
.active_request()->is_protected
) {
713 // On 401 or 403, requeue this fetch with cookies enabled.
714 extensions_queue_
.active_request()->is_protected
= true;
715 extensions_queue_
.RetryRequest(backoff_delay
);
717 const std::set
<int>& request_ids
=
718 extensions_queue_
.active_request()->request_ids
;
719 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[id
];
721 VLOG(1) << "Failed to fetch extension '" << url
.possibly_invalid_spec()
722 << "' response code:" << response_code
;
723 if (ShouldRetryRequest(status
, response_code
) &&
724 extensions_queue_
.active_request_failure_count() < kMaxRetries
) {
725 extensions_queue_
.RetryRequest(backoff_delay
);
727 RETRY_HISTOGRAM("CrxFetchFailure",
728 extensions_queue_
.active_request_failure_count(), url
);
729 // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
730 UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status
.error());
731 delegate_
->OnExtensionDownloadFailed(
732 id
, ExtensionDownloaderDelegate::CRX_FETCH_FAILED
, ping
, request_ids
);
734 ping_results_
.erase(id
);
735 extensions_queue_
.reset_active_request();
738 extension_fetcher_
.reset();
740 // If there are any pending downloads left, start the next one.
741 extensions_queue_
.StartNextRequest();
744 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
745 const std::set
<std::string
>& extension_ids
,
746 const std::set
<int>& request_ids
,
747 ExtensionDownloaderDelegate::Error error
) {
748 for (std::set
<std::string
>::const_iterator it
= extension_ids
.begin();
749 it
!= extension_ids
.end(); ++it
) {
750 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[*it
];
751 delegate_
->OnExtensionDownloadFailed(*it
, error
, ping
, request_ids
);
752 ping_results_
.erase(*it
);
756 void ExtensionDownloader::NotifyUpdateFound(const std::string
& id
,
757 const std::string
& version
) {
758 UpdateDetails
updateInfo(id
, Version(version
));
759 content::NotificationService::current()->Notify(
760 chrome::NOTIFICATION_EXTENSION_UPDATE_FOUND
,
761 content::NotificationService::AllBrowserContextsAndSources(),
762 content::Details
<UpdateDetails
>(&updateInfo
));
765 } // namespace extensions