// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/browser/updater/extension_downloader.h"

#include <utility>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/time/time.h"
#include "base/version.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
#include "extensions/browser/extensions_browser_client.h"
#include "extensions/browser/notification_types.h"
#include "extensions/browser/updater/extension_cache.h"
#include "extensions/browser/updater/request_queue_impl.h"
#include "extensions/browser/updater/safe_manifest_parser.h"
#include "extensions/common/extension_urls.h"
#include "extensions/common/manifest_url_handlers.h"
#include "google_apis/gaia/identity_provider.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"
43 using base::TimeDelta
;
44 using content::BrowserThread
;
46 namespace extensions
{
48 const char ExtensionDownloader::kBlacklistAppID
[] = "com.google.crx.blacklist";
52 const net::BackoffEntry::Policy kDefaultBackoffPolicy
= {
53 // Number of initial errors (in sequence) to ignore before applying
54 // exponential back-off rules.
57 // Initial delay for exponential back-off in ms.
60 // Factor by which the waiting time will be multiplied.
63 // Fuzzing percentage. ex: 10% will spread requests randomly
64 // between 90%-100% of the calculated time.
67 // Maximum amount of time we are willing to delay our request in ms.
70 // Time to keep an entry from being discarded even when it
71 // has no significant state, -1 to never discard.
74 // Don't use initial delay unless the last request was an error.
78 const char kAuthUserQueryKey
[] = "authuser";
80 const int kMaxAuthUserValue
= 10;
81 const int kMaxOAuth2Attempts
= 3;
83 const char kNotFromWebstoreInstallSource
[] = "notfromwebstore";
84 const char kDefaultInstallSource
[] = "";
86 const char kGoogleDotCom
[] = "google.com";
87 const char kTokenServiceConsumerId
[] = "extension_downloader";
88 const char kWebstoreOAuth2Scope
[] =
89 "https://www.googleapis.com/auth/chromewebstore.readonly";
// Records the retry count for a (manifest|crx) fetch into a Google-URL or
// other-URL histogram, depending on the host of |url|.
// NOTE(review): the argument continuation lines of both UMA macros were
// dropped by the extraction; restored from the upstream Chromium source.
#define RETRY_HISTOGRAM(name, retry_count, url)                           \
  if ((url).DomainIs(kGoogleDotCom)) {                                    \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  } else {                                                                \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl",  \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  }
106 bool ShouldRetryRequest(const net::URLRequestStatus
& status
,
108 // Retry if the response code is a server error, or the request failed because
109 // of network errors as opposed to file errors.
110 return ((response_code
>= 500 && status
.is_success()) ||
111 status
.status() == net::URLRequestStatus::FAILED
);
114 // This parses and updates a URL query such that the value of the |authuser|
115 // query parameter is incremented by 1. If parameter was not present in the URL,
116 // it will be added with a value of 1. All other query keys and values are
117 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
119 bool IncrementAuthUserIndex(GURL
* url
) {
121 std::string old_query
= url
->query();
122 std::vector
<std::string
> new_query_parts
;
123 url::Component
query(0, old_query
.length());
124 url::Component key
, value
;
125 while (url::ExtractQueryKeyValue(old_query
.c_str(), &query
, &key
, &value
)) {
126 std::string key_string
= old_query
.substr(key
.begin
, key
.len
);
127 std::string value_string
= old_query
.substr(value
.begin
, value
.len
);
128 if (key_string
== kAuthUserQueryKey
) {
129 base::StringToInt(value_string
, &user_index
);
131 new_query_parts
.push_back(base::StringPrintf(
132 "%s=%s", key_string
.c_str(), value_string
.c_str()));
135 if (user_index
>= kMaxAuthUserValue
)
137 new_query_parts
.push_back(
138 base::StringPrintf("%s=%d", kAuthUserQueryKey
, user_index
+ 1));
139 std::string new_query_string
= base::JoinString(new_query_parts
, "&");
140 url::Component
new_query(0, new_query_string
.size());
141 url::Replacements
<char> replacements
;
142 replacements
.SetQuery(new_query_string
.c_str(), new_query
);
143 *url
= url
->ReplaceComponents(replacements
);
149 UpdateDetails::UpdateDetails(const std::string
& id
, const Version
& version
)
150 : id(id
), version(version
) {
153 UpdateDetails::~UpdateDetails() {
156 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
157 : url(), credentials(CREDENTIALS_NONE
) {
160 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
161 const std::string
& id
,
163 const std::string
& package_hash
,
164 const std::string
& version
,
165 const std::set
<int>& request_ids
)
168 package_hash(package_hash
),
170 request_ids(request_ids
),
171 credentials(CREDENTIALS_NONE
),
172 oauth2_attempt_count(0) {
175 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {
178 ExtensionDownloader::ExtensionDownloader(
179 ExtensionDownloaderDelegate
* delegate
,
180 net::URLRequestContextGetter
* request_context
)
181 : OAuth2TokenService::Consumer(kTokenServiceConsumerId
),
183 request_context_(request_context
),
184 manifests_queue_(&kDefaultBackoffPolicy
,
185 base::Bind(&ExtensionDownloader::CreateManifestFetcher
,
186 base::Unretained(this))),
187 extensions_queue_(&kDefaultBackoffPolicy
,
188 base::Bind(&ExtensionDownloader::CreateExtensionFetcher
,
189 base::Unretained(this))),
190 extension_cache_(NULL
),
191 enable_extra_update_metrics_(false),
192 weak_ptr_factory_(this) {
194 DCHECK(request_context_
.get());
197 ExtensionDownloader::~ExtensionDownloader() {
200 bool ExtensionDownloader::AddExtension(const Extension
& extension
,
202 // Skip extensions with empty update URLs converted from user
204 if (extension
.converted_from_user_script() &&
205 ManifestURL::GetUpdateURL(&extension
).is_empty()) {
209 // If the extension updates itself from the gallery, ignore any update URL
210 // data. At the moment there is no extra data that an extension can
211 // communicate to the the gallery update servers.
212 std::string update_url_data
;
213 if (!ManifestURL::UpdatesFromGallery(&extension
))
214 update_url_data
= delegate_
->GetUpdateUrlData(extension
.id());
216 std::string install_source
;
218 delegate_
->ShouldForceUpdate(extension
.id(), &install_source
);
219 return AddExtensionData(extension
.id(),
220 *extension
.version(),
222 ManifestURL::GetUpdateURL(&extension
),
229 bool ExtensionDownloader::AddPendingExtension(const std::string
& id
,
230 const GURL
& update_url
,
232 // Use a zero version to ensure that a pending extension will always
233 // be updated, and thus installed (assuming all extensions have
234 // non-zero versions).
235 Version
version("0.0.0.0");
236 DCHECK(version
.IsValid());
238 return AddExtensionData(id
,
240 Manifest::TYPE_UNKNOWN
,
248 void ExtensionDownloader::StartAllPending(ExtensionCache
* cache
) {
250 extension_cache_
= cache
;
251 extension_cache_
->Start(base::Bind(&ExtensionDownloader::DoStartAllPending
,
252 weak_ptr_factory_
.GetWeakPtr()));
258 void ExtensionDownloader::DoStartAllPending() {
260 url_stats_
= URLStats();
262 for (FetchMap::iterator it
= fetches_preparing_
.begin();
263 it
!= fetches_preparing_
.end();
265 std::vector
<linked_ptr
<ManifestFetchData
>>& list
= it
->second
;
266 for (size_t i
= 0; i
< list
.size(); ++i
) {
267 StartUpdateCheck(scoped_ptr
<ManifestFetchData
>(list
[i
].release()));
270 fetches_preparing_
.clear();
273 void ExtensionDownloader::StartBlacklistUpdate(
274 const std::string
& version
,
275 const ManifestFetchData::PingData
& ping_data
,
277 // Note: it is very important that we use the https version of the update
278 // url here to avoid DNS hijacking of the blacklist, which is not validated
279 // by a public key signature like .crx files are.
280 scoped_ptr
<ManifestFetchData
> blacklist_fetch(CreateManifestFetchData(
281 extension_urls::GetWebstoreUpdateUrl(), request_id
));
282 DCHECK(blacklist_fetch
->base_url().SchemeIsCryptographic());
283 blacklist_fetch
->AddExtension(kBlacklistAppID
,
287 kDefaultInstallSource
,
289 StartUpdateCheck(blacklist_fetch
.Pass());
292 void ExtensionDownloader::SetWebstoreIdentityProvider(
293 scoped_ptr
<IdentityProvider
> identity_provider
) {
294 identity_provider_
.swap(identity_provider
);
297 bool ExtensionDownloader::AddExtensionData(
298 const std::string
& id
,
299 const Version
& version
,
300 Manifest::Type extension_type
,
301 const GURL
& extension_update_url
,
302 const std::string
& update_url_data
,
305 const std::string
& install_source_override
) {
306 GURL
update_url(extension_update_url
);
307 // Skip extensions with non-empty invalid update URLs.
308 if (!update_url
.is_empty() && !update_url
.is_valid()) {
309 DLOG(WARNING
) << "Extension " << id
<< " has invalid update url "
314 // Make sure we use SSL for store-hosted extensions.
315 if (extension_urls::IsWebstoreUpdateUrl(update_url
) &&
316 !update_url
.SchemeIsCryptographic())
317 update_url
= extension_urls::GetWebstoreUpdateUrl();
319 // Skip extensions with empty IDs.
321 DLOG(WARNING
) << "Found extension with empty ID";
325 if (update_url
.DomainIs(kGoogleDotCom
)) {
326 url_stats_
.google_url_count
++;
327 } else if (update_url
.is_empty()) {
328 url_stats_
.no_url_count
++;
329 // Fill in default update URL.
330 update_url
= extension_urls::GetWebstoreUpdateUrl();
332 url_stats_
.other_url_count
++;
335 switch (extension_type
) {
336 case Manifest::TYPE_THEME
:
337 ++url_stats_
.theme_count
;
339 case Manifest::TYPE_EXTENSION
:
340 case Manifest::TYPE_USER_SCRIPT
:
341 ++url_stats_
.extension_count
;
343 case Manifest::TYPE_HOSTED_APP
:
344 case Manifest::TYPE_LEGACY_PACKAGED_APP
:
345 ++url_stats_
.app_count
;
347 case Manifest::TYPE_PLATFORM_APP
:
348 ++url_stats_
.platform_app_count
;
350 case Manifest::TYPE_UNKNOWN
:
352 ++url_stats_
.pending_count
;
356 std::vector
<GURL
> update_urls
;
357 update_urls
.push_back(update_url
);
358 // If metrics are enabled, also add to ManifestFetchData for the
359 // webstore update URL.
360 if (!extension_urls::IsWebstoreUpdateUrl(update_url
) &&
361 enable_extra_update_metrics_
) {
362 update_urls
.push_back(extension_urls::GetWebstoreUpdateUrl());
365 for (size_t i
= 0; i
< update_urls
.size(); ++i
) {
366 DCHECK(!update_urls
[i
].is_empty());
367 DCHECK(update_urls
[i
].is_valid());
369 std::string install_source
=
370 i
== 0 ? kDefaultInstallSource
: kNotFromWebstoreInstallSource
;
371 if (!install_source_override
.empty()) {
372 install_source
= install_source_override
;
375 ManifestFetchData::PingData ping_data
;
376 ManifestFetchData::PingData
* optional_ping_data
= NULL
;
377 if (delegate_
->GetPingDataForExtension(id
, &ping_data
))
378 optional_ping_data
= &ping_data
;
380 // Find or create a ManifestFetchData to add this extension to.
382 FetchMap::iterator existing_iter
=
383 fetches_preparing_
.find(std::make_pair(request_id
, update_urls
[i
]));
384 if (existing_iter
!= fetches_preparing_
.end() &&
385 !existing_iter
->second
.empty()) {
386 // Try to add to the ManifestFetchData at the end of the list.
387 ManifestFetchData
* existing_fetch
= existing_iter
->second
.back().get();
388 if (existing_fetch
->AddExtension(id
,
398 // Otherwise add a new element to the list, if the list doesn't exist or
399 // if its last element is already full.
400 linked_ptr
<ManifestFetchData
> fetch(
401 CreateManifestFetchData(update_urls
[i
], request_id
));
402 fetches_preparing_
[std::make_pair(request_id
, update_urls
[i
])].push_back(
404 added
= fetch
->AddExtension(id
,
417 void ExtensionDownloader::ReportStats() const {
418 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
419 url_stats_
.extension_count
);
420 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
421 url_stats_
.theme_count
);
422 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp", url_stats_
.app_count
);
423 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
424 url_stats_
.platform_app_count
);
425 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
426 url_stats_
.pending_count
);
427 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
428 url_stats_
.google_url_count
);
429 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
430 url_stats_
.other_url_count
);
431 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
432 url_stats_
.no_url_count
);
435 void ExtensionDownloader::StartUpdateCheck(
436 scoped_ptr
<ManifestFetchData
> fetch_data
) {
437 const std::set
<std::string
>& id_set(fetch_data
->extension_ids());
439 if (!ExtensionsBrowserClient::Get()->IsBackgroundUpdateAllowed()) {
440 NotifyExtensionsDownloadFailed(id_set
,
441 fetch_data
->request_ids(),
442 ExtensionDownloaderDelegate::DISABLED
);
446 RequestQueue
<ManifestFetchData
>::iterator i
;
447 for (i
= manifests_queue_
.begin(); i
!= manifests_queue_
.end(); ++i
) {
448 if (fetch_data
->full_url() == i
->full_url()) {
449 // This url is already scheduled to be fetched.
450 i
->Merge(*fetch_data
);
455 if (manifests_queue_
.active_request() &&
456 manifests_queue_
.active_request()->full_url() == fetch_data
->full_url()) {
457 manifests_queue_
.active_request()->Merge(*fetch_data
);
459 UMA_HISTOGRAM_COUNTS(
460 "Extensions.UpdateCheckUrlLength",
461 fetch_data
->full_url().possibly_invalid_spec().length());
463 manifests_queue_
.ScheduleRequest(fetch_data
.Pass());
467 void ExtensionDownloader::CreateManifestFetcher() {
469 std::vector
<std::string
> id_vector(
470 manifests_queue_
.active_request()->extension_ids().begin(),
471 manifests_queue_
.active_request()->extension_ids().end());
472 std::string id_list
= base::JoinString(id_vector
, ",");
473 VLOG(2) << "Fetching " << manifests_queue_
.active_request()->full_url()
474 << " for " << id_list
;
477 manifest_fetcher_
= net::URLFetcher::Create(
478 kManifestFetcherId
, manifests_queue_
.active_request()->full_url(),
479 net::URLFetcher::GET
, this);
480 manifest_fetcher_
->SetRequestContext(request_context_
.get());
481 manifest_fetcher_
->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES
|
482 net::LOAD_DO_NOT_SAVE_COOKIES
|
483 net::LOAD_DISABLE_CACHE
);
484 // Update checks can be interrupted if a network change is detected; this is
485 // common for the retail mode AppPack on ChromeOS. Retrying once should be
486 // enough to recover in those cases; let the fetcher retry up to 3 times
487 // just in case. http://crosbug.com/130602
488 manifest_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
489 manifest_fetcher_
->Start();
492 void ExtensionDownloader::OnURLFetchComplete(const net::URLFetcher
* source
) {
493 VLOG(2) << source
->GetResponseCode() << " " << source
->GetURL();
495 if (source
== manifest_fetcher_
.get()) {
497 source
->GetResponseAsString(&data
);
498 OnManifestFetchComplete(source
->GetURL(),
500 source
->GetResponseCode(),
501 source
->GetBackoffDelay(),
503 } else if (source
== extension_fetcher_
.get()) {
504 OnCRXFetchComplete(source
,
507 source
->GetResponseCode(),
508 source
->GetBackoffDelay());
514 void ExtensionDownloader::OnManifestFetchComplete(
516 const net::URLRequestStatus
& status
,
518 const base::TimeDelta
& backoff_delay
,
519 const std::string
& data
) {
520 // We want to try parsing the manifest, and if it indicates updates are
521 // available, we want to fire off requests to fetch those updates.
522 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
523 (response_code
== 200 || (url
.SchemeIsFile() && data
.length() > 0))) {
524 RETRY_HISTOGRAM("ManifestFetchSuccess",
525 manifests_queue_
.active_request_failure_count(),
527 VLOG(2) << "beginning manifest parse for " << url
;
528 scoped_refptr
<SafeManifestParser
> safe_parser(new SafeManifestParser(
531 &ExtensionDownloader::HandleManifestResults
,
532 weak_ptr_factory_
.GetWeakPtr(),
533 base::Owned(manifests_queue_
.reset_active_request().release()))));
534 safe_parser
->Start();
536 VLOG(1) << "Failed to fetch manifest '" << url
.possibly_invalid_spec()
537 << "' response code:" << response_code
;
538 if (ShouldRetryRequest(status
, response_code
) &&
539 manifests_queue_
.active_request_failure_count() < kMaxRetries
) {
540 manifests_queue_
.RetryRequest(backoff_delay
);
542 RETRY_HISTOGRAM("ManifestFetchFailure",
543 manifests_queue_
.active_request_failure_count(),
545 NotifyExtensionsDownloadFailed(
546 manifests_queue_
.active_request()->extension_ids(),
547 manifests_queue_
.active_request()->request_ids(),
548 ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED
);
551 manifest_fetcher_
.reset();
552 manifests_queue_
.reset_active_request();
554 // If we have any pending manifest requests, fire off the next one.
555 manifests_queue_
.StartNextRequest();
558 void ExtensionDownloader::HandleManifestResults(
559 const ManifestFetchData
* fetch_data
,
560 const UpdateManifest::Results
* results
) {
561 // Keep a list of extensions that will not be updated, so that the |delegate_|
562 // can be notified once we're done here.
563 std::set
<std::string
> not_updated(fetch_data
->extension_ids());
566 VLOG(2) << "parsing manifest failed (" << fetch_data
->full_url() << ")";
567 NotifyExtensionsDownloadFailed(
568 not_updated
, fetch_data
->request_ids(),
569 ExtensionDownloaderDelegate::MANIFEST_INVALID
);
572 VLOG(2) << "parsing manifest succeeded (" << fetch_data
->full_url() << ")";
575 // Examine the parsed manifest and kick off fetches of any new crx files.
576 std::vector
<int> updates
;
577 DetermineUpdates(*fetch_data
, *results
, &updates
);
578 for (size_t i
= 0; i
< updates
.size(); i
++) {
579 const UpdateManifest::Result
* update
= &(results
->list
.at(updates
[i
]));
580 const std::string
& id
= update
->extension_id
;
581 not_updated
.erase(id
);
583 GURL crx_url
= update
->crx_url
;
584 if (id
!= kBlacklistAppID
) {
585 NotifyUpdateFound(update
->extension_id
, update
->version
);
587 // The URL of the blacklist file is returned by the server and we need to
588 // be sure that we continue to be able to reliably detect whether a URL
589 // references a blacklist file.
590 DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url
)) << crx_url
;
592 // Force https (crbug.com/129587).
593 if (!crx_url
.SchemeIsCryptographic()) {
594 url::Replacements
<char> replacements
;
595 std::string
scheme("https");
596 replacements
.SetScheme(scheme
.c_str(),
597 url::Component(0, scheme
.size()));
598 crx_url
= crx_url
.ReplaceComponents(replacements
);
601 scoped_ptr
<ExtensionFetch
> fetch(
602 new ExtensionFetch(update
->extension_id
, crx_url
, update
->package_hash
,
603 update
->version
, fetch_data
->request_ids()));
604 FetchUpdatedExtension(fetch
.Pass());
607 // If the manifest response included a <daystart> element, we want to save
608 // that value for any extensions which had sent a ping in the request.
609 if (fetch_data
->base_url().DomainIs(kGoogleDotCom
) &&
610 results
->daystart_elapsed_seconds
>= 0) {
612 Time::Now() - TimeDelta::FromSeconds(results
->daystart_elapsed_seconds
);
614 const std::set
<std::string
>& extension_ids
= fetch_data
->extension_ids();
615 std::set
<std::string
>::const_iterator i
;
616 for (i
= extension_ids
.begin(); i
!= extension_ids
.end(); i
++) {
617 const std::string
& id
= *i
;
618 ExtensionDownloaderDelegate::PingResult
& result
= ping_results_
[id
];
619 result
.did_ping
= fetch_data
->DidPing(id
, ManifestFetchData::ROLLCALL
);
620 result
.day_start
= day_start
;
624 NotifyExtensionsDownloadFailed(
625 not_updated
, fetch_data
->request_ids(),
626 ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE
);
629 void ExtensionDownloader::DetermineUpdates(
630 const ManifestFetchData
& fetch_data
,
631 const UpdateManifest::Results
& possible_updates
,
632 std::vector
<int>* result
) {
633 for (size_t i
= 0; i
< possible_updates
.list
.size(); i
++) {
634 const UpdateManifest::Result
* update
= &possible_updates
.list
[i
];
635 const std::string
& id
= update
->extension_id
;
637 if (!fetch_data
.Includes(id
)) {
638 VLOG(2) << "Ignoring " << id
<< " from this manifest";
643 if (update
->version
.empty())
644 VLOG(2) << "manifest indicates " << id
<< " has no update";
646 VLOG(2) << "manifest indicates " << id
<< " latest version is '"
647 << update
->version
<< "'";
650 if (!delegate_
->IsExtensionPending(id
)) {
651 // If we're not installing pending extension, and the update
652 // version is the same or older than what's already installed,
655 if (!delegate_
->GetExtensionExistingVersion(id
, &version
)) {
656 VLOG(2) << id
<< " is not installed";
660 VLOG(2) << id
<< " is at '" << version
<< "'";
662 // We should skip the version check if update was forced.
663 if (!fetch_data
.DidForceUpdate(id
)) {
664 Version
existing_version(version
);
665 Version
update_version(update
->version
);
666 if (!update_version
.IsValid() ||
667 update_version
.CompareTo(existing_version
) <= 0) {
673 // If the update specifies a browser minimum version, do we qualify?
674 if (update
->browser_min_version
.length() > 0 &&
675 !ExtensionsBrowserClient::Get()->IsMinBrowserVersionSupported(
676 update
->browser_min_version
)) {
677 // TODO(asargent) - We may want this to show up in the extensions UI
678 // eventually. (http://crbug.com/12547).
679 DLOG(WARNING
) << "Updated version of extension " << id
680 << " available, but requires chrome version "
681 << update
->browser_min_version
;
684 VLOG(2) << "will try to update " << id
;
685 result
->push_back(i
);
689 // Begins (or queues up) download of an updated extension.
690 void ExtensionDownloader::FetchUpdatedExtension(
691 scoped_ptr
<ExtensionFetch
> fetch_data
) {
692 if (!fetch_data
->url
.is_valid()) {
693 // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
694 DLOG(WARNING
) << "Invalid URL: '" << fetch_data
->url
.possibly_invalid_spec()
695 << "' for extension " << fetch_data
->id
;
699 for (RequestQueue
<ExtensionFetch
>::iterator iter
= extensions_queue_
.begin();
700 iter
!= extensions_queue_
.end();
702 if (iter
->id
== fetch_data
->id
|| iter
->url
== fetch_data
->url
) {
703 iter
->request_ids
.insert(fetch_data
->request_ids
.begin(),
704 fetch_data
->request_ids
.end());
705 return; // already scheduled
709 if (extensions_queue_
.active_request() &&
710 extensions_queue_
.active_request()->url
== fetch_data
->url
) {
711 extensions_queue_
.active_request()->request_ids
.insert(
712 fetch_data
->request_ids
.begin(), fetch_data
->request_ids
.end());
715 if (extension_cache_
&&
716 extension_cache_
->GetExtension(fetch_data
->id
, fetch_data
->package_hash
,
718 version
== fetch_data
->version
) {
719 base::FilePath crx_path
;
720 // Now get .crx file path and mark extension as used.
721 extension_cache_
->GetExtension(fetch_data
->id
, fetch_data
->package_hash
,
722 &crx_path
, &version
);
723 NotifyDelegateDownloadFinished(fetch_data
.Pass(), true, crx_path
, false);
725 extensions_queue_
.ScheduleRequest(fetch_data
.Pass());
730 void ExtensionDownloader::NotifyDelegateDownloadFinished(
731 scoped_ptr
<ExtensionFetch
> fetch_data
,
733 const base::FilePath
& crx_path
,
734 bool file_ownership_passed
) {
735 // Dereference required params before passing a scoped_ptr.
736 const std::string
& id
= fetch_data
->id
;
737 const std::string
& package_hash
= fetch_data
->package_hash
;
738 const GURL
& url
= fetch_data
->url
;
739 const std::string
& version
= fetch_data
->version
;
740 const std::set
<int>& request_ids
= fetch_data
->request_ids
;
741 delegate_
->OnExtensionDownloadFinished(
742 CRXFileInfo(id
, crx_path
, package_hash
), file_ownership_passed
, url
,
743 version
, ping_results_
[id
], request_ids
,
744 from_cache
? base::Bind(&ExtensionDownloader::CacheInstallDone
,
745 weak_ptr_factory_
.GetWeakPtr(),
746 base::Passed(&fetch_data
))
747 : ExtensionDownloaderDelegate::InstallCallback());
749 ping_results_
.erase(id
);
752 void ExtensionDownloader::CacheInstallDone(
753 scoped_ptr
<ExtensionFetch
> fetch_data
,
754 bool should_download
) {
755 ping_results_
.erase(fetch_data
->id
);
756 if (should_download
) {
757 // Resume download from cached manifest data.
758 extensions_queue_
.ScheduleRequest(fetch_data
.Pass());
762 void ExtensionDownloader::CreateExtensionFetcher() {
763 const ExtensionFetch
* fetch
= extensions_queue_
.active_request();
764 extension_fetcher_
= net::URLFetcher::Create(kExtensionFetcherId
, fetch
->url
,
765 net::URLFetcher::GET
, this);
766 extension_fetcher_
->SetRequestContext(request_context_
.get());
767 extension_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
769 int load_flags
= net::LOAD_DISABLE_CACHE
;
770 bool is_secure
= fetch
->url
.SchemeIsCryptographic();
771 if (fetch
->credentials
!= ExtensionFetch::CREDENTIALS_COOKIES
|| !is_secure
) {
772 load_flags
|= net::LOAD_DO_NOT_SEND_COOKIES
| net::LOAD_DO_NOT_SAVE_COOKIES
;
774 extension_fetcher_
->SetLoadFlags(load_flags
);
776 // Download CRX files to a temp file. The blacklist is small and will be
777 // processed in memory, so it is fetched into a string.
778 if (fetch
->id
!= kBlacklistAppID
) {
779 extension_fetcher_
->SaveResponseToTemporaryFile(
780 BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
783 if (fetch
->credentials
== ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN
&&
785 if (access_token_
.empty()) {
786 // We should try OAuth2, but we have no token cached. This
787 // ExtensionFetcher will be started once the token fetch is complete,
788 // in either OnTokenFetchSuccess or OnTokenFetchFailure.
789 DCHECK(identity_provider_
.get());
790 OAuth2TokenService::ScopeSet webstore_scopes
;
791 webstore_scopes
.insert(kWebstoreOAuth2Scope
);
792 access_token_request_
=
793 identity_provider_
->GetTokenService()->StartRequest(
794 identity_provider_
->GetActiveAccountId(), webstore_scopes
, this);
797 extension_fetcher_
->AddExtraRequestHeader(
798 base::StringPrintf("%s: Bearer %s",
799 net::HttpRequestHeaders::kAuthorization
,
800 access_token_
.c_str()));
803 VLOG(2) << "Starting fetch of " << fetch
->url
<< " for " << fetch
->id
;
804 extension_fetcher_
->Start();
807 void ExtensionDownloader::OnCRXFetchComplete(
808 const net::URLFetcher
* source
,
810 const net::URLRequestStatus
& status
,
812 const base::TimeDelta
& backoff_delay
) {
813 ExtensionFetch
& active_request
= *extensions_queue_
.active_request();
814 const std::string
& id
= active_request
.id
;
815 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
816 (response_code
== 200 || url
.SchemeIsFile())) {
817 RETRY_HISTOGRAM("CrxFetchSuccess",
818 extensions_queue_
.active_request_failure_count(),
820 base::FilePath crx_path
;
821 // Take ownership of the file at |crx_path|.
822 CHECK(source
->GetResponseAsFilePath(true, &crx_path
));
823 scoped_ptr
<ExtensionFetch
> fetch_data
=
824 extensions_queue_
.reset_active_request();
825 if (extension_cache_
) {
826 const std::string
& version
= fetch_data
->version
;
827 const std::string
& expected_hash
= fetch_data
->package_hash
;
828 extension_cache_
->PutExtension(
829 id
, expected_hash
, crx_path
, version
,
830 base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished
,
831 weak_ptr_factory_
.GetWeakPtr(), base::Passed(&fetch_data
),
834 NotifyDelegateDownloadFinished(fetch_data
.Pass(), false, crx_path
, true);
836 } else if (IterateFetchCredentialsAfterFailure(
837 &active_request
, status
, response_code
)) {
838 extensions_queue_
.RetryRequest(backoff_delay
);
840 const std::set
<int>& request_ids
= active_request
.request_ids
;
841 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[id
];
842 VLOG(1) << "Failed to fetch extension '" << url
.possibly_invalid_spec()
843 << "' response code:" << response_code
;
844 if (ShouldRetryRequest(status
, response_code
) &&
845 extensions_queue_
.active_request_failure_count() < kMaxRetries
) {
846 extensions_queue_
.RetryRequest(backoff_delay
);
848 RETRY_HISTOGRAM("CrxFetchFailure",
849 extensions_queue_
.active_request_failure_count(),
851 // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
852 UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status
.error());
853 delegate_
->OnExtensionDownloadFailed(
854 id
, ExtensionDownloaderDelegate::CRX_FETCH_FAILED
, ping
, request_ids
);
856 ping_results_
.erase(id
);
857 extensions_queue_
.reset_active_request();
860 extension_fetcher_
.reset();
862 // If there are any pending downloads left, start the next one.
863 extensions_queue_
.StartNextRequest();
866 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
867 const std::set
<std::string
>& extension_ids
,
868 const std::set
<int>& request_ids
,
869 ExtensionDownloaderDelegate::Error error
) {
870 for (std::set
<std::string
>::const_iterator it
= extension_ids
.begin();
871 it
!= extension_ids
.end();
873 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[*it
];
874 delegate_
->OnExtensionDownloadFailed(*it
, error
, ping
, request_ids
);
875 ping_results_
.erase(*it
);
879 void ExtensionDownloader::NotifyUpdateFound(const std::string
& id
,
880 const std::string
& version
) {
881 UpdateDetails
updateInfo(id
, Version(version
));
882 content::NotificationService::current()->Notify(
883 extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND
,
884 content::NotificationService::AllBrowserContextsAndSources(),
885 content::Details
<UpdateDetails
>(&updateInfo
));
888 bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
889 ExtensionFetch
* fetch
,
890 const net::URLRequestStatus
& status
,
892 bool auth_failure
= status
.status() == net::URLRequestStatus::CANCELED
||
893 (status
.status() == net::URLRequestStatus::SUCCESS
&&
894 (response_code
== net::HTTP_UNAUTHORIZED
||
895 response_code
== net::HTTP_FORBIDDEN
));
899 // Here we decide what to do next if the server refused to authorize this
901 switch (fetch
->credentials
) {
902 case ExtensionFetch::CREDENTIALS_NONE
:
903 if (fetch
->url
.DomainIs(kGoogleDotCom
) && identity_provider_
) {
904 fetch
->credentials
= ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN
;
906 fetch
->credentials
= ExtensionFetch::CREDENTIALS_COOKIES
;
909 case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN
:
910 fetch
->oauth2_attempt_count
++;
911 // OAuth2 may fail due to an expired access token, in which case we
912 // should invalidate the token and try again.
913 if (response_code
== net::HTTP_UNAUTHORIZED
&&
914 fetch
->oauth2_attempt_count
<= kMaxOAuth2Attempts
) {
915 DCHECK(identity_provider_
.get());
916 OAuth2TokenService::ScopeSet webstore_scopes
;
917 webstore_scopes
.insert(kWebstoreOAuth2Scope
);
918 identity_provider_
->GetTokenService()->InvalidateAccessToken(
919 identity_provider_
->GetActiveAccountId(), webstore_scopes
,
921 access_token_
.clear();
924 // Either there is no Gaia identity available, the active identity
925 // doesn't have access to this resource, or the server keeps returning
926 // 401s and we've retried too many times. Fall back on cookies.
927 if (access_token_
.empty() || response_code
== net::HTTP_FORBIDDEN
||
928 fetch
->oauth2_attempt_count
> kMaxOAuth2Attempts
) {
929 fetch
->credentials
= ExtensionFetch::CREDENTIALS_COOKIES
;
932 // Something else is wrong. Time to give up.
934 case ExtensionFetch::CREDENTIALS_COOKIES
:
935 if (response_code
== net::HTTP_FORBIDDEN
) {
936 // Try the next session identity, up to some maximum.
937 return IncrementAuthUserIndex(&fetch
->url
);
947 void ExtensionDownloader::OnGetTokenSuccess(
948 const OAuth2TokenService::Request
* request
,
949 const std::string
& access_token
,
950 const base::Time
& expiration_time
) {
951 access_token_
= access_token
;
952 extension_fetcher_
->AddExtraRequestHeader(
953 base::StringPrintf("%s: Bearer %s",
954 net::HttpRequestHeaders::kAuthorization
,
955 access_token_
.c_str()));
956 extension_fetcher_
->Start();
959 void ExtensionDownloader::OnGetTokenFailure(
960 const OAuth2TokenService::Request
* request
,
961 const GoogleServiceAuthError
& error
) {
962 // If we fail to get an access token, kick the pending fetch and let it fall
964 extension_fetcher_
->Start();
967 ManifestFetchData
* ExtensionDownloader::CreateManifestFetchData(
968 const GURL
& update_url
,
970 ManifestFetchData::PingMode ping_mode
= ManifestFetchData::NO_PING
;
971 if (update_url
.DomainIs(ping_enabled_domain_
.c_str()))
972 ping_mode
= ManifestFetchData::PING_WITH_ENABLED_STATE
;
973 return new ManifestFetchData(
974 update_url
, request_id
, brand_code_
, manifest_query_params_
, ping_mode
);
977 } // namespace extensions