1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/memory/scoped_handle.h"
15 #include "base/metrics/histogram.h"
16 #include "base/metrics/sparse_histogram.h"
17 #include "base/stl_util.h"
18 #include "base/strings/string_number_conversions.h"
19 #include "base/strings/string_util.h"
20 #include "base/strings/stringprintf.h"
21 #include "base/time/time.h"
22 #include "base/version.h"
23 #include "chrome/browser/chrome_notification_types.h"
24 #include "chrome/browser/extensions/updater/extension_cache.h"
25 #include "chrome/browser/extensions/updater/request_queue_impl.h"
26 #include "chrome/browser/extensions/updater/safe_manifest_parser.h"
27 #include "chrome/browser/metrics/chrome_metrics_service_accessor.h"
28 #include "chrome/common/chrome_switches.h"
29 #include "chrome/common/chrome_version_info.h"
30 #include "chrome/common/extensions/extension_constants.h"
31 #include "chrome/common/extensions/manifest_url_handler.h"
32 #include "content/public/browser/browser_thread.h"
33 #include "content/public/browser/notification_details.h"
34 #include "content/public/browser/notification_service.h"
35 #include "net/base/backoff_entry.h"
36 #include "net/base/load_flags.h"
37 #include "net/base/net_errors.h"
38 #include "net/url_request/url_fetcher.h"
39 #include "net/url_request/url_request_context_getter.h"
40 #include "net/url_request/url_request_status.h"
43 using base::TimeDelta
;
44 using content::BrowserThread
;
46 namespace extensions
{
48 const char ExtensionDownloader::kBlacklistAppID
[] = "com.google.crx.blacklist";
52 const net::BackoffEntry::Policy kDefaultBackoffPolicy
= {
53 // Number of initial errors (in sequence) to ignore before applying
54 // exponential back-off rules.
57 // Initial delay for exponential back-off in ms.
60 // Factor by which the waiting time will be multiplied.
63 // Fuzzing percentage. ex: 10% will spread requests randomly
64 // between 90%-100% of the calculated time.
67 // Maximum amount of time we are willing to delay our request in ms.
70 // Time to keep an entry from being discarded even when it
71 // has no significant state, -1 to never discard.
74 // Don't use initial delay unless the last request was an error.
78 const char kAuthUserQueryKey
[] = "authuser";
80 const int kMaxAuthUserValue
= 10;
82 const char kNotFromWebstoreInstallSource
[] = "notfromwebstore";
83 const char kDefaultInstallSource
[] = "";
// Records |retry_count| for a fetch into one of two UMA histograms,
// bucketed by whether |url| is on a google.com domain (i.e. the webstore)
// or a third-party update server. The garbled source lost the "} else {"
// and closing "}" lines; they are restored here -- the two symmetric
// UMA_HISTOGRAM_CUSTOM_COUNTS calls make the intended shape unambiguous.
#define RETRY_HISTOGRAM(name, retry_count, url) \
    if ((url).DomainIs("google.com")) { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    } else { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    }
96 bool ShouldRetryRequest(const net::URLRequestStatus
& status
,
98 // Retry if the response code is a server error, or the request failed because
99 // of network errors as opposed to file errors.
100 return ((response_code
>= 500 && status
.is_success()) ||
101 status
.status() == net::URLRequestStatus::FAILED
);
104 bool ShouldRetryRequestWithCookies(const net::URLRequestStatus
& status
,
106 bool included_cookies
) {
107 if (included_cookies
)
110 if (status
.status() == net::URLRequestStatus::CANCELED
)
113 // Retry if a 401 or 403 is received.
114 return (status
.status() == net::URLRequestStatus::SUCCESS
&&
115 (response_code
== 401 || response_code
== 403));
118 bool ShouldRetryRequestWithNextUser(const net::URLRequestStatus
& status
,
120 bool included_cookies
) {
121 // Retry if a 403 is received in response to a request including cookies.
122 // Note that receiving a 401 in response to a request which included cookies
123 // should indicate that the |authuser| index was out of bounds for the profile
124 // and therefore Chrome should NOT retry with another index.
125 return (status
.status() == net::URLRequestStatus::SUCCESS
&&
126 response_code
== 403 && included_cookies
);
129 // This parses and updates a URL query such that the value of the |authuser|
130 // query parameter is incremented by 1. If parameter was not present in the URL,
131 // it will be added with a value of 1. All other query keys and values are
132 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
134 bool IncrementAuthUserIndex(GURL
* url
) {
136 std::string old_query
= url
->query();
137 std::vector
<std::string
> new_query_parts
;
138 url::Component
query(0, old_query
.length());
139 url::Component key
, value
;
140 while (url::ExtractQueryKeyValue(old_query
.c_str(), &query
, &key
, &value
)) {
141 std::string key_string
= old_query
.substr(key
.begin
, key
.len
);
142 std::string value_string
= old_query
.substr(value
.begin
, value
.len
);
143 if (key_string
== kAuthUserQueryKey
) {
144 base::StringToInt(value_string
, &user_index
);
146 new_query_parts
.push_back(base::StringPrintf(
147 "%s=%s", key_string
.c_str(), value_string
.c_str()));
150 if (user_index
>= kMaxAuthUserValue
)
152 new_query_parts
.push_back(
153 base::StringPrintf("%s=%d", kAuthUserQueryKey
, user_index
+ 1));
154 std::string new_query_string
= JoinString(new_query_parts
, '&');
155 url::Component
new_query(0, new_query_string
.size());
156 url::Replacements
<char> replacements
;
157 replacements
.SetQuery(new_query_string
.c_str(), new_query
);
158 *url
= url
->ReplaceComponents(replacements
);
// Payload carried by the update-found notification (see NotifyUpdateFound):
// the id of the extension and the version the update server offered.
UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}
169 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
170 : url(), is_protected(false) {}
172 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
173 const std::string
& id
,
175 const std::string
& package_hash
,
176 const std::string
& version
,
177 const std::set
<int>& request_ids
)
178 : id(id
), url(url
), package_hash(package_hash
), version(version
),
179 request_ids(request_ids
), is_protected(false) {}
181 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
183 ExtensionDownloader::ExtensionDownloader(
184 ExtensionDownloaderDelegate
* delegate
,
185 net::URLRequestContextGetter
* request_context
)
186 : delegate_(delegate
),
187 request_context_(request_context
),
188 weak_ptr_factory_(this),
189 manifests_queue_(&kDefaultBackoffPolicy
,
190 base::Bind(&ExtensionDownloader::CreateManifestFetcher
,
191 base::Unretained(this))),
192 extensions_queue_(&kDefaultBackoffPolicy
,
193 base::Bind(&ExtensionDownloader::CreateExtensionFetcher
,
194 base::Unretained(this))),
195 extension_cache_(NULL
) {
197 DCHECK(request_context_
);
200 ExtensionDownloader::~ExtensionDownloader() {}
202 bool ExtensionDownloader::AddExtension(const Extension
& extension
,
204 // Skip extensions with empty update URLs converted from user
206 if (extension
.converted_from_user_script() &&
207 ManifestURL::GetUpdateURL(&extension
).is_empty()) {
211 // If the extension updates itself from the gallery, ignore any update URL
212 // data. At the moment there is no extra data that an extension can
213 // communicate to the the gallery update servers.
214 std::string update_url_data
;
215 if (!ManifestURL::UpdatesFromGallery(&extension
))
216 update_url_data
= delegate_
->GetUpdateUrlData(extension
.id());
218 return AddExtensionData(extension
.id(), *extension
.version(),
220 ManifestURL::GetUpdateURL(&extension
),
221 update_url_data
, request_id
);
224 bool ExtensionDownloader::AddPendingExtension(const std::string
& id
,
225 const GURL
& update_url
,
227 // Use a zero version to ensure that a pending extension will always
228 // be updated, and thus installed (assuming all extensions have
229 // non-zero versions).
230 Version
version("0.0.0.0");
231 DCHECK(version
.IsValid());
233 return AddExtensionData(id
,
235 Manifest::TYPE_UNKNOWN
,
241 void ExtensionDownloader::StartAllPending(ExtensionCache
* cache
) {
243 extension_cache_
= cache
;
244 extension_cache_
->Start(base::Bind(
245 &ExtensionDownloader::DoStartAllPending
,
246 weak_ptr_factory_
.GetWeakPtr()));
252 void ExtensionDownloader::DoStartAllPending() {
254 url_stats_
= URLStats();
256 for (FetchMap::iterator it
= fetches_preparing_
.begin();
257 it
!= fetches_preparing_
.end(); ++it
) {
258 std::vector
<linked_ptr
<ManifestFetchData
> >& list
= it
->second
;
259 for (size_t i
= 0; i
< list
.size(); ++i
) {
260 StartUpdateCheck(scoped_ptr
<ManifestFetchData
>(list
[i
].release()));
263 fetches_preparing_
.clear();
266 void ExtensionDownloader::StartBlacklistUpdate(
267 const std::string
& version
,
268 const ManifestFetchData::PingData
& ping_data
,
270 // Note: it is very important that we use the https version of the update
271 // url here to avoid DNS hijacking of the blacklist, which is not validated
272 // by a public key signature like .crx files are.
273 scoped_ptr
<ManifestFetchData
> blacklist_fetch(
274 new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
276 DCHECK(blacklist_fetch
->base_url().SchemeIsSecure());
277 blacklist_fetch
->AddExtension(kBlacklistAppID
,
281 kDefaultInstallSource
);
282 StartUpdateCheck(blacklist_fetch
.Pass());
285 bool ExtensionDownloader::AddExtensionData(const std::string
& id
,
286 const Version
& version
,
287 Manifest::Type extension_type
,
288 const GURL
& extension_update_url
,
289 const std::string
& update_url_data
,
291 GURL
update_url(extension_update_url
);
292 // Skip extensions with non-empty invalid update URLs.
293 if (!update_url
.is_empty() && !update_url
.is_valid()) {
294 LOG(WARNING
) << "Extension " << id
<< " has invalid update url "
299 // Make sure we use SSL for store-hosted extensions.
300 if (extension_urls::IsWebstoreUpdateUrl(update_url
) &&
301 !update_url
.SchemeIsSecure())
302 update_url
= extension_urls::GetWebstoreUpdateUrl();
304 // Skip extensions with empty IDs.
306 LOG(WARNING
) << "Found extension with empty ID";
310 if (update_url
.DomainIs("google.com")) {
311 url_stats_
.google_url_count
++;
312 } else if (update_url
.is_empty()) {
313 url_stats_
.no_url_count
++;
314 // Fill in default update URL.
315 update_url
= extension_urls::GetWebstoreUpdateUrl();
317 url_stats_
.other_url_count
++;
320 switch (extension_type
) {
321 case Manifest::TYPE_THEME
:
322 ++url_stats_
.theme_count
;
324 case Manifest::TYPE_EXTENSION
:
325 case Manifest::TYPE_USER_SCRIPT
:
326 ++url_stats_
.extension_count
;
328 case Manifest::TYPE_HOSTED_APP
:
329 case Manifest::TYPE_LEGACY_PACKAGED_APP
:
330 ++url_stats_
.app_count
;
332 case Manifest::TYPE_PLATFORM_APP
:
333 ++url_stats_
.platform_app_count
;
335 case Manifest::TYPE_UNKNOWN
:
337 ++url_stats_
.pending_count
;
341 std::vector
<GURL
> update_urls
;
342 update_urls
.push_back(update_url
);
343 // If UMA is enabled, also add to ManifestFetchData for the
344 // webstore update URL.
345 if (!extension_urls::IsWebstoreUpdateUrl(update_url
) &&
346 ChromeMetricsServiceAccessor::IsMetricsReportingEnabled()) {
347 update_urls
.push_back(extension_urls::GetWebstoreUpdateUrl());
350 for (size_t i
= 0; i
< update_urls
.size(); ++i
) {
351 DCHECK(!update_urls
[i
].is_empty());
352 DCHECK(update_urls
[i
].is_valid());
354 std::string install_source
= i
== 0 ?
355 kDefaultInstallSource
: kNotFromWebstoreInstallSource
;
357 ManifestFetchData::PingData ping_data
;
358 ManifestFetchData::PingData
* optional_ping_data
= NULL
;
359 if (delegate_
->GetPingDataForExtension(id
, &ping_data
))
360 optional_ping_data
= &ping_data
;
362 // Find or create a ManifestFetchData to add this extension to.
364 FetchMap::iterator existing_iter
= fetches_preparing_
.find(
365 std::make_pair(request_id
, update_urls
[i
]));
366 if (existing_iter
!= fetches_preparing_
.end() &&
367 !existing_iter
->second
.empty()) {
368 // Try to add to the ManifestFetchData at the end of the list.
369 ManifestFetchData
* existing_fetch
= existing_iter
->second
.back().get();
370 if (existing_fetch
->AddExtension(id
, version
.GetString(),
371 optional_ping_data
, update_url_data
,
377 // Otherwise add a new element to the list, if the list doesn't exist or
378 // if its last element is already full.
379 linked_ptr
<ManifestFetchData
> fetch(
380 new ManifestFetchData(update_urls
[i
], request_id
));
381 fetches_preparing_
[std::make_pair(request_id
, update_urls
[i
])].
383 added
= fetch
->AddExtension(id
, version
.GetString(),
394 void ExtensionDownloader::ReportStats() const {
395 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
396 url_stats_
.extension_count
);
397 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
398 url_stats_
.theme_count
);
399 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
400 url_stats_
.app_count
);
401 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
402 url_stats_
.platform_app_count
);
403 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
404 url_stats_
.pending_count
);
405 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
406 url_stats_
.google_url_count
);
407 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
408 url_stats_
.other_url_count
);
409 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
410 url_stats_
.no_url_count
);
413 void ExtensionDownloader::StartUpdateCheck(
414 scoped_ptr
<ManifestFetchData
> fetch_data
) {
415 const std::set
<std::string
>& id_set(fetch_data
->extension_ids());
417 if (CommandLine::ForCurrentProcess()->HasSwitch(
418 switches::kDisableBackgroundNetworking
)) {
419 NotifyExtensionsDownloadFailed(id_set
,
420 fetch_data
->request_ids(),
421 ExtensionDownloaderDelegate::DISABLED
);
425 RequestQueue
<ManifestFetchData
>::iterator i
;
426 for (i
= manifests_queue_
.begin(); i
!= manifests_queue_
.end(); ++i
) {
427 if (fetch_data
->full_url() == i
->full_url()) {
428 // This url is already scheduled to be fetched.
429 i
->Merge(*fetch_data
);
434 if (manifests_queue_
.active_request() &&
435 manifests_queue_
.active_request()->full_url() == fetch_data
->full_url()) {
436 manifests_queue_
.active_request()->Merge(*fetch_data
);
438 UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
439 fetch_data
->full_url().possibly_invalid_spec().length());
441 manifests_queue_
.ScheduleRequest(fetch_data
.Pass());
445 void ExtensionDownloader::CreateManifestFetcher() {
447 std::vector
<std::string
> id_vector(
448 manifests_queue_
.active_request()->extension_ids().begin(),
449 manifests_queue_
.active_request()->extension_ids().end());
450 std::string id_list
= JoinString(id_vector
, ',');
451 VLOG(2) << "Fetching " << manifests_queue_
.active_request()->full_url()
452 << " for " << id_list
;
455 manifest_fetcher_
.reset(net::URLFetcher::Create(
456 kManifestFetcherId
, manifests_queue_
.active_request()->full_url(),
457 net::URLFetcher::GET
, this));
458 manifest_fetcher_
->SetRequestContext(request_context_
);
459 manifest_fetcher_
->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES
|
460 net::LOAD_DO_NOT_SAVE_COOKIES
|
461 net::LOAD_DISABLE_CACHE
);
462 // Update checks can be interrupted if a network change is detected; this is
463 // common for the retail mode AppPack on ChromeOS. Retrying once should be
464 // enough to recover in those cases; let the fetcher retry up to 3 times
465 // just in case. http://crosbug.com/130602
466 manifest_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
467 manifest_fetcher_
->Start();
470 void ExtensionDownloader::OnURLFetchComplete(
471 const net::URLFetcher
* source
) {
472 VLOG(2) << source
->GetResponseCode() << " " << source
->GetURL();
474 if (source
== manifest_fetcher_
.get()) {
476 source
->GetResponseAsString(&data
);
477 OnManifestFetchComplete(source
->GetURL(),
479 source
->GetResponseCode(),
480 source
->GetBackoffDelay(),
482 } else if (source
== extension_fetcher_
.get()) {
483 OnCRXFetchComplete(source
,
486 source
->GetResponseCode(),
487 source
->GetBackoffDelay());
493 void ExtensionDownloader::OnManifestFetchComplete(
495 const net::URLRequestStatus
& status
,
497 const base::TimeDelta
& backoff_delay
,
498 const std::string
& data
) {
499 // We want to try parsing the manifest, and if it indicates updates are
500 // available, we want to fire off requests to fetch those updates.
501 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
502 (response_code
== 200 || (url
.SchemeIsFile() && data
.length() > 0))) {
503 RETRY_HISTOGRAM("ManifestFetchSuccess",
504 manifests_queue_
.active_request_failure_count(), url
);
505 VLOG(2) << "beginning manifest parse for " << url
;
506 scoped_refptr
<SafeManifestParser
> safe_parser(
507 new SafeManifestParser(
509 manifests_queue_
.reset_active_request().release(),
510 base::Bind(&ExtensionDownloader::HandleManifestResults
,
511 weak_ptr_factory_
.GetWeakPtr())));
512 safe_parser
->Start();
514 VLOG(1) << "Failed to fetch manifest '" << url
.possibly_invalid_spec()
515 << "' response code:" << response_code
;
516 if (ShouldRetryRequest(status
, response_code
) &&
517 manifests_queue_
.active_request_failure_count() < kMaxRetries
) {
518 manifests_queue_
.RetryRequest(backoff_delay
);
520 RETRY_HISTOGRAM("ManifestFetchFailure",
521 manifests_queue_
.active_request_failure_count(), url
);
522 NotifyExtensionsDownloadFailed(
523 manifests_queue_
.active_request()->extension_ids(),
524 manifests_queue_
.active_request()->request_ids(),
525 ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED
);
528 manifest_fetcher_
.reset();
529 manifests_queue_
.reset_active_request();
531 // If we have any pending manifest requests, fire off the next one.
532 manifests_queue_
.StartNextRequest();
535 void ExtensionDownloader::HandleManifestResults(
536 const ManifestFetchData
& fetch_data
,
537 const UpdateManifest::Results
* results
) {
538 // Keep a list of extensions that will not be updated, so that the |delegate_|
539 // can be notified once we're done here.
540 std::set
<std::string
> not_updated(fetch_data
.extension_ids());
543 NotifyExtensionsDownloadFailed(
545 fetch_data
.request_ids(),
546 ExtensionDownloaderDelegate::MANIFEST_INVALID
);
550 // Examine the parsed manifest and kick off fetches of any new crx files.
551 std::vector
<int> updates
;
552 DetermineUpdates(fetch_data
, *results
, &updates
);
553 for (size_t i
= 0; i
< updates
.size(); i
++) {
554 const UpdateManifest::Result
* update
= &(results
->list
.at(updates
[i
]));
555 const std::string
& id
= update
->extension_id
;
556 not_updated
.erase(id
);
558 GURL crx_url
= update
->crx_url
;
559 if (id
!= kBlacklistAppID
) {
560 NotifyUpdateFound(update
->extension_id
, update
->version
);
562 // The URL of the blacklist file is returned by the server and we need to
563 // be sure that we continue to be able to reliably detect whether a URL
564 // references a blacklist file.
565 DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url
)) << crx_url
;
567 // Force https (crbug.com/129587).
568 if (!crx_url
.SchemeIsSecure()) {
569 url::Replacements
<char> replacements
;
570 std::string
scheme("https");
571 replacements
.SetScheme(scheme
.c_str(),
572 url::Component(0, scheme
.size()));
573 crx_url
= crx_url
.ReplaceComponents(replacements
);
576 scoped_ptr
<ExtensionFetch
> fetch(new ExtensionFetch(
577 update
->extension_id
, crx_url
, update
->package_hash
,
578 update
->version
, fetch_data
.request_ids()));
579 FetchUpdatedExtension(fetch
.Pass());
582 // If the manifest response included a <daystart> element, we want to save
583 // that value for any extensions which had sent a ping in the request.
584 if (fetch_data
.base_url().DomainIs("google.com") &&
585 results
->daystart_elapsed_seconds
>= 0) {
587 Time::Now() - TimeDelta::FromSeconds(results
->daystart_elapsed_seconds
);
589 const std::set
<std::string
>& extension_ids
= fetch_data
.extension_ids();
590 std::set
<std::string
>::const_iterator i
;
591 for (i
= extension_ids
.begin(); i
!= extension_ids
.end(); i
++) {
592 const std::string
& id
= *i
;
593 ExtensionDownloaderDelegate::PingResult
& result
= ping_results_
[id
];
594 result
.did_ping
= fetch_data
.DidPing(id
, ManifestFetchData::ROLLCALL
);
595 result
.day_start
= day_start
;
599 NotifyExtensionsDownloadFailed(
601 fetch_data
.request_ids(),
602 ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE
);
605 void ExtensionDownloader::DetermineUpdates(
606 const ManifestFetchData
& fetch_data
,
607 const UpdateManifest::Results
& possible_updates
,
608 std::vector
<int>* result
) {
609 // This will only be valid if one of possible_updates specifies
610 // browser_min_version.
611 Version browser_version
;
613 for (size_t i
= 0; i
< possible_updates
.list
.size(); i
++) {
614 const UpdateManifest::Result
* update
= &possible_updates
.list
[i
];
615 const std::string
& id
= update
->extension_id
;
617 if (!fetch_data
.Includes(id
)) {
618 VLOG(2) << "Ignoring " << id
<< " from this manifest";
623 if (update
->version
.empty())
624 VLOG(2) << "manifest indicates " << id
<< " has no update";
626 VLOG(2) << "manifest indicates " << id
627 << " latest version is '" << update
->version
<< "'";
630 if (!delegate_
->IsExtensionPending(id
)) {
631 // If we're not installing pending extension, and the update
632 // version is the same or older than what's already installed,
635 if (!delegate_
->GetExtensionExistingVersion(id
, &version
)) {
636 VLOG(2) << id
<< " is not installed";
640 VLOG(2) << id
<< " is at '" << version
<< "'";
642 Version
existing_version(version
);
643 Version
update_version(update
->version
);
645 if (!update_version
.IsValid() ||
646 update_version
.CompareTo(existing_version
) <= 0) {
651 // If the update specifies a browser minimum version, do we qualify?
652 if (update
->browser_min_version
.length() > 0) {
653 // First determine the browser version if we haven't already.
654 if (!browser_version
.IsValid()) {
655 chrome::VersionInfo version_info
;
656 if (version_info
.is_valid())
657 browser_version
= Version(version_info
.Version());
659 Version
browser_min_version(update
->browser_min_version
);
660 if (browser_version
.IsValid() && browser_min_version
.IsValid() &&
661 browser_min_version
.CompareTo(browser_version
) > 0) {
662 // TODO(asargent) - We may want this to show up in the extensions UI
663 // eventually. (http://crbug.com/12547).
664 LOG(WARNING
) << "Updated version of extension " << id
665 << " available, but requires chrome version "
666 << update
->browser_min_version
;
670 VLOG(2) << "will try to update " << id
;
671 result
->push_back(i
);
675 // Begins (or queues up) download of an updated extension.
676 void ExtensionDownloader::FetchUpdatedExtension(
677 scoped_ptr
<ExtensionFetch
> fetch_data
) {
678 if (!fetch_data
->url
.is_valid()) {
679 // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
680 LOG(ERROR
) << "Invalid URL: '" << fetch_data
->url
.possibly_invalid_spec()
681 << "' for extension " << fetch_data
->id
;
685 for (RequestQueue
<ExtensionFetch
>::iterator iter
=
686 extensions_queue_
.begin();
687 iter
!= extensions_queue_
.end(); ++iter
) {
688 if (iter
->id
== fetch_data
->id
|| iter
->url
== fetch_data
->url
) {
689 iter
->request_ids
.insert(fetch_data
->request_ids
.begin(),
690 fetch_data
->request_ids
.end());
691 return; // already scheduled
695 if (extensions_queue_
.active_request() &&
696 extensions_queue_
.active_request()->url
== fetch_data
->url
) {
697 extensions_queue_
.active_request()->request_ids
.insert(
698 fetch_data
->request_ids
.begin(), fetch_data
->request_ids
.end());
701 if (extension_cache_
&&
702 extension_cache_
->GetExtension(fetch_data
->id
, NULL
, &version
) &&
703 version
== fetch_data
->version
) {
704 base::FilePath crx_path
;
705 // Now get .crx file path and mark extension as used.
706 extension_cache_
->GetExtension(fetch_data
->id
, &crx_path
, &version
);
707 NotifyDelegateDownloadFinished(fetch_data
.Pass(), crx_path
, false);
709 extensions_queue_
.ScheduleRequest(fetch_data
.Pass());
714 void ExtensionDownloader::NotifyDelegateDownloadFinished(
715 scoped_ptr
<ExtensionFetch
> fetch_data
,
716 const base::FilePath
& crx_path
,
717 bool file_ownership_passed
) {
718 delegate_
->OnExtensionDownloadFinished(fetch_data
->id
, crx_path
,
719 file_ownership_passed
, fetch_data
->url
, fetch_data
->version
,
720 ping_results_
[fetch_data
->id
], fetch_data
->request_ids
);
721 ping_results_
.erase(fetch_data
->id
);
724 void ExtensionDownloader::CreateExtensionFetcher() {
725 const ExtensionFetch
* fetch
= extensions_queue_
.active_request();
726 int load_flags
= net::LOAD_DISABLE_CACHE
;
727 if (!fetch
->is_protected
|| !fetch
->url
.SchemeIs("https")) {
728 load_flags
|= net::LOAD_DO_NOT_SEND_COOKIES
|
729 net::LOAD_DO_NOT_SAVE_COOKIES
;
731 extension_fetcher_
.reset(net::URLFetcher::Create(
732 kExtensionFetcherId
, fetch
->url
, net::URLFetcher::GET
, this));
733 extension_fetcher_
->SetRequestContext(request_context_
);
734 extension_fetcher_
->SetLoadFlags(load_flags
);
735 extension_fetcher_
->SetAutomaticallyRetryOnNetworkChanges(3);
736 // Download CRX files to a temp file. The blacklist is small and will be
737 // processed in memory, so it is fetched into a string.
738 if (fetch
->id
!= kBlacklistAppID
) {
739 extension_fetcher_
->SaveResponseToTemporaryFile(
740 BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
743 VLOG(2) << "Starting fetch of " << fetch
->url
<< " for " << fetch
->id
;
745 extension_fetcher_
->Start();
748 void ExtensionDownloader::OnCRXFetchComplete(
749 const net::URLFetcher
* source
,
751 const net::URLRequestStatus
& status
,
753 const base::TimeDelta
& backoff_delay
) {
754 const std::string
& id
= extensions_queue_
.active_request()->id
;
755 if (status
.status() == net::URLRequestStatus::SUCCESS
&&
756 (response_code
== 200 || url
.SchemeIsFile())) {
757 RETRY_HISTOGRAM("CrxFetchSuccess",
758 extensions_queue_
.active_request_failure_count(), url
);
759 base::FilePath crx_path
;
760 // Take ownership of the file at |crx_path|.
761 CHECK(source
->GetResponseAsFilePath(true, &crx_path
));
762 scoped_ptr
<ExtensionFetch
> fetch_data
=
763 extensions_queue_
.reset_active_request();
764 if (extension_cache_
) {
765 const std::string
& version
= fetch_data
->version
;
766 extension_cache_
->PutExtension(id
, crx_path
, version
,
767 base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished
,
768 weak_ptr_factory_
.GetWeakPtr(),
769 base::Passed(&fetch_data
)));
771 NotifyDelegateDownloadFinished(fetch_data
.Pass(), crx_path
, true);
773 } else if (ShouldRetryRequestWithCookies(
776 extensions_queue_
.active_request()->is_protected
)) {
777 // Requeue the fetch with |is_protected| set, enabling cookies.
778 extensions_queue_
.active_request()->is_protected
= true;
779 extensions_queue_
.RetryRequest(backoff_delay
);
780 } else if (ShouldRetryRequestWithNextUser(
783 extensions_queue_
.active_request()->is_protected
) &&
784 IncrementAuthUserIndex(&extensions_queue_
.active_request()->url
)) {
785 extensions_queue_
.RetryRequest(backoff_delay
);
787 const std::set
<int>& request_ids
=
788 extensions_queue_
.active_request()->request_ids
;
789 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[id
];
790 VLOG(1) << "Failed to fetch extension '" << url
.possibly_invalid_spec()
791 << "' response code:" << response_code
;
792 if (ShouldRetryRequest(status
, response_code
) &&
793 extensions_queue_
.active_request_failure_count() < kMaxRetries
) {
794 extensions_queue_
.RetryRequest(backoff_delay
);
796 RETRY_HISTOGRAM("CrxFetchFailure",
797 extensions_queue_
.active_request_failure_count(), url
);
798 // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
799 UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status
.error());
800 delegate_
->OnExtensionDownloadFailed(
801 id
, ExtensionDownloaderDelegate::CRX_FETCH_FAILED
, ping
, request_ids
);
803 ping_results_
.erase(id
);
804 extensions_queue_
.reset_active_request();
807 extension_fetcher_
.reset();
809 // If there are any pending downloads left, start the next one.
810 extensions_queue_
.StartNextRequest();
813 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
814 const std::set
<std::string
>& extension_ids
,
815 const std::set
<int>& request_ids
,
816 ExtensionDownloaderDelegate::Error error
) {
817 for (std::set
<std::string
>::const_iterator it
= extension_ids
.begin();
818 it
!= extension_ids
.end(); ++it
) {
819 const ExtensionDownloaderDelegate::PingResult
& ping
= ping_results_
[*it
];
820 delegate_
->OnExtensionDownloadFailed(*it
, error
, ping
, request_ids
);
821 ping_results_
.erase(*it
);
825 void ExtensionDownloader::NotifyUpdateFound(const std::string
& id
,
826 const std::string
& version
) {
827 UpdateDetails
updateInfo(id
, Version(version
));
828 content::NotificationService::current()->Notify(
829 chrome::NOTIFICATION_EXTENSION_UPDATE_FOUND
,
830 content::NotificationService::AllBrowserContextsAndSources(),
831 content::Details
<UpdateDetails
>(&updateInfo
));
834 } // namespace extensions