# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An implementation of the server side of the Chromium sync protocol.

The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""

import cgi
import copy
import google.protobuf.text_format
import operator
import pickle
import random
import string
import sys
import threading
import time
import urlparse

import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import get_updates_caller_info_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_specifics_pb2
import managed_user_shared_setting_specifics_pb2
import managed_user_whitelist_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_enums_pb2
import sync_pb2
import synced_notification_app_info_specifics_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2
import wifi_credential_specifics_pb2

# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has.  But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER_WHITELIST,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    SYNCED_NOTIFICATION_APP_INFO,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING,
    WIFI_CREDENTIAL) = range(32)

# An enumeration of the frequencies at which the server should send errors
# to the client.  This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER_WHITELIST: SYNC_TYPE_FIELDS['managed_user_whitelist'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS['synced_notification_app_info'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    WIFI_CREDENTIAL: SYNC_TYPE_FIELDS['wifi_credential'],
}
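
# Illustrative usage (a sketch): each descriptor exposes both the lowercase
# field name and the wire data type id declared in sync.proto, e.g.
#   SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].name    # 'bookmark'
#   SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].number  # that field's data type id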

# The parent ID used to indicate a top-level node.
ROOT_ID = 0

# Unix time epoch +1 day in struct_time format.  The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after the start of the epoch, since in timezones
# with a positive UTC offset time.mktime throws an OverflowError rather
# than returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="


class Error(Exception):
  """Error class for this module."""


class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""


class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""


class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    self.datatypes = datatypes


class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""


class TransientError(Error):
  """The client would be sent a transient error."""


class SyncInducedError(Error):
  """The client would be sent an error."""


class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""


class ClientNotConnectedError(Error):
  """The client is not connected to the server."""


def GetEntryType(entry):
  """Extract the sync type from a SyncEntry.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or None
    if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
        the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]


def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned.  Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: An EntitySpecifics protobuf message whose extensions to
        enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]


def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].number


def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum)."""
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      return data_type
  raise DataTypeIdNotRecognized


def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      return data_type
  raise DataTypeIdNotRecognized
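
# Illustrative examples (a sketch): 'Bookmarks', 'bookmark' and 'BOOKMARKS'
# all resolve to BOOKMARK, since matching ignores case and a trailing 's';
# an all-digit string is instead treated as a protocol data type id and
# routed through ProtocolDataTypeIdToSyncType.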


def MakeNewKeystoreKey():
  """Returns a new random keystore key."""
  return ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for x in xrange(KEYSTORE_KEY_LENGTH))


def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].name


def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
      .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
      .values_by_number[caller_info_source].name


def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging.  If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
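
# Illustrative outputs (a sketch): a short list formats directly, e.g.
# 'bookmark+preference', while a list holding every type except THEME
# formats as 'all except theme', whichever rendering is more compact.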


def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    getattr(specifics, descriptor.name).SetInParent()
  return specifics


class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a server-created
        permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent.  If ROOT_ID, indicates
        a top-level item.  Otherwise, this must be the tag value of some other
        server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
        item.  This controls which types of client GetUpdates requests will
        cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup or not.
        This value is set to True in the default case.  Non-default permanent
        items are those that are created only when a client explicitly tells
        the server to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default


class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event.  Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally returns migrations in the order that they were
    triggered.  Doing it this way allows the client to queue up two migrations
    in a row, so the second one is received while responding to the first.

    Args:
      versions_map: a map whose keys are datatypes and whose values are
          versions.
    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
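
  # Illustrative sequence (a sketch): after Bump([BOOKMARK]) the history for
  # BOOKMARK becomes [1, 2], so GetLatestVersion(BOOKMARK) == 2 and
  # CheckAllCurrent({BOOKMARK: 1}) raises MigrationDoneError, which the
  # server surfaces to the client as MIGRATION_DONE.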


class UpdateSieve(object):
  """A filter to remove items the client has already seen."""

  def __init__(self, request, migration_history=None):
    self._original_request = request
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile
    Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
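
  # Illustrative round trip (a sketch): the progress-marker token is an
  # opaque pickle of (timestamp, migration_version), e.g. pickle.dumps((15,
  # 1)); on the next request __init__ recovers both values via pickle.loads
  # and resumes filtering from timestamp 15.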


class SyncDataModel(object):
  """Models the account state of one sync user."""

  # The maximum number of entries returned in each GetChanges batch.
  _BATCH_SIZE = 100

  # Specify all the permanent items that a model might need.
  _PERMANENT_ITEM_SPECS = [
      PermanentItem('google_chrome_apps', name='Apps',
                    parent_tag=ROOT_ID, sync_type=APPS),
      PermanentItem('google_chrome_app_list', name='App List',
                    parent_tag=ROOT_ID, sync_type=APP_LIST),
      PermanentItem('google_chrome_app_notifications',
                    name='App Notifications',
                    parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
      PermanentItem('google_chrome_app_settings',
                    name='App Settings',
                    parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
      PermanentItem('google_chrome_bookmarks', name='Bookmarks',
                    parent_tag=ROOT_ID, sync_type=BOOKMARK),
      PermanentItem('bookmark_bar', name='Bookmark Bar',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('other_bookmarks', name='Other Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('synced_bookmarks', name='Synced Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
                    create_by_default=False),
      PermanentItem('google_chrome_autofill', name='Autofill',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL),
      PermanentItem('google_chrome_autofill_profiles',
                    name='Autofill Profiles',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
      PermanentItem('google_chrome_device_info', name='Device Info',
                    parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
      PermanentItem('google_chrome_experiments', name='Experiments',
                    parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
      PermanentItem('google_chrome_extension_settings',
                    name='Extension Settings',
                    parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
      PermanentItem('google_chrome_extensions', name='Extensions',
                    parent_tag=ROOT_ID, sync_type=EXTENSIONS),
      PermanentItem('google_chrome_history_delete_directives',
                    name='History Delete Directives',
                    parent_tag=ROOT_ID,
                    sync_type=HISTORY_DELETE_DIRECTIVE),
      PermanentItem('google_chrome_favicon_images',
                    name='Favicon Images',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_IMAGES),
      PermanentItem('google_chrome_favicon_tracking',
                    name='Favicon Tracking',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_TRACKING),
      PermanentItem('google_chrome_managed_user_settings',
                    name='Managed User Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
      PermanentItem('google_chrome_managed_users',
                    name='Managed Users',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER),
      PermanentItem('google_chrome_managed_user_shared_settings',
                    name='Managed User Shared Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
      PermanentItem('google_chrome_managed_user_whitelists',
                    name='Managed User Whitelists', parent_tag=ROOT_ID,
                    sync_type=MANAGED_USER_WHITELIST),
      PermanentItem('google_chrome_nigori', name='Nigori',
                    parent_tag=ROOT_ID, sync_type=NIGORI),
      PermanentItem('google_chrome_passwords', name='Passwords',
                    parent_tag=ROOT_ID, sync_type=PASSWORD),
      PermanentItem('google_chrome_preferences', name='Preferences',
                    parent_tag=ROOT_ID, sync_type=PREFERENCE),
      PermanentItem('google_chrome_priority_preferences',
                    name='Priority Preferences',
                    parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
      PermanentItem('google_chrome_synced_notifications',
                    name='Synced Notifications',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
      PermanentItem('google_chrome_synced_notification_app_info',
                    name='Synced Notification App Info',
                    parent_tag=ROOT_ID,
                    sync_type=SYNCED_NOTIFICATION_APP_INFO),
      PermanentItem('google_chrome_search_engines', name='Search Engines',
                    parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
      PermanentItem('google_chrome_sessions', name='Sessions',
                    parent_tag=ROOT_ID, sync_type=SESSION),
      PermanentItem('google_chrome_themes', name='Themes',
                    parent_tag=ROOT_ID, sync_type=THEME),
      PermanentItem('google_chrome_typed_urls', name='Typed URLs',
                    parent_tag=ROOT_ID, sync_type=TYPED_URL),
      PermanentItem('google_chrome_wifi_credentials', name='WiFi Credentials',
                    parent_tag=ROOT_ID, sync_type=WIFI_CREDENTIAL),
      PermanentItem('google_chrome_dictionary', name='Dictionary',
                    parent_tag=ROOT_ID, sync_type=DICTIONARY),
      PermanentItem('google_chrome_articles', name='Articles',
                    parent_tag=ROOT_ID, sync_type=ARTICLE),
  ]

  def __init__(self):
    # Monotonically increasing version number.  The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    self.ResetStoreBirthday()
    self.migration_history = MigrationHistory()
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    self._keys = [MakeNewKeystoreKey()]

  def _SaveEntry(self, entry):
    """Insert or update an entry in the change log, and give it a new version.

    The ID fields of this entry are assumed to be valid server IDs.  This
    entry will be updated with a new version number and sync_timestamp.

    Args:
      entry: The entry to be added or updated.
    """
    self._version += 1
    # Maintain a global (rather than per-item) sequence number and use it
    # both as the per-entry version as well as the update-progress timestamp.
    # This simulates the behavior of the original server implementation.
    entry.version = self._version
    entry.sync_timestamp = self._version

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry:
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    self._entries[entry.id_string] = copy.deepcopy(entry)

  def _ServerTagToId(self, tag):
    """Determine the server ID from a server-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      tag: The unique, known-to-the-client tag of a server-generated item.
    Returns:
      The string value of the computed server ID.
    """
    if not tag or tag == ROOT_ID:
      return tag
    spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)

  def _TypeToTypeRootId(self, model_type):
    """Returns the server ID for the type root node of the given type."""
    tag = [x.tag for x in self._PERMANENT_ITEM_SPECS
           if x.sync_type == model_type][0]
    return self._ServerTagToId(tag)

  def _ClientTagToId(self, datatype, tag):
    """Determine the server ID from a client-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      tag: The unique, opaque-to-the-server tag of a client-tagged item.
    Returns:
      The string value of the computed server ID.
    """
    return self._MakeCurrentId(datatype, '<client tag>%s' % tag)

  def _ClientIdToId(self, datatype, client_guid, client_item_id):
    """Compute a unique server ID from a client-local ID tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      client_guid: A globally unique ID that identifies the client which
          created this item.
      client_item_id: An ID that uniquely identifies this item on the client
          which created it.
    Returns:
      The string value of the computed server ID.
    """
    # Using the client ID info is not required here (we could instead generate
    # a random ID), but it's useful for debugging.
    return self._MakeCurrentId(datatype,
                               '<server ID originally>%s/%s' %
                               (client_guid, client_item_id))

  def _MakeCurrentId(self, datatype, inner_id):
    return '%d^%d^%s' % (datatype,
                         self.migration_history.GetLatestVersion(datatype),
                         inner_id)
, id_string
):
682 if not id_string
or id_string
== ROOT_ID
:
684 datatype_string
, separator
, remainder
= id_string
.partition('^')
685 migration_version_string
, separator
, inner_id
= remainder
.partition('^')
686 return (int(datatype_string
), int(migration_version_string
), inner_id
)
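
  # Illustrative ID format (a sketch, assuming the enum ordering above): a
  # client-tagged bookmark might receive the server ID '8^1^<client tag>dead',
  # where 8 is BOOKMARK, 1 is the current migration version, and the rest is
  # the inner ID; _ExtractIdInfo returns (8, 1, '<client tag>dead') for it.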

  def _WritePosition(self, entry, parent_id):
    """Ensure the entry has an absolute, numeric position and parent_id.

    Historically, clients would specify positions using the predecessor-based
    references in the insert_after_item_id field; starting July 2011, this
    was changed and Chrome now sends up the absolute position.  The server
    must store a position_in_parent value and must not maintain
    insert_after_item_id.
    Starting in Jan 2013, the client will also send up a unique_position field
    which should be saved and returned on subsequent GetUpdates.

    Args:
      entry: The entry for which to write a position.  Its ID fields are
          assumed to be server IDs.  This entry will have its parent_id_string,
          position_in_parent and unique_position fields updated; its
          insert_after_item_id field will be cleared.
      parent_id: The ID of the entry intended as the new parent.
    """
    entry.parent_id_string = parent_id
    if not entry.HasField('position_in_parent'):
      entry.position_in_parent = 1337  # A debuggable, distinctive default.
    entry.ClearField('insert_after_item_id')

  def _ItemExists(self, id_string):
    """Determine whether an item exists in the changelog."""
    return id_string in self._entries

  def _CreatePermanentItem(self, spec):
    """Create one permanent item from its spec, if it doesn't exist.

    The resulting item is added to the changelog.

    Args:
      spec: A PermanentItem object holding the properties of the item to
          create.
    """
    id_string = self._ServerTagToId(spec.tag)
    if self._ItemExists(id_string):
      return
    print 'Creating permanent item: %s' % spec.name
    entry = sync_pb2.SyncEntity()
    entry.id_string = id_string
    entry.non_unique_name = spec.name
    entry.name = spec.name
    entry.server_defined_unique_tag = spec.tag
    entry.folder = True
    entry.deleted = False
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    self._SaveEntry(entry)

  def _CreateDefaultPermanentItems(self, requested_types):
    """Ensure creation of all default permanent items for a given set of types.

    Args:
      requested_types: A list of sync data types from ALL_TYPES.
          All default permanent items of only these types will be created.
    """
    for spec in self._PERMANENT_ITEM_SPECS:
      if spec.sync_type in requested_types and spec.create_by_default:
        self._CreatePermanentItem(spec)

  def ResetStoreBirthday(self):
    """Resets the store birthday to a random value."""
    # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    self.store_birthday = '%0.30f' % random.random()

  def StoreBirthday(self):
    """Gets the store birthday."""
    return self.store_birthday

  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    The returned entries are limited to being _BATCH_SIZE many.  The entries
    are returned in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
          has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining).  Version is a new
      timestamp value, which should be used as the starting point for the
      next query.  Entries is the batch of entries meeting the current
      timestamp query.  Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # Mobile bookmark folder is not created by default, create it only when
    # the client requests it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick batch_size new changes, and then filter them.  This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict batch to requested types.  Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))
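
  # Illustrative pacing (a sketch, assuming no items are filtered out): with
  # 250 pending changes and _BATCH_SIZE == 100, three successive GetChanges
  # calls return 100, 100 and 50 entries, reporting changes_remaining of
  # 150, 50 and 0 respectively.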

  def GetKeystoreKeys(self):
    """Returns the encryption keys for this account."""
    print "Returning encryption keys: %s" % self._keys
    return self._keys

  def _CopyOverImmutableFields(self, entry):
    """Preserve immutable fields by copying pre-commit state.

    Args:
      entry: A sync entity from the client.
    """
    if entry.id_string in self._entries:
      if self._entries[entry.id_string].HasField(
          'server_defined_unique_tag'):
        entry.server_defined_unique_tag = (
            self._entries[entry.id_string].server_defined_unique_tag)

  def _CheckVersionForCommit(self, entry):
    """Perform an optimistic concurrency check on the version number.

    Clients are only allowed to commit if they report having seen the most
    recent version of an object.

    Args:
      entry: A sync entity from the client.  It is assumed that ID fields
          have been converted to server IDs.
    Returns:
      A boolean value indicating whether the client's version matches the
      newest server version for the given entry.
    """
    if entry.id_string in self._entries:
      # Allow edits/deletes if the version matches, and any undeletion.
      return (self._entries[entry.id_string].version == entry.version or
              self._entries[entry.id_string].deleted)
    else:
      # Allow unknown ID only if the client thinks it's new too.
      return entry.version == 0
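
  # Illustrative check (a sketch): if the server holds version 6 of an entry
  # and a client commits version 5, this returns False and the commit is
  # answered with CONFLICT; committing version 0 of an ID the server has
  # never seen passes the check.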

  def _CheckParentIdForCommit(self, entry):
    """Check that the parent ID referenced in a SyncEntity actually exists.

    Args:
      entry: A sync entity from the client.  It is assumed that ID fields
          have been converted to server IDs.
    Returns:
      A boolean value indicating whether the entity's parent ID is an object
      that actually exists (and is not deleted) in the current account state.
    """
    if entry.parent_id_string == ROOT_ID:
      # This is generally allowed.
      return True
    if (not entry.HasField('parent_id_string') and
        entry.HasField('client_defined_unique_tag')):
      return True  # Unique client tag items do not need to specify a parent.
    if entry.parent_id_string not in self._entries:
      print 'Warning: Client sent unknown ID.  Should never happen.'
      return False
    if entry.parent_id_string == entry.id_string:
      print 'Warning: Client sent circular reference.  Should never happen.'
      return False
    if self._entries[entry.parent_id_string].deleted:
      # This can happen in a race condition between two clients.
      return False
    if not self._entries[entry.parent_id_string].folder:
      print 'Warning: Client sent non-folder parent.  Should never happen.'
      return False
    return True

  def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
    """Convert ID fields in a client sync entry to server IDs.

    A commit batch sent by a client may contain new items for which the
    server has not generated IDs yet.  And within a commit batch, later
    items are allowed to refer to earlier items.  This method will
    generate server IDs for new items, as well as rewrite references
    to items whose server IDs were generated earlier in the batch.

    Args:
      entry: The client sync entry to modify.
      cache_guid: The globally unique ID of the client that sent this
          commit request.
      commit_session: A dictionary mapping the original IDs to the new server
          IDs, for any items committed earlier in the batch.
    """
    if entry.version == 0:
      data_type = GetEntryType(entry)
      if entry.HasField('client_defined_unique_tag'):
        # When present, this should determine the item's ID.
        new_id = self._ClientTagToId(data_type,
                                     entry.client_defined_unique_tag)
      else:
        new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
        entry.originator_cache_guid = cache_guid
        entry.originator_client_item_id = entry.id_string
      commit_session[entry.id_string] = new_id  # Remember the remapping.
      entry.id_string = new_id
    if entry.parent_id_string in commit_session:
      entry.parent_id_string = commit_session[entry.parent_id_string]
    if entry.insert_after_item_id in commit_session:
      entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
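
  # Illustrative remap (a sketch with hypothetical client IDs): a new folder
  # committed with local ID 'c1' is assigned a fresh server ID and
  # commit_session['c1'] records the mapping, so a later entry in the same
  # batch whose parent_id_string is 'c1' is rewritten to the server ID.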

  def ValidateCommitEntries(self, entries):
    """Raise an exception if a commit batch contains any global errors.

    Args:
      entries: an iterable containing commit-form SyncEntity protocol buffers.

    Raises:
      MigrationDoneError: if any of the entries reference a recently-migrated
          datatype.
    """
    server_ids_in_commit = set()
    local_ids_in_commit = set()
    for entry in entries:
      if entry.version:
        server_ids_in_commit.add(entry.id_string)
      else:
        local_ids_in_commit.add(entry.id_string)
      if entry.HasField('parent_id_string'):
        if entry.parent_id_string not in local_ids_in_commit:
          server_ids_in_commit.add(entry.parent_id_string)

    versions_present = {}
    for server_id in server_ids_in_commit:
      parsed = self._ExtractIdInfo(server_id)
      if parsed:
        datatype, version, _ = parsed
        versions_present.setdefault(datatype, []).append(version)

    self.migration_history.CheckAllCurrent(
        dict((k, min(v)) for k, v in versions_present.iteritems()))

  def CommitEntry(self, entry, cache_guid, commit_session):
    """Attempt to commit one entry to the user's account.

    Args:
      entry: A SyncEntity protobuf representing desired object changes.
      cache_guid: A string value uniquely identifying the client; this
          is used for ID generation and will determine the
          originator_cache_guid for new items.
      commit_session: A dictionary mapping client IDs to server IDs for any
          objects committed earlier this session.  If the entry gets a new ID
          during commit, the change will be recorded here.
    Returns:
      A SyncEntity reflecting the post-commit value of the entry, or None
      if the entry was not committed due to an error.
    """
    entry = copy.deepcopy(entry)

    # Generate server IDs for this entry, and write generated server IDs
    # from earlier entries into the message's fields, as appropriate.  The
    # ID generation state is stored in 'commit_session'.
    self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

    # Sets the parent ID field for a client-tagged item.  The client is allowed
    # to not specify parents for these types of items.  The server can figure
    # out on its own what the parent ID for this entry should be.
    self._RewriteParentIdForUniqueClientEntry(entry)

    # Perform the optimistic concurrency check on the entry's version number.
    # Clients are not allowed to commit unless they indicate that they've seen
    # the most recent version of an object.
    if not self._CheckVersionForCommit(entry):
      return None

    # Check the validity of the parent ID; it must exist at this point.
    # TODO(nick): Implement cycle detection and resolution.
    if not self._CheckParentIdForCommit(entry):
      return None

    self._CopyOverImmutableFields(entry)

    # At this point, the commit is definitely going to happen.

    # Deletion works by storing a limited record for an entry, called a
    # tombstone.  A sync server must track deleted IDs forever, since it does
    # not keep track of client knowledge (there's no deletion ACK event).
    if entry.deleted:
      def MakeTombstone(id_string, datatype):
        """Make a tombstone entry that will replace the entry being deleted.

        Args:
          id_string: Index of the SyncEntity to be deleted.
        Returns:
          A new SyncEntity reflecting the fact that the entry is deleted.
        """
        # Only the ID, version and deletion state are preserved on a tombstone.
        tombstone = sync_pb2.SyncEntity()
        tombstone.id_string = id_string
        tombstone.deleted = True
        tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
        return tombstone

      def IsChild(child_id):
        """Check if a SyncEntity is a child of entry, or any of its children.

        Args:
          child_id: Index of the SyncEntity that is a possible child of entry.
        Returns:
          True if it is a child; false otherwise.
        """
        if child_id not in self._entries:
          return False
        if self._entries[child_id].parent_id_string == entry.id_string:
          return True
        return IsChild(self._entries[child_id].parent_id_string)

      # Identify any children entry might have.
      child_ids = [child.id_string for child in self._entries.itervalues()
                   if IsChild(child.id_string)]

      # Mark all children that were identified as deleted.
      for child_id in child_ids:
        datatype = GetEntryType(self._entries[child_id])
        self._SaveEntry(MakeTombstone(child_id, datatype))

      # Delete entry itself.
      datatype = GetEntryType(self._entries[entry.id_string])
      entry = MakeTombstone(entry.id_string, datatype)
    else:
      # Comments in sync.proto detail how the representation of positional
      # ordering works.
      #
      # We've almost fully deprecated the 'insert_after_item_id' field.
      # The 'position_in_parent' field is also deprecated, but as of Jan 2013
      # is still in common use.  The 'unique_position' field is the latest
      # and greatest in positioning technology.
      #
      # This server supports 'position_in_parent' and 'unique_position'.
      self._WritePosition(entry, entry.parent_id_string)

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry and not entry.HasField('originator_cache_guid'):
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Store the current time since the Unix epoch in milliseconds.
    entry.mtime = (int((time.mktime(time.gmtime()) -
        (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS)) * 1000))

    # Commit the change.  This also updates the version number.
    self._SaveEntry(entry)
    return entry

  def _RewriteVersionInId(self, id_string):
    """Rewrites an ID so that its migration version becomes current."""
    parsed_id = self._ExtractIdInfo(id_string)
    if not parsed_id:
      return id_string
    datatype, old_migration_version, inner_id = parsed_id
    return self._MakeCurrentId(datatype, inner_id)

  def _RewriteParentIdForUniqueClientEntry(self, entry):
    """Sets the entry's parent ID field to the appropriate value.

    The client must always set enough of the specifics of the entries it sends
    up such that the server can identify its type.  (See crbug.com/373859)

    The client is under no obligation to set the parent ID field.  The server
    can always infer what the appropriate parent for this model type should be.
    Having the client not send the parent ID is a step towards the removal of
    type root nodes.  (See crbug.com/373869)

    This server implements these features by "faking" the existence of a
    parent ID early on in the commit processing.

    This function has no effect on non-client-tagged items.
    """
    if not entry.HasField('client_defined_unique_tag'):
      return  # Skip this processing for non-client-tagged types.
    data_type = GetEntryType(entry)
    entry.parent_id_string = self._TypeToTypeRootId(data_type)

  def TriggerMigration(self, datatypes):
    """Cause a migration to occur for a set of datatypes on this account.

    Clients will see the MIGRATION_DONE error for these datatypes until they
    resync them.
    """
    self.migration_history.Bump(datatypes)
    all_entries = self._entries.values()
    self._entries.clear()
    for entry in all_entries:
      new_id = self._RewriteVersionInId(entry.id_string)
      entry.id_string = new_id
      if entry.HasField('parent_id_string'):
        entry.parent_id_string = self._RewriteVersionInId(
            entry.parent_id_string)
      self._entries[entry.id_string] = entry

  def TriggerSyncTabFavicons(self):
    """Set the 'sync_tab_favicons' field of this account's nigori node.

    If the field is not currently set, will write a new nigori node entry
    with the field set.  Else does nothing.
    """
    nigori_tag = "google_chrome_nigori"
    nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
    if nigori_original.specifics.nigori.sync_tab_favicons:
      return

    nigori_new = copy.deepcopy(nigori_original)
    nigori_new.specifics.nigori.sync_tab_favicons = True
    self._SaveEntry(nigori_new)

  def TriggerCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under the Bookmarks permanent item.

    Clients will then receive the Synced Bookmarks folder on future
    GetUpdates, and new bookmarks can be added within the Synced Bookmarks
    folder.
    """
    synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
                              if spec.name == "Synced Bookmarks"]
    self._CreatePermanentItem(synced_bookmarks_spec)

  def TriggerEnableKeystoreEncryption(self):
    """Create the keystore_encryption experiment entity and enable it.

    A new entity within the EXPERIMENTS datatype is created with the unique
    client tag "keystore_encryption" if it doesn't already exist.  The
    keystore_encryption message is then filled with |enabled| set to true.
    """
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    keystore_encryption_id = self._ClientTagToId(
        EXPERIMENTS,
        KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
    keystore_entry = self._entries.get(keystore_encryption_id)
    if keystore_entry is None:
      keystore_entry = sync_pb2.SyncEntity()
      keystore_entry.id_string = keystore_encryption_id
      keystore_entry.name = "Keystore Encryption"
      keystore_entry.client_defined_unique_tag = (
          KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
      keystore_entry.folder = False
      keystore_entry.deleted = False
      keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(keystore_entry, experiment_id)

    keystore_entry.specifics.experiments.keystore_encryption.enabled = True

    self._SaveEntry(keystore_entry)

  def TriggerRotateKeystoreKeys(self):
    """Rotate the current set of keystore encryption keys.

    |self._keys| will have a new random encryption key appended to it.  We
    touch the nigori node so that each client will receive the new encryption
    keys only once.
    """
    # Add a new encryption key.
    self._keys += [MakeNewKeystoreKey(), ]

    # Increment the nigori node's timestamp, so clients will get the new keys
    # on their next GetUpdates (any time the nigori node is sent back, we also
    # send back the keystore keys).
    nigori_tag = "google_chrome_nigori"
    self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))

  def TriggerAcknowledgeManagedUsers(self):
    """Set the "acknowledged" flag for any managed user entities that don't
    have it set already.
    """
    if not self.acknowledge_managed_users:
      return

    managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
                     if entry.specifics.HasField('managed_user')
                     and not entry.specifics.managed_user.acknowledged]
    for user in managed_users:
      user.specifics.managed_user.acknowledged = True
      self._SaveEntry(user)

  def TriggerEnablePreCommitGetUpdateAvoidance(self):
    """Sets the experiment to enable pre-commit GetUpdate avoidance."""
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    pre_commit_gu_avoidance_id = self._ClientTagToId(
        EXPERIMENTS,
        PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
    entry = self._entries.get(pre_commit_gu_avoidance_id)
    if entry is None:
      entry = sync_pb2.SyncEntity()
      entry.id_string = pre_commit_gu_avoidance_id
      entry.name = "Pre-commit GU avoidance"
      entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
      entry.folder = False
      entry.deleted = False
      entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(entry, experiment_id)
    entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
    self._SaveEntry(entry)

  def SetInducedError(self, error, error_frequency,
                      sync_count_before_errors):
    self.induced_error = error
    self.induced_error_frequency = error_frequency
    self.sync_count_before_errors = sync_count_before_errors

  def GetInducedError(self):
    return self.induced_error
):
1215 """Set the version to one more than the greatest version number seen."""
1216 entries
= sorted(self
._entries
.values(), key
=operator
.attrgetter('version'))
1217 if len(entries
) < 1:
1218 raise ClientNotConnectedError
1219 return entries
[-1].version
+ 1


class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse.  If set
    # to None, no ClientCommand should be sent.
    self._client_command = None

  def GetShortClientName(self, query):
    parsed = cgi.parse_qs(query[query.find('?')+1:])
    client_id = parsed.get('client_id')
    if not client_id:
      return '?'
    client_id = client_id[0]
    if client_id not in self.clients:
      self.clients[client_id] = self.client_name_generator.next()
    return self.clients[client_id]

  def CheckStoreBirthday(self, request):
    """Raises StoreBirthdayError if the request's birthday is a mismatch."""
    if not request.HasField('store_birthday'):
      return
    if self.account.StoreBirthday() != request.store_birthday:
      raise StoreBirthdayError

  def CheckTransientError(self):
    """Raises TransientError if the transient_error flag is set."""
    if self.transient_error:
      raise TransientError

  def CheckSendError(self):
    """Raises SyncInducedError if needed."""
    if (self.account.induced_error.error_type !=
        sync_enums_pb2.SyncEnums.UNKNOWN):
      # Always means return the given error for all requests.
      if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
        raise SyncInducedError
      # This means the FIRST 2 requests of every 3 requests
      # return an error.  Don't switch the order of failures.  There are
      # test cases that rely on the first 2 being the failure rather than
      # the last 2.
      elif (self.account.induced_error_frequency ==
            ERROR_FREQUENCY_TWO_THIRDS):
        if (((self.sync_count -
              self.account.sync_count_before_errors) % 3) != 0):
          raise SyncInducedError
      else:
        raise InducedErrorFrequencyNotDefined
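
  # Illustrative schedule (a sketch): with sync_count_before_errors == 0 and
  # frequency ERROR_FREQUENCY_TWO_THIRDS, requests 1 and 2 raise (1 % 3 and
  # 2 % 3 are nonzero) while request 3 passes (3 % 3 == 0), repeating every
  # three requests.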

  def HandleMigrate(self, path):
    query = urlparse.urlparse(path)[4]
    code = 200
    self.account_lock.acquire()
    try:
      datatypes = [DataTypeStringToSyncTypeLoose(x)
                   for x in urlparse.parse_qs(query).get('type', [])]
      if datatypes:
        self.account.TriggerMigration(datatypes)
        response = 'Migrated datatypes %s' % (
            ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
      else:
        response = 'Please specify one or more <i>type=name</i> parameters'
        code = 400
    except DataTypeIdNotRecognized:
      response = 'Could not interpret datatype name'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))

  def HandleSetInducedError(self, path):
    query = urlparse.urlparse(path)[4]
    self.account_lock.acquire()
    code = 200
    response = 'Success'
    error = sync_pb2.ClientToServerResponse.Error()
    try:
      error_type = urlparse.parse_qs(query)['error']
      action = urlparse.parse_qs(query)['action']
      error.error_type = int(error_type[0])
      error.action = int(action[0])
      try:
        error.url = (urlparse.parse_qs(query)['url'])[0]
      except KeyError:
        error.url = ''
      try:
        error.error_description = (
            (urlparse.parse_qs(query)['error_description'])[0])
      except KeyError:
        error.error_description = ''
      try:
        error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
      except KeyError:
        error_frequency = ERROR_FREQUENCY_ALWAYS
      self.account.SetInducedError(error, error_frequency, self.sync_count)
      response = ('Error = %d, action = %d, url = %s, description = %s' %
                  (error.error_type, error.action,
                   error.url,
                   error.error_description))
    except (KeyError, ValueError):
      # Missing or malformed 'error'/'action' parameters.
      response = 'Could not parse url'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))

  def HandleCreateBirthdayError(self):
    self.account.ResetStoreBirthday()
    return (
        200,
        '<html><title>Birthday error</title><H1>Birthday error</H1></html>')

  def HandleSetTransientError(self):
    self.transient_error = True
    return (
        200,
        '<html><title>Transient error</title><H1>Transient error</H1></html>')

  def HandleSetSyncTabFavicons(self):
    """Set 'sync_tab_favicons' field of the nigori node for this account."""
    self.account.TriggerSyncTabFavicons()
    return (
        200,
        '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')

  def HandleCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under Bookmarks."""
    self.account.TriggerCreateSyncedBookmarks()
    return (
        200,
        '<html><title>Synced Bookmarks</title>'
        '<H1>Synced Bookmarks</H1></html>')

  def HandleEnableKeystoreEncryption(self):
    """Enables the keystore encryption experiment."""
    self.account.TriggerEnableKeystoreEncryption()
    return (
        200,
        '<html><title>Enable Keystore Encryption</title>'
        '<H1>Enable Keystore Encryption</H1></html>')

  def HandleRotateKeystoreKeys(self):
    """Rotate the keystore encryption keys."""
    self.account.TriggerRotateKeystoreKeys()
    return (
        200,
        '<html><title>Rotate Keystore Keys</title>'
        '<H1>Rotate Keystore Keys</H1></html>')

  def HandleEnableManagedUserAcknowledgement(self):
    """Enable acknowledging newly created managed users."""
    self.account.acknowledge_managed_users = True
    return (
        200,
        '<html><title>Enable Managed User Acknowledgement</title>'
        '<h1>Enable Managed User Acknowledgement</h1></html>')

  def HandleEnablePreCommitGetUpdateAvoidance(self):
    """Enables the pre-commit GU avoidance experiment."""
    self.account.TriggerEnablePreCommitGetUpdateAvoidance()
    return (
        200,
        '<html><title>Enable pre-commit GU avoidance</title>'
        '<H1>Enable pre-commit GU avoidance</H1></html>')

  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class.  It is safe to call this
    method from multiple threads.

    Args:
      raw_request: An iterable byte sequence to be interpreted as a sync
          protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
      result code, while the second value is a string of bytes which is the
      serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    def print_context(direction):
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query),
                                      direction, __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError:
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError:
      ### This is deprecated now.  Would be removed once test cases are removed.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError:
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()

  def HandleCommit(self, commit_message, commit_response):
    """Respond to a Commit request by updating the user's account state.

    Commit attempts stop after the first error, returning a CONFLICT result
    for any unattempted entries.

    Args:
      commit_message: A sync_pb.CommitMessage protobuf holding the content
          of the client's request.
      commit_response: A sync_pb.CommitResponse protobuf into which a reply
          to the client request will be written.
    """
    commit_response.SetInParent()
    batch_failure = False
    session = {}  # Tracks ID renaming during the commit operation.
    guid = commit_message.cache_guid

    self.account.ValidateCommitEntries(commit_message.entries)

    for entry in commit_message.entries:
      server_entry = None
      if not batch_failure:
        # Try to commit the change to the account.
        server_entry = self.account.CommitEntry(entry, guid, session)

      # An entryresponse is returned in both success and failure cases.
      reply = commit_response.entryresponse.add()
      if not server_entry:
        reply.response_type = sync_pb2.CommitResponse.CONFLICT
        reply.error_message = 'Conflict.'
        batch_failure = True  # One failure halts the batch.
      else:
        reply.response_type = sync_pb2.CommitResponse.SUCCESS
        # These are the properties that the server is allowed to override
        # during commit; the client wants to know their values at the end
        # of the commit.
        reply.id_string = server_entry.id_string
        if not server_entry.deleted:
          # Note: the production server doesn't actually send the
          # parent_id_string on commit responses, so we don't either.
          reply.position_in_parent = server_entry.position_in_parent
          reply.version = server_entry.version
          reply.name = server_entry.name
          reply.non_unique_name = server_entry.non_unique_name
        else:
          reply.version = entry.version + 1

  def HandleGetUpdates(self, update_request, update_response):
    """Respond to a GetUpdates request by querying the user's account.

    Args:
      update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
          of the client's request.
      update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
          to the client request will be written.
    """
    update_response.SetInParent()
    update_sieve = UpdateSieve(update_request, self.account.migration_history)

    print CallerInfoToString(update_request.caller_info.source),
    print update_sieve.SummarizeRequest()

    update_sieve.CheckMigrationState()

    new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)

    update_response.changes_remaining = remaining
    sending_nigori_node = False
    for entry in entries:
      if entry.name == 'Nigori':
        sending_nigori_node = True
      reply = update_response.entries.add()
      reply.CopyFrom(entry)
    update_sieve.SaveProgress(new_timestamp, update_response)

    if update_request.need_encryption_key or sending_nigori_node:
      update_response.encryption_keys.extend(self.account.GetKeystoreKeys())

  def HandleGetOauth2Token(self):
    return (int(self.response_code),
            '{\n'
            '  \"refresh_token\": \"' + self.request_token + '\",\n'
            '  \"access_token\": \"' + self.access_token + '\",\n'
            '  \"expires_in\": ' + str(self.expires_in) + ',\n'
            '  \"token_type\": \"' + self.token_type + '\"\n'
            '}')

  def HandleSetOauth2Token(self, response_code, request_token, access_token,
                           expires_in, token_type):
    if response_code != 0:
      self.response_code = response_code
    if request_token != '':
      self.request_token = request_token
    if access_token != '':
      self.access_token = access_token
    if expires_in != 0:
      self.expires_in = expires_in
    if token_type != '':
      self.token_type = token_type

    return (200,
            '<html><title>Set OAuth2 Token</title>'
            '<H1>This server will now return the OAuth2 Token:</H1>'
            '<p>response_code: ' + str(self.response_code) + '</p>'
            '<p>request_token: ' + self.request_token + '</p>'
            '<p>access_token: ' + self.access_token + '</p>'
            '<p>expires_in: ' + str(self.expires_in) + '</p>'
            '<p>token_type: ' + self.token_type + '</p>'
            '</html>')

  def CustomizeClientCommand(self, sessions_commit_delay_seconds):
    """Customizes the value of the ClientCommand of ServerToClientResponse.

    Currently, this only allows for changing the sessions_commit_delay_seconds
    field.

    Args:
      sessions_commit_delay_seconds: The desired sync delay time for sessions.
    """
    if not self._client_command:
      self._client_command = client_commands_pb2.ClientCommand()

    self._client_command.sessions_commit_delay_seconds = \
        sessions_commit_delay_seconds
    return self._client_command