1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """An implementation of the server side of the Chromium sync protocol.
7 The details of the protocol are described mostly by comments in the protocol
8 buffer definition at chrome/browser/sync/protocol/sync.proto.
14 import google
.protobuf
.text_format
26 import app_list_specifics_pb2
27 import app_notification_specifics_pb2
28 import app_setting_specifics_pb2
29 import app_specifics_pb2
30 import article_specifics_pb2
31 import autofill_specifics_pb2
32 import bookmark_specifics_pb2
33 import client_commands_pb2
34 import dictionary_specifics_pb2
35 import get_updates_caller_info_pb2
36 import extension_setting_specifics_pb2
37 import extension_specifics_pb2
38 import favicon_image_specifics_pb2
39 import favicon_tracking_specifics_pb2
40 import history_delete_directive_specifics_pb2
41 import managed_user_setting_specifics_pb2
42 import managed_user_specifics_pb2
43 import managed_user_shared_setting_specifics_pb2
44 import nigori_specifics_pb2
45 import password_specifics_pb2
46 import preference_specifics_pb2
47 import priority_preference_specifics_pb2
48 import search_engine_specifics_pb2
49 import session_specifics_pb2
52 import synced_notification_app_info_specifics_pb2
53 import synced_notification_data_pb2
54 import synced_notification_render_pb2
55 import synced_notification_specifics_pb2
56 import theme_specifics_pb2
57 import typed_url_specifics_pb2
58 import wifi_credential_specifics_pb2
60 # An enumeration of the various kinds of data that can be synced.
61 # Over the wire, this enumeration is not used: a sync object's type is
62 # inferred by which EntitySpecifics field it has. But in the context
63 # of a program, it is useful to have an enumeration.
65 TOP_LEVEL
, # The type of the 'Google Chrome' folder.
78 HISTORY_DELETE_DIRECTIVE
,
80 MANAGED_USER_SHARED_SETTING
,
89 SYNCED_NOTIFICATION_APP_INFO
,
95 WIFI_CREDENTIAL
) = range(31)
# An enumeration on the frequency at which the server should send errors
# to the client. This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)
# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'
# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS['synced_notification_app_info'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    WIFI_CREDENTIAL: SYNC_TYPE_FIELDS['wifi_credential'],
}
# The parent ID used to indicate a top-level node.
ROOT_ID = 0

# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after start of epoch, since in timezones with positive
# UTC offset time.mktime throws an OverflowError,
# rather then returning negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="
class Error(Exception):
  """Error class for this module."""
class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""
class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""
class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    # The datatypes whose progress the client must discard and re-sync.
    self.datatypes = datatypes
class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""
class TransientError(Error):
  """The client would be sent a transient error."""
class SyncInducedError(Error):
  """The client would be sent an error."""
class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""
class ClientNotConnectedError(Error):
  """The client is not connected to the server."""
def GetEntryType(entry):
  """Extract the sync type from a SyncEntity.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.

  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or None
    if the type cannot be determined.

  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]
def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned.  Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: A EntitySpecifics protobuf message whose extensions to
      enumerate.

  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]
def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].number
def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum).

  Raises:
    DataTypeIdNotRecognized: if the id doesn't correspond to any known type.
  """
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      # Found the matching proto field number; this is the sync type.
      return data_type
  raise DataTypeIdNotRecognized
def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.

  Raises:
    DataTypeIdNotRecognized: if no type matches the given string.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      # Case- and plural-insensitive match against the proto field name.
      return data_type
  raise DataTypeIdNotRecognized
def MakeNewKeystoreKey():
  """Returns a new random keystore key of KEYSTORE_KEY_LENGTH characters."""
  return ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for x in xrange(KEYSTORE_KEY_LENGTH))
def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].name
def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
      .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
      .values_by_number[caller_info_source].name
def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging.  If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  # Prefer whichever phrasing is shorter / mentions fewer types.
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    # Mark the field as present (with default contents) so the entity's
    # type can be inferred from which specifics field is set.
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a server-created
      permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent.  If ROOT_ID, indicates
      a top-level item.  Otherwise, this must be the tag value of some other
      server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
      item.  This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup or not.
      This value is set to True in the default case. Non-default permanent items
      are those that are created only when a client explicitly tells the server
      to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default
class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event.  Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    # Per-datatype list of migration versions; every type starts at version 1.
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    """Return the most recent migration version for one datatype."""
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any the provided versions are out of date.

    This function intentionally returns migrations in the order that they were
    triggered.  Doing it this way allows the client to queue up two migrations
    in a row, so the second one is received while responding to the first.

    Args:
      version_map: a map whose keys are datatypes and whose values are
        versions.

    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      # Report the oldest outstanding migration first.
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for idx, datatype in enumerate(datatypes):
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
class UpdateSieve(object):
  """A filter to remove items the client has already seen."""

  def __init__(self, request, migration_history=None):
    self._original_request = request
    # Maps datatype -> the timestamp the client last saw for that type.
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          # Our tokens are pickled (timestamp, migration_version) pairs.
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          # An empty token means sync from the beginning of time.
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state.keys():
      # The TOP_LEVEL timestamp tracks the oldest timestamp of any type.
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    """Return a short human-readable description of the requested state."""
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    """Raise MigrationDoneError if the client is behind any migration."""
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile
    Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
485 class SyncDataModel(object):
486 """Models the account state of one sync user."""
489 # Specify all the permanent items that a model might need.
490 _PERMANENT_ITEM_SPECS
= [
491 PermanentItem('google_chrome_apps', name
='Apps',
492 parent_tag
=ROOT_ID
, sync_type
=APPS
),
493 PermanentItem('google_chrome_app_list', name
='App List',
494 parent_tag
=ROOT_ID
, sync_type
=APP_LIST
),
495 PermanentItem('google_chrome_app_notifications', name
='App Notifications',
496 parent_tag
=ROOT_ID
, sync_type
=APP_NOTIFICATION
),
497 PermanentItem('google_chrome_app_settings',
499 parent_tag
=ROOT_ID
, sync_type
=APP_SETTINGS
),
500 PermanentItem('google_chrome_bookmarks', name
='Bookmarks',
501 parent_tag
=ROOT_ID
, sync_type
=BOOKMARK
),
502 PermanentItem('bookmark_bar', name
='Bookmark Bar',
503 parent_tag
='google_chrome_bookmarks', sync_type
=BOOKMARK
),
504 PermanentItem('other_bookmarks', name
='Other Bookmarks',
505 parent_tag
='google_chrome_bookmarks', sync_type
=BOOKMARK
),
506 PermanentItem('synced_bookmarks', name
='Synced Bookmarks',
507 parent_tag
='google_chrome_bookmarks', sync_type
=BOOKMARK
,
508 create_by_default
=False),
509 PermanentItem('google_chrome_autofill', name
='Autofill',
510 parent_tag
=ROOT_ID
, sync_type
=AUTOFILL
),
511 PermanentItem('google_chrome_autofill_profiles', name
='Autofill Profiles',
512 parent_tag
=ROOT_ID
, sync_type
=AUTOFILL_PROFILE
),
513 PermanentItem('google_chrome_device_info', name
='Device Info',
514 parent_tag
=ROOT_ID
, sync_type
=DEVICE_INFO
),
515 PermanentItem('google_chrome_experiments', name
='Experiments',
516 parent_tag
=ROOT_ID
, sync_type
=EXPERIMENTS
),
517 PermanentItem('google_chrome_extension_settings',
518 name
='Extension Settings',
519 parent_tag
=ROOT_ID
, sync_type
=EXTENSION_SETTINGS
),
520 PermanentItem('google_chrome_extensions', name
='Extensions',
521 parent_tag
=ROOT_ID
, sync_type
=EXTENSIONS
),
522 PermanentItem('google_chrome_history_delete_directives',
523 name
='History Delete Directives',
525 sync_type
=HISTORY_DELETE_DIRECTIVE
),
526 PermanentItem('google_chrome_favicon_images',
527 name
='Favicon Images',
529 sync_type
=FAVICON_IMAGES
),
530 PermanentItem('google_chrome_favicon_tracking',
531 name
='Favicon Tracking',
533 sync_type
=FAVICON_TRACKING
),
534 PermanentItem('google_chrome_managed_user_settings',
535 name
='Managed User Settings',
536 parent_tag
=ROOT_ID
, sync_type
=MANAGED_USER_SETTING
),
537 PermanentItem('google_chrome_managed_users',
538 name
='Managed Users',
539 parent_tag
=ROOT_ID
, sync_type
=MANAGED_USER
),
540 PermanentItem('google_chrome_managed_user_shared_settings',
541 name
='Managed User Shared Settings',
542 parent_tag
=ROOT_ID
, sync_type
=MANAGED_USER_SHARED_SETTING
),
543 PermanentItem('google_chrome_nigori', name
='Nigori',
544 parent_tag
=ROOT_ID
, sync_type
=NIGORI
),
545 PermanentItem('google_chrome_passwords', name
='Passwords',
546 parent_tag
=ROOT_ID
, sync_type
=PASSWORD
),
547 PermanentItem('google_chrome_preferences', name
='Preferences',
548 parent_tag
=ROOT_ID
, sync_type
=PREFERENCE
),
549 PermanentItem('google_chrome_priority_preferences',
550 name
='Priority Preferences',
551 parent_tag
=ROOT_ID
, sync_type
=PRIORITY_PREFERENCE
),
552 PermanentItem('google_chrome_synced_notifications',
553 name
='Synced Notifications',
554 parent_tag
=ROOT_ID
, sync_type
=SYNCED_NOTIFICATION
),
555 PermanentItem('google_chrome_synced_notification_app_info',
556 name
='Synced Notification App Info',
557 parent_tag
=ROOT_ID
, sync_type
=SYNCED_NOTIFICATION_APP_INFO
),
558 PermanentItem('google_chrome_search_engines', name
='Search Engines',
559 parent_tag
=ROOT_ID
, sync_type
=SEARCH_ENGINE
),
560 PermanentItem('google_chrome_sessions', name
='Sessions',
561 parent_tag
=ROOT_ID
, sync_type
=SESSION
),
562 PermanentItem('google_chrome_themes', name
='Themes',
563 parent_tag
=ROOT_ID
, sync_type
=THEME
),
564 PermanentItem('google_chrome_typed_urls', name
='Typed URLs',
565 parent_tag
=ROOT_ID
, sync_type
=TYPED_URL
),
566 PermanentItem('google_chrome_wifi_credentials', name
='WiFi Credentials',
567 parent_tag
=ROOT_ID
, sync_type
=WIFI_CREDENTIAL
),
568 PermanentItem('google_chrome_dictionary', name
='Dictionary',
569 parent_tag
=ROOT_ID
, sync_type
=DICTIONARY
),
570 PermanentItem('google_chrome_articles', name
='Articles',
571 parent_tag
=ROOT_ID
, sync_type
=ARTICLE
),
575 # Monotonically increasing version number. The next object change will
576 # take on this value + 1.
579 # The definitive copy of this client's items: a map from ID string to a
580 # SyncEntity protocol buffer.
583 self
.ResetStoreBirthday()
584 self
.migration_history
= MigrationHistory()
585 self
.induced_error
= sync_pb2
.ClientToServerResponse
.Error()
586 self
.induced_error_frequency
= 0
587 self
.sync_count_before_errors
= 0
588 self
.acknowledge_managed_users
= False
589 self
._keys
= [MakeNewKeystoreKey()]
591 def _SaveEntry(self
, entry
):
592 """Insert or update an entry in the change log, and give it a new version.
594 The ID fields of this entry are assumed to be valid server IDs. This
595 entry will be updated with a new version number and sync_timestamp.
598 entry: The entry to be added or updated.
601 # Maintain a global (rather than per-item) sequence number and use it
602 # both as the per-entry version as well as the update-progress timestamp.
603 # This simulates the behavior of the original server implementation.
604 entry
.version
= self
._version
605 entry
.sync_timestamp
= self
._version
607 # Preserve the originator info, which the client is not required to send
609 base_entry
= self
._entries
.get(entry
.id_string
)
611 entry
.originator_cache_guid
= base_entry
.originator_cache_guid
612 entry
.originator_client_item_id
= base_entry
.originator_client_item_id
614 self
._entries
[entry
.id_string
] = copy
.deepcopy(entry
)
616 def _ServerTagToId(self
, tag
):
617 """Determine the server ID from a server-unique tag.
619 The resulting value is guaranteed not to collide with the other ID
623 tag: The unique, known-to-the-client tag of a server-generated item.
625 The string value of the computed server ID.
627 if not tag
or tag
== ROOT_ID
:
629 spec
= [x
for x
in self
._PERMANENT
_ITEM
_SPECS
if x
.tag
== tag
][0]
630 return self
._MakeCurrentId
(spec
.sync_type
, '<server tag>%s' % tag
)
632 def _TypeToTypeRootId(self
, model_type
):
633 """Returns the server ID for the type root node of the given type."""
634 tag
= [x
.tag
for x
in self
._PERMANENT
_ITEM
_SPECS
635 if x
.sync_type
== model_type
][0]
636 return self
._ServerTagToId
(tag
)
638 def _ClientTagToId(self
, datatype
, tag
):
639 """Determine the server ID from a client-unique tag.
641 The resulting value is guaranteed not to collide with the other ID
645 datatype: The sync type (python enum) of the identified object.
646 tag: The unique, opaque-to-the-server tag of a client-tagged item.
648 The string value of the computed server ID.
650 return self
._MakeCurrentId
(datatype
, '<client tag>%s' % tag
)
652 def _ClientIdToId(self
, datatype
, client_guid
, client_item_id
):
653 """Compute a unique server ID from a client-local ID tag.
655 The resulting value is guaranteed not to collide with the other ID
659 datatype: The sync type (python enum) of the identified object.
660 client_guid: A globally unique ID that identifies the client which
662 client_item_id: An ID that uniquely identifies this item on the client
665 The string value of the computed server ID.
667 # Using the client ID info is not required here (we could instead generate
668 # a random ID), but it's useful for debugging.
669 return self
._MakeCurrentId
(datatype
,
670 '<server ID originally>%s/%s' % (client_guid
, client_item_id
))
672 def _MakeCurrentId(self
, datatype
, inner_id
):
673 return '%d^%d^%s' % (datatype
,
674 self
.migration_history
.GetLatestVersion(datatype
),
677 def _ExtractIdInfo(self
, id_string
):
678 if not id_string
or id_string
== ROOT_ID
:
680 datatype_string
, separator
, remainder
= id_string
.partition('^')
681 migration_version_string
, separator
, inner_id
= remainder
.partition('^')
682 return (int(datatype_string
), int(migration_version_string
), inner_id
)
684 def _WritePosition(self
, entry
, parent_id
):
685 """Ensure the entry has an absolute, numeric position and parent_id.
687 Historically, clients would specify positions using the predecessor-based
688 references in the insert_after_item_id field; starting July 2011, this
689 was changed and Chrome now sends up the absolute position. The server
690 must store a position_in_parent value and must not maintain
691 insert_after_item_id.
692 Starting in Jan 2013, the client will also send up a unique_position field
693 which should be saved and returned on subsequent GetUpdates.
696 entry: The entry for which to write a position. Its ID field are
697 assumed to be server IDs. This entry will have its parent_id_string,
698 position_in_parent and unique_position fields updated; its
699 insert_after_item_id field will be cleared.
700 parent_id: The ID of the entry intended as the new parent.
703 entry
.parent_id_string
= parent_id
704 if not entry
.HasField('position_in_parent'):
705 entry
.position_in_parent
= 1337 # A debuggable, distinctive default.
706 entry
.ClearField('insert_after_item_id')
708 def _ItemExists(self
, id_string
):
709 """Determine whether an item exists in the changelog."""
710 return id_string
in self
._entries
712 def _CreatePermanentItem(self
, spec
):
713 """Create one permanent item from its spec, if it doesn't exist.
715 The resulting item is added to the changelog.
718 spec: A PermanentItem object holding the properties of the item to create.
720 id_string
= self
._ServerTagToId
(spec
.tag
)
721 if self
._ItemExists
(id_string
):
723 print 'Creating permanent item: %s' % spec
.name
724 entry
= sync_pb2
.SyncEntity()
725 entry
.id_string
= id_string
726 entry
.non_unique_name
= spec
.name
727 entry
.name
= spec
.name
728 entry
.server_defined_unique_tag
= spec
.tag
730 entry
.deleted
= False
731 entry
.specifics
.CopyFrom(GetDefaultEntitySpecifics(spec
.sync_type
))
732 self
._WritePosition
(entry
, self
._ServerTagToId
(spec
.parent_tag
))
733 self
._SaveEntry
(entry
)
735 def _CreateDefaultPermanentItems(self
, requested_types
):
736 """Ensure creation of all default permanent items for a given set of types.
739 requested_types: A list of sync data types from ALL_TYPES.
740 All default permanent items of only these types will be created.
742 for spec
in self
._PERMANENT
_ITEM
_SPECS
:
743 if spec
.sync_type
in requested_types
and spec
.create_by_default
:
744 self
._CreatePermanentItem
(spec
)
746 def ResetStoreBirthday(self
):
747 """Resets the store birthday to a random value."""
748 # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
749 self
.store_birthday
= '%0.30f' % random
.random()
751 def StoreBirthday(self
):
752 """Gets the store birthday."""
753 return self
.store_birthday
755 def GetChanges(self
, sieve
):
756 """Get entries which have changed, oldest first.
758 The returned entries are limited to being _BATCH_SIZE many. The entries
759 are returned in strict version order.
762 sieve: An update sieve to use to filter out updates the client
765 A tuple of (version, entries, changes_remaining). Version is a new
766 timestamp value, which should be used as the starting point for the
767 next query. Entries is the batch of entries meeting the current
768 timestamp query. Changes_remaining indicates the number of changes
769 left on the server after this batch.
771 if not sieve
.HasAnyTimestamp():
773 min_timestamp
= sieve
.GetMinTimestamp()
774 first_time_types
= sieve
.GetFirstTimeTypes()
775 self
._CreateDefaultPermanentItems
(first_time_types
)
776 # Mobile bookmark folder is not created by default, create it only when
777 # client requested it.
778 if (sieve
.GetCreateMobileBookmarks() and
779 first_time_types
.count(BOOKMARK
) > 0):
780 self
.TriggerCreateSyncedBookmarks()
782 self
.TriggerAcknowledgeManagedUsers()
784 change_log
= sorted(self
._entries
.values(),
785 key
=operator
.attrgetter('version'))
786 new_changes
= [x
for x
in change_log
if x
.version
> min_timestamp
]
787 # Pick batch_size new changes, and then filter them. This matches
788 # the RPC behavior of the production sync server.
789 batch
= new_changes
[:self
._BATCH
_SIZE
]
791 # Client is up to date.
792 return (min_timestamp
, [], 0)
794 # Restrict batch to requested types. Tombstones are untyped
795 # and will always get included.
796 filtered
= [copy
.deepcopy(item
) for item
in batch
797 if item
.deleted
or sieve
.ClientWantsItem(item
)]
799 # The new client timestamp is the timestamp of the last item in the
800 # batch, even if that item was filtered out.
801 return (batch
[-1].version
, filtered
, len(new_changes
) - len(batch
))
803 def GetKeystoreKeys(self
):
804 """Returns the encryption keys for this account."""
805 print "Returning encryption keys: %s" % self
._keys
808 def _CopyOverImmutableFields(self
, entry
):
809 """Preserve immutable fields by copying pre-commit state.
812 entry: A sync entity from the client.
814 if entry
.id_string
in self
._entries
:
815 if self
._entries
[entry
.id_string
].HasField(
816 'server_defined_unique_tag'):
817 entry
.server_defined_unique_tag
= (
818 self
._entries
[entry
.id_string
].server_defined_unique_tag
)
820 def _CheckVersionForCommit(self
, entry
):
821 """Perform an optimistic concurrency check on the version number.
823 Clients are only allowed to commit if they report having seen the most
824 recent version of an object.
827 entry: A sync entity from the client. It is assumed that ID fields
828 have been converted to server IDs.
830 A boolean value indicating whether the client's version matches the
831 newest server version for the given entry.
833 if entry
.id_string
in self
._entries
:
834 # Allow edits/deletes if the version matches, and any undeletion.
835 return (self
._entries
[entry
.id_string
].version
== entry
.version
or
836 self
._entries
[entry
.id_string
].deleted
)
838 # Allow unknown ID only if the client thinks it's new too.
839 return entry
.version
== 0
841 def _CheckParentIdForCommit(self
, entry
):
842 """Check that the parent ID referenced in a SyncEntity actually exists.
845 entry: A sync entity from the client. It is assumed that ID fields
846 have been converted to server IDs.
848 A boolean value indicating whether the entity's parent ID is an object
849 that actually exists (and is not deleted) in the current account state.
851 if entry
.parent_id_string
== ROOT_ID
:
852 # This is generally allowed.
854 if (not entry
.HasField('parent_id_string') and
855 entry
.HasField('client_defined_unique_tag')):
856 return True # Unique client tag items do not need to specify a parent.
857 if entry
.parent_id_string
not in self
._entries
:
858 print 'Warning: Client sent unknown ID. Should never happen.'
860 if entry
.parent_id_string
== entry
.id_string
:
861 print 'Warning: Client sent circular reference. Should never happen.'
863 if self
._entries
[entry
.parent_id_string
].deleted
:
864 # This can happen in a race condition between two clients.
866 if not self
._entries
[entry
.parent_id_string
].folder
:
867 print 'Warning: Client sent non-folder parent. Should never happen.'
def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
  """Convert ID fields in a client sync entry to server IDs.

  A commit batch sent by a client may contain new items for which the
  server has not generated IDs yet. And within a commit batch, later
  items are allowed to refer to earlier items. This method will
  generate server IDs for new items, as well as rewrite references
  to items whose server IDs were generated earlier in the batch.

  Args:
    entry: The client sync entry to modify.
    cache_guid: The globally unique ID of the client that sent this
      commit request.
    commit_session: A dictionary mapping the original IDs to the new server
      IDs, for any items committed earlier in the batch.
  """
  if entry.version == 0:
    data_type = GetEntryType(entry)
    if entry.HasField('client_defined_unique_tag'):
      # When present, this should determine the item's ID.
      new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
    else:
      new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
      entry.originator_cache_guid = cache_guid
      entry.originator_client_item_id = entry.id_string
    commit_session[entry.id_string] = new_id  # Remember the remapping.
    entry.id_string = new_id
  if entry.parent_id_string in commit_session:
    entry.parent_id_string = commit_session[entry.parent_id_string]
  if entry.insert_after_item_id in commit_session:
    entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
def ValidateCommitEntries(self, entries):
  """Raise an exception if a commit batch contains any global errors.

  Args:
    entries: an iterable containing commit-form SyncEntity protocol buffers.

  Raises:
    MigrationDoneError: if any of the entries reference a recently-migrated
      datatype.
  """
  server_ids_in_commit = set()
  local_ids_in_commit = set()
  for entry in entries:
    if entry.version:
      # Existing items carry their (versioned) server ID.
      server_ids_in_commit.add(entry.id_string)
    else:
      local_ids_in_commit.add(entry.id_string)
    if entry.HasField('parent_id_string'):
      if entry.parent_id_string not in local_ids_in_commit:
        server_ids_in_commit.add(entry.parent_id_string)

  versions_present = {}
  for server_id in server_ids_in_commit:
    parsed = self._ExtractIdInfo(server_id)
    if parsed:
      datatype, version, _ = parsed
      versions_present.setdefault(datatype, []).append(version)

  self.migration_history.CheckAllCurrent(
      dict((k, min(v)) for k, v in versions_present.iteritems()))
def CommitEntry(self, entry, cache_guid, commit_session):
  """Attempt to commit one entry to the user's account.

  Args:
    entry: A SyncEntity protobuf representing desired object changes.
    cache_guid: A string value uniquely identifying the client; this
      is used for ID generation and will determine the originator_cache_guid
      if the entry is new.
    commit_session: A dictionary mapping client IDs to server IDs for any
      objects committed earlier this session. If the entry gets a new ID
      during commit, the change will be recorded here.
  Returns:
    A SyncEntity reflecting the post-commit value of the entry, or None
    if the entry was not committed due to an error.
  """
  entry = copy.deepcopy(entry)

  # Generate server IDs for this entry, and write generated server IDs
  # from earlier entries into the message's fields, as appropriate. The
  # ID generation state is stored in 'commit_session'.
  self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

  # Sets the parent ID field for a client-tagged item. The client is allowed
  # to not specify parents for these types of items. The server can figure
  # out on its own what the parent ID for this entry should be.
  self._RewriteParentIdForUniqueClientEntry(entry)

  # Perform the optimistic concurrency check on the entry's version number.
  # Clients are not allowed to commit unless they indicate that they've seen
  # the most recent version of an object.
  if not self._CheckVersionForCommit(entry):
    return None

  # Check the validity of the parent ID; it must exist at this point.
  # TODO(nick): Implement cycle detection and resolution.
  if not self._CheckParentIdForCommit(entry):
    return None

  self._CopyOverImmutableFields(entry)

  # At this point, the commit is definitely going to happen.

  # Deletion works by storing a limited record for an entry, called a
  # tombstone. A sync server must track deleted IDs forever, since it does
  # not keep track of client knowledge (there's no deletion ACK event).
  if entry.deleted:
    def MakeTombstone(id_string, datatype):
      """Make a tombstone entry that will replace the entry being deleted.

      Args:
        id_string: Index of the SyncEntity to be deleted.
      Returns:
        A new SyncEntity reflecting the fact that the entry is deleted.
      """
      # Only the ID, version and deletion state are preserved on a tombstone.
      tombstone = sync_pb2.SyncEntity()
      tombstone.id_string = id_string
      tombstone.deleted = True
      tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
      return tombstone

    def IsChild(child_id):
      """Check if a SyncEntity is a child of entry, or any of its children.

      Args:
        child_id: Index of the SyncEntity that is a possible child of entry.
      Returns:
        True if it is a child; false otherwise.
      """
      if child_id not in self._entries:
        return False
      if self._entries[child_id].parent_id_string == entry.id_string:
        return True
      return IsChild(self._entries[child_id].parent_id_string)

    # Identify any children entry might have.
    child_ids = [child.id_string for child in self._entries.itervalues()
                 if IsChild(child.id_string)]

    # Mark all children that were identified as deleted.
    for child_id in child_ids:
      datatype = GetEntryType(self._entries[child_id])
      self._SaveEntry(MakeTombstone(child_id, datatype))

    # Delete entry itself.
    datatype = GetEntryType(self._entries[entry.id_string])
    entry = MakeTombstone(entry.id_string, datatype)
  else:
    # Comments in sync.proto detail how the representation of positional
    # ordering works.
    #
    # We've almost fully deprecated the 'insert_after_item_id' field.
    # The 'position_in_parent' field is also deprecated, but as of Jan 2013
    # is still in common use. The 'unique_position' field is the latest
    # and greatest in positioning technology.
    #
    # This server supports 'position_in_parent' and 'unique_position'.
    self._WritePosition(entry, entry.parent_id_string)

  # Preserve the originator info, which the client is not required to send
  # when updating an existing item.
  base_entry = self._entries.get(entry.id_string)
  if base_entry and not entry.HasField('originator_cache_guid'):
    entry.originator_cache_guid = base_entry.originator_cache_guid
    entry.originator_client_item_id = base_entry.originator_client_item_id

  # Store the current time since the Unix epoch in milliseconds.
  entry.mtime = (int((time.mktime(time.gmtime()) -
      (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))

  # Commit the change. This also updates the version number.
  self._SaveEntry(entry)
  return entry
1049 def _RewriteVersionInId(self
, id_string
):
1050 """Rewrites an ID so that its migration version becomes current."""
1051 parsed_id
= self
._ExtractIdInfo
(id_string
)
1054 datatype
, old_migration_version
, inner_id
= parsed_id
1055 return self
._MakeCurrentId
(datatype
, inner_id
)
1057 def _RewriteParentIdForUniqueClientEntry(self
, entry
):
1058 """Sets the entry's parent ID field to the appropriate value.
1060 The client must always set enough of the specifics of the entries it sends
1061 up such that the server can identify its type. (See crbug.com/373859)
1063 The client is under no obligation to set the parent ID field. The server
1064 can always infer what the appropriate parent for this model type should be.
1065 Having the client not send the parent ID is a step towards the removal of
1066 type root nodes. (See crbug.com/373869)
1068 This server implements these features by "faking" the existing of a parent
1069 ID early on in the commit processing.
1071 This function has no effect on non-client-tagged items.
1073 if not entry
.HasField('client_defined_unique_tag'):
1074 return # Skip this processing for non-client-tagged types.
1075 data_type
= GetEntryType(entry
)
1076 entry
.parent_id_string
= self
._TypeToTypeRootId
(data_type
)
def TriggerMigration(self, datatypes):
  """Cause a migration to occur for a set of datatypes on this account.

  Clients will see the MIGRATION_DONE error for these datatypes until they
  resync them.
  """
  self.migration_history.Bump(datatypes)
  # Snapshot the entries, then rebuild the map under the new version IDs.
  remaining = list(self._entries.values())
  self._entries.clear()
  for entry in remaining:
    entry.id_string = self._RewriteVersionInId(entry.id_string)
    if entry.HasField('parent_id_string'):
      entry.parent_id_string = self._RewriteVersionInId(
          entry.parent_id_string)
    self._entries[entry.id_string] = entry
def TriggerSyncTabFavicons(self):
  """Set the 'sync_tab_favicons' field to this account's nigori node.

  If the field is not currently set, will write a new nigori node entry
  with the field set. Else does nothing.
  """
  nigori_tag = "google_chrome_nigori"
  nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
  if (nigori_original.specifics.nigori.sync_tab_favicons):
    # Already enabled; nothing to do.
    return
  nigori_new = copy.deepcopy(nigori_original)
  nigori_new.specifics.nigori.sync_tabs = True
  self._SaveEntry(nigori_new)
def TriggerCreateSyncedBookmarks(self):
  """Create the Synced Bookmarks folder under the Bookmarks permanent item.

  Clients will then receive the Synced Bookmarks folder on future
  GetUpdates, and new bookmarks can be added within the Synced Bookmarks
  folder.
  """
  # Exactly one permanent item spec carries this name; fail loudly otherwise.
  matches = [spec for spec in self._PERMANENT_ITEM_SPECS
             if spec.name == "Synced Bookmarks"]
  synced_bookmarks_spec, = matches
  self._CreatePermanentItem(synced_bookmarks_spec)
def TriggerEnableKeystoreEncryption(self):
  """Create the keystore_encryption experiment entity and enable it.

  A new entity within the EXPERIMENTS datatype is created with the unique
  client tag "keystore_encryption" if it doesn't already exist. The
  keystore_encryption message is then filled with |enabled| set to true.
  """
  experiment_id = self._ServerTagToId("google_chrome_experiments")
  keystore_encryption_id = self._ClientTagToId(
      EXPERIMENTS,
      KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
  keystore_entry = self._entries.get(keystore_encryption_id)
  if keystore_entry is None:
    keystore_entry = sync_pb2.SyncEntity()
    keystore_entry.id_string = keystore_encryption_id
    keystore_entry.name = "Keystore Encryption"
    keystore_entry.client_defined_unique_tag = (
        KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
    keystore_entry.folder = False
    keystore_entry.deleted = False
    keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
    self._WritePosition(keystore_entry, experiment_id)

  keystore_entry.specifics.experiments.keystore_encryption.enabled = True

  self._SaveEntry(keystore_entry)
def TriggerRotateKeystoreKeys(self):
  """Rotate the current set of keystore encryption keys.

  |self._keys| will have a new random encryption key appended to it. We touch
  the nigori node so that each client will receive the new encryption keys
  on its next GetUpdates.
  """
  # Add a new encryption key.
  self._keys.append(MakeNewKeystoreKey())

  # Increment the nigori node's timestamp, so clients will get the new keys
  # on their next GetUpdates (any time the nigori node is sent back, we also
  # send back the keystore keys).
  nigori_id = self._ServerTagToId("google_chrome_nigori")
  self._SaveEntry(self._entries.get(nigori_id))
def TriggerAcknowledgeManagedUsers(self):
  """Set the "acknowledged" flag for any managed user entities that don't have
  it set already.

  Does nothing unless acknowledging has been enabled for this account.
  """
  if not self.acknowledge_managed_users:
    return

  managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
                   if entry.specifics.HasField('managed_user')
                   and not entry.specifics.managed_user.acknowledged]
  for user in managed_users:
    user.specifics.managed_user.acknowledged = True
    self._SaveEntry(user)
def TriggerEnablePreCommitGetUpdateAvoidance(self):
  """Sets the experiment to enable pre-commit GetUpdate avoidance."""
  experiment_id = self._ServerTagToId("google_chrome_experiments")
  pre_commit_gu_avoidance_id = self._ClientTagToId(
      EXPERIMENTS,
      PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
  entry = self._entries.get(pre_commit_gu_avoidance_id)
  if entry is None:
    # Create the experiment entity on first use.
    entry = sync_pb2.SyncEntity()
    entry.id_string = pre_commit_gu_avoidance_id
    entry.name = "Pre-commit GU avoidance"
    entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
    entry.folder = False
    entry.deleted = False
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
    self._WritePosition(entry, experiment_id)
  entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
  self._SaveEntry(entry)
def SetInducedError(self, error, error_frequency,
                    sync_count_before_errors):
  """Record an error to be induced on subsequent sync requests."""
  self.sync_count_before_errors = sync_count_before_errors
  self.induced_error_frequency = error_frequency
  self.induced_error = error
def GetInducedError(self):
  """Return the error most recently recorded via SetInducedError."""
  return self.induced_error
def AddSyncedNotification(self, serialized_notification):
  """Adds a synced notification to the server data.

  The notification will be delivered to the client on the next GetUpdates
  call.

  Args:
    serialized_notification: A serialized CoalescedSyncedNotification.

  Returns:
    The string representation of the added SyncEntity.

  Raises:
    ClientNotConnectedError: if the client has not yet connected to this
      server.
  """
  # A unique string used wherever a unique ID for this notification is
  # required.
  unique_notification_id = str(uuid.uuid4())

  specifics = self._CreateSyncedNotificationEntitySpecifics(
      unique_notification_id, serialized_notification)

  # Create the root SyncEntity representing a single notification.
  entity = sync_pb2.SyncEntity()
  entity.specifics.CopyFrom(specifics)
  entity.parent_id_string = self._ServerTagToId(
      'google_chrome_synced_notifications')
  entity.name = 'Synced notification added for testing'
  entity.version = self._GetNextVersionNumber()

  coalesced_key = specifics.synced_notification.coalesced_notification.key
  entity.client_defined_unique_tag = (
      self._CreateSyncedNotificationClientTag(coalesced_key))
  entity.id_string = self._ClientTagToId(GetEntryType(entity),
                                         entity.client_defined_unique_tag)

  self._entries[entity.id_string] = copy.deepcopy(entity)

  return google.protobuf.text_format.MessageToString(entity)
1250 def _GetNextVersionNumber(self
):
1251 """Set the version to one more than the greatest version number seen."""
1252 entries
= sorted(self
._entries
.values(), key
=operator
.attrgetter('version'))
1253 if len(entries
) < 1:
1254 raise ClientNotConnectedError
1255 return entries
[-1].version
+ 1
def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
                                             serialized_notification):
  """Create the EntitySpecifics proto for a synced notification."""
  coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
  google.protobuf.text_format.Merge(serialized_notification, coalesced)

  # Override the provided key so that we have a unique one.
  coalesced.key = unique_id

  specifics = sync_pb2.EntitySpecifics()
  notification_specifics = \
      synced_notification_specifics_pb2.SyncedNotificationSpecifics()
  notification_specifics.coalesced_notification.CopyFrom(coalesced)
  specifics.synced_notification.CopyFrom(notification_specifics)

  return specifics
def _CreateSyncedNotificationClientTag(self, key):
  """Create the client_defined_unique_tag value for a SyncedNotification.

  Args:
    key: The entity used to create the client tag.

  Returns:
    The string value of the to be used as the client_defined_unique_tag.
  """
  empty_specifics = (
      synced_notification_specifics_pb2.SyncedNotificationSpecifics())
  serialized_type = sync_pb2.EntitySpecifics()
  serialized_type.synced_notification.CopyFrom(empty_specifics)
  digest = hashlib.sha1(serialized_type.SerializeToString() + key).digest()
  return base64.b64encode(digest)
def AddSyncedNotificationAppInfo(self, app_info):
  """Adds an app info struct to the server data.

  The notification will be delivered to the client on the next GetUpdates
  call.

  Args:
    app_info: A serialized AppInfo.

  Returns:
    The string representation of the added SyncEntity.

  Raises:
    ClientNotConnectedError: if the client has not yet connected to this
      server.
  """
  specifics = self._CreateSyncedNotificationAppInfoEntitySpecifics(app_info)

  # Create the root SyncEntity representing a single app info protobuf.
  entity = sync_pb2.SyncEntity()
  entity.specifics.CopyFrom(specifics)
  entity.parent_id_string = self._ServerTagToId(
      'google_chrome_synced_notification_app_info')
  entity.name = 'App info added for testing'
  entity.version = self._GetNextVersionNumber()

  # App Infos do not have a strong id, it only needs to be unique.
  entity.client_defined_unique_tag = "foo"
  entity.id_string = "foo"

  self._entries[entity.id_string] = copy.deepcopy(entity)

  # NOTE: a leftover debug print of the full entity was removed here.
  return google.protobuf.text_format.MessageToString(entity)
def _CreateSyncedNotificationAppInfoEntitySpecifics(
    self, synced_notification_app_info):
  """Create the EntitySpecifics proto for a synced notification app info."""
  # Create a single, empty app_info object.
  app_info = \
      synced_notification_app_info_specifics_pb2.SyncedNotificationAppInfo()
  # Fill the app_info object from the text format protobuf.
  google.protobuf.text_format.Merge(synced_notification_app_info, app_info)

  # Create a new specifics object with a contained app_info.
  specifics = sync_pb2.EntitySpecifics()
  app_info_specifics = \
      synced_notification_app_info_specifics_pb2.\
      SyncedNotificationAppInfoSpecifics()

  # Copy the app info from the text format protobuf.
  contained_app_info = app_info_specifics.synced_notification_app_info.add()
  contained_app_info.CopyFrom(app_info)

  # And put the new app_info_specifics into the specifics before returning.
  specifics.synced_notification_app_info.CopyFrom(app_info_specifics)

  return specifics
class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its shorthand nickname (read by GetShortClientName).
    self.clients = {}
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint)
        for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    # Counts handled sync requests; used for induced-error frequency.
    self.sync_count = 0

    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'

    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None
def GetShortClientName(self, query):
  """Return a short, stable nickname for the client named in |query|."""
  parsed = cgi.parse_qs(query[query.find('?')+1:])
  client_id = parsed.get('client_id')
  if not client_id:
    # No client_id parameter was supplied; the client cannot be identified.
    return '?'
  client_id = client_id[0]
  if client_id not in self.clients:
    self.clients[client_id] = self.client_name_generator.next()
  return self.clients[client_id]
def CheckStoreBirthday(self, request):
  """Raises StoreBirthdayError if the request's birthday is a mismatch."""
  if not request.HasField('store_birthday'):
    # Requests without a birthday (e.g. first contact) are always accepted.
    return
  if self.account.StoreBirthday() != request.store_birthday:
    raise StoreBirthdayError
def CheckTransientError(self):
  """Raises TransientError if transient_error variable is set."""
  if not self.transient_error:
    return
  raise TransientError
def CheckSendError(self):
  """Raises SyncInducedError if needed."""
  if (self.account.induced_error.error_type !=
      sync_enums_pb2.SyncEnums.UNKNOWN):
    # Always means return the given error for all requests.
    if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
      raise SyncInducedError
    # This means the FIRST 2 requests of every 3 requests
    # return an error. Don't switch the order of failures. There are
    # test cases that rely on the first 2 being the failure rather than
    # the last 2.
    elif (self.account.induced_error_frequency ==
          ERROR_FREQUENCY_TWO_THIRDS):
      if (((self.sync_count -
            self.account.sync_count_before_errors) % 3) != 0):
        raise SyncInducedError
    else:
      # Any other frequency value is a configuration error.
      raise InducedErrorFrequencyNotDefined
def HandleMigrate(self, path):
  """Trigger a migration for the datatypes named in |path|'s query string.

  Returns:
    A (code, html) tuple describing the outcome.
  """
  query = urlparse.urlparse(path)[4]
  code = 200
  self.account_lock.acquire()
  try:
    datatypes = [DataTypeStringToSyncTypeLoose(x)
                 for x in urlparse.parse_qs(query).get('type',[])]
    if datatypes:
      self.account.TriggerMigration(datatypes)
      response = 'Migrated datatypes %s' % (
          ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
    else:
      response = 'Please specify one or more <i>type=name</i> parameters'
      code = 400
  except DataTypeIdNotRecognized:
    response = 'Could not interpret datatype name'
    code = 400
  finally:
    self.account_lock.release()
  return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
          (code, code, response))
def HandleSetInducedError(self, path):
  """Parse error parameters from |path| and record them as the induced error.

  Returns:
    A (code, html) tuple describing the outcome.
  """
  query = urlparse.urlparse(path)[4]
  self.account_lock.acquire()
  code = 200
  response = 'Success'
  error = sync_pb2.ClientToServerResponse.Error()
  try:
    error_type = urlparse.parse_qs(query)['error']
    action = urlparse.parse_qs(query)['action']
    error.error_type = int(error_type[0])
    error.action = int(action[0])
    # Optional parameters fall back to empty / 'always' defaults.
    try:
      error.url = (urlparse.parse_qs(query)['url'])[0]
    except KeyError:
      error.url = ''
    try:
      error.error_description = (
          (urlparse.parse_qs(query)['error_description'])[0])
    except KeyError:
      error.error_description = ''
    try:
      error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
    except KeyError:
      error_frequency = ERROR_FREQUENCY_ALWAYS
    self.account.SetInducedError(error, error_frequency, self.sync_count)
    response = ('Error = %d, action = %d, url = %s, description = %s' %
                (error.error_type, error.action,
                 error.url,
                 error.error_description))
  except (KeyError, ValueError, IndexError):
    # Required parameters missing or malformed.
    response = 'Could not parse url'
    code = 400
  finally:
    self.account_lock.release()
  return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
          (code, code, response))
def HandleCreateBirthdayError(self):
  """Reset the store birthday and report success as (code, html)."""
  self.account.ResetStoreBirthday()
  return (
      200,
      '<html><title>Birthday error</title><H1>Birthday error</H1></html>')
def HandleSetTransientError(self):
  """Enable the transient-error flag and report success as (code, html)."""
  self.transient_error = True
  return (
      200,
      '<html><title>Transient error</title><H1>Transient error</H1></html>')
def HandleSetSyncTabFavicons(self):
  """Set 'sync_tab_favicons' field of the nigori node for this account."""
  self.account.TriggerSyncTabFavicons()
  return (
      200,
      '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')
def HandleCreateSyncedBookmarks(self):
  """Create the Synced Bookmarks folder under Bookmarks."""
  self.account.TriggerCreateSyncedBookmarks()
  return (
      200,
      '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')
def HandleEnableKeystoreEncryption(self):
  """Enables the keystore encryption experiment."""
  self.account.TriggerEnableKeystoreEncryption()
  return (
      200,
      '<html><title>Enable Keystore Encryption</title>'
      '<H1>Enable Keystore Encryption</H1></html>')
def HandleRotateKeystoreKeys(self):
  """Rotate the keystore encryption keys."""
  self.account.TriggerRotateKeystoreKeys()
  return (
      200,
      '<html><title>Rotate Keystore Keys</title>'
      '<H1>Rotate Keystore Keys</H1></html>')
def HandleEnableManagedUserAcknowledgement(self):
  """Enable acknowledging newly created managed users."""
  self.account.acknowledge_managed_users = True
  return (
      200,
      '<html><title>Enable Managed User Acknowledgement</title>'
      '<h1>Enable Managed User Acknowledgement</h1></html>')
def HandleEnablePreCommitGetUpdateAvoidance(self):
  """Enables the pre-commit GU avoidance experiment."""
  self.account.TriggerEnablePreCommitGetUpdateAvoidance()
  return (
      200,
      '<html><title>Enable pre-commit GU avoidance</title>'
      '<H1>Enable pre-commit GU avoidance</H1></html>')
1537 def HandleCommand(self
, query
, raw_request
):
1538 """Decode and handle a sync command from a raw input of bytes.
1540 This is the main entry point for this class. It is safe to call this
1541 method from multiple threads.
1544 raw_request: An iterable byte sequence to be interpreted as a sync
1547 A tuple (response_code, raw_response); the first value is an HTTP
1548 result code, while the second value is a string of bytes which is the
1549 serialized reply to the command.
1551 self
.account_lock
.acquire()
1552 self
.sync_count
+= 1
1553 def print_context(direction
):
1554 print '[Client %s %s %s.py]' % (self
.GetShortClientName(query
), direction
,
1558 request
= sync_pb2
.ClientToServerMessage()
1559 request
.MergeFromString(raw_request
)
1560 contents
= request
.message_contents
1562 response
= sync_pb2
.ClientToServerResponse()
1563 response
.error_code
= sync_enums_pb2
.SyncEnums
.SUCCESS
1565 if self
._client
_command
:
1566 response
.client_command
.CopyFrom(self
._client
_command
)
1568 self
.CheckStoreBirthday(request
)
1569 response
.store_birthday
= self
.account
.store_birthday
1570 self
.CheckTransientError()
1571 self
.CheckSendError()
1575 if contents
== sync_pb2
.ClientToServerMessage
.AUTHENTICATE
:
1576 print 'Authenticate'
1577 # We accept any authentication token, and support only one account.
1578 # TODO(nick): Mock out the GAIA authentication as well; hook up here.
1579 response
.authenticate
.user
.email
= 'syncjuser@chromium'
1580 response
.authenticate
.user
.display_name
= 'Sync J User'
1581 elif contents
== sync_pb2
.ClientToServerMessage
.COMMIT
:
1582 print 'Commit %d item(s)' % len(request
.commit
.entries
)
1583 self
.HandleCommit(request
.commit
, response
.commit
)
1584 elif contents
== sync_pb2
.ClientToServerMessage
.GET_UPDATES
:
1586 self
.HandleGetUpdates(request
.get_updates
, response
.get_updates
)
1588 print '%d update(s)' % len(response
.get_updates
.entries
)
1590 print 'Unrecognizable sync request!'
1591 return (400, None) # Bad request.
1592 return (200, response
.SerializeToString())
1593 except MigrationDoneError
, error
:
1595 print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error
.datatypes
))
1596 response
= sync_pb2
.ClientToServerResponse()
1597 response
.store_birthday
= self
.account
.store_birthday
1598 response
.error_code
= sync_enums_pb2
.SyncEnums
.MIGRATION_DONE
1599 response
.migrated_data_type_id
[:] = [
1600 SyncTypeToProtocolDataTypeId(x
) for x
in error
.datatypes
]
1601 return (200, response
.SerializeToString())
1602 except StoreBirthdayError
, error
:
1604 print 'NOT_MY_BIRTHDAY'
1605 response
= sync_pb2
.ClientToServerResponse()
1606 response
.store_birthday
= self
.account
.store_birthday
1607 response
.error_code
= sync_enums_pb2
.SyncEnums
.NOT_MY_BIRTHDAY
1608 return (200, response
.SerializeToString())
1609 except TransientError
, error
:
1610 ### This is deprecated now. Would be removed once test cases are removed.
1612 print 'TRANSIENT_ERROR'
1613 response
.store_birthday
= self
.account
.store_birthday
1614 response
.error_code
= sync_enums_pb2
.SyncEnums
.TRANSIENT_ERROR
1615 return (200, response
.SerializeToString())
1616 except SyncInducedError
, error
:
1618 print 'INDUCED_ERROR'
1619 response
.store_birthday
= self
.account
.store_birthday
1620 error
= self
.account
.GetInducedError()
1621 response
.error
.error_type
= error
.error_type
1622 response
.error
.url
= error
.url
1623 response
.error
.error_description
= error
.error_description
1624 response
.error
.action
= error
.action
1625 return (200, response
.SerializeToString())
1627 self
.account_lock
.release()
def HandleCommit(self, commit_message, commit_response):
  """Respond to a Commit request by updating the user's account state.

  Commit attempts stop after the first error, returning a CONFLICT result
  for any unattempted entries.

  Args:
    commit_message: A sync_pb.CommitMessage protobuf holding the content
      of the client's request.
    commit_response: A sync_pb.CommitResponse protobuf into which a reply
      to the client request will be written.
  """
  commit_response.SetInParent()
  batch_failure = False
  session = {}  # Tracks ID renaming during the commit operation.
  guid = commit_message.cache_guid

  self.account.ValidateCommitEntries(commit_message.entries)

  for entry in commit_message.entries:
    server_entry = None
    if not batch_failure:
      # Try to commit the change to the account.
      server_entry = self.account.CommitEntry(entry, guid, session)

    # An entryresponse is returned in both success and failure cases.
    reply = commit_response.entryresponse.add()
    if not server_entry:
      reply.response_type = sync_pb2.CommitResponse.CONFLICT
      reply.error_message = 'Conflict.'
      batch_failure = True  # One failure halts the batch.
    else:
      reply.response_type = sync_pb2.CommitResponse.SUCCESS
      # These are the properties that the server is allowed to override
      # during commit; the client wants to know their values at the end
      # of the operation.
      reply.id_string = server_entry.id_string
      if not server_entry.deleted:
        # Note: the production server doesn't actually send the
        # parent_id_string on commit responses, so we don't either.
        reply.position_in_parent = server_entry.position_in_parent
        reply.version = server_entry.version
        reply.name = server_entry.name
        reply.non_unique_name = server_entry.non_unique_name
      else:
        reply.version = entry.version + 1
1676 def HandleGetUpdates(self
, update_request
, update_response
):
1677 """Respond to a GetUpdates request by querying the user's account.
1680 update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
1681 of the client's request.
1682 update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
1683 to the client request will be written.
1685 update_response
.SetInParent()
1686 update_sieve
= UpdateSieve(update_request
, self
.account
.migration_history
)
1688 print CallerInfoToString(update_request
.caller_info
.source
),
1689 print update_sieve
.SummarizeRequest()
1691 update_sieve
.CheckMigrationState()
1693 new_timestamp
, entries
, remaining
= self
.account
.GetChanges(update_sieve
)
1695 update_response
.changes_remaining
= remaining
1696 sending_nigori_node
= False
1697 for entry
in entries
:
1698 if entry
.name
== 'Nigori':
1699 sending_nigori_node
= True
1700 reply
= update_response
.entries
.add()
1701 reply
.CopyFrom(entry
)
1702 update_sieve
.SaveProgress(new_timestamp
, update_response
)
1704 if update_request
.need_encryption_key
or sending_nigori_node
:
1705 update_response
.encryption_keys
.extend(self
.account
.GetKeystoreKeys())
def HandleGetOauth2Token(self):
  """Return the stored OAuth2 token fields as a (code, JSON body) tuple."""
  return (int(self.response_code),
          '{\n'
          '  "refresh_token": "' + self.request_token + '",\n'
          '  "access_token": "' + self.access_token + '",\n'
          '  "expires_in": ' + str(self.expires_in) + ',\n'
          '  "token_type": "' + self.token_type + '"\n'
          '}')
def HandleSetOauth2Token(self, response_code, request_token, access_token,
                         expires_in, token_type):
  """Overwrite the stored OAuth2 token fields with any non-default values.

  Zero / empty-string arguments leave the corresponding field unchanged.

  Returns:
    A (code, html) tuple summarizing the stored values.
  """
  if response_code != 0:
    self.response_code = response_code
  if request_token != '':
    self.request_token = request_token
  if access_token != '':
    self.access_token = access_token
  if expires_in != 0:
    self.expires_in = expires_in
  if token_type != '':
    self.token_type = token_type

  return (
      200,
      '<html><title>Set OAuth2 Token</title>'
      '<H1>This server will now return the OAuth2 Token:</H1>'
      '<p>response_code: ' + str(self.response_code) + '</p>'
      '<p>request_token: ' + self.request_token + '</p>'
      '<p>access_token: ' + self.access_token + '</p>'
      '<p>expires_in: ' + str(self.expires_in) + '</p>'
      '<p>token_type: ' + self.token_type + '</p>'
      '</html>')
def CustomizeClientCommand(self, sessions_commit_delay_seconds):
  """Customizes the value of the ClientCommand of ServerToClientResponse.

  Currently, this only allows for changing the sessions_commit_delay_seconds
  field. This is useful for testing in conjunction with
  AddSyncedNotification so that synced notifications are seen immediately
  after triggering them with an HTTP call to the test server.

  Args:
    sessions_commit_delay_seconds: The desired sync delay time for sessions.
  """
  command = self._client_command
  if not command:
    command = client_commands_pb2.ClientCommand()
    self._client_command = command
  command.sessions_commit_delay_seconds = sessions_commit_delay_seconds
  return command