# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An implementation of the server side of the Chromium sync protocol.

The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""

import base64
import cgi
import copy
import google.protobuf.text_format
import hashlib
import operator
import pickle
import random
import string
import sys
import threading
import time
import urlparse
import uuid

import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import get_updates_caller_info_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_specifics_pb2
import managed_user_shared_setting_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_pb2
import sync_enums_pb2
import synced_notification_app_info_specifics_pb2
import synced_notification_data_pb2
import synced_notification_render_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2

# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has. But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    SYNCED_NOTIFICATION_APP_INFO,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING) = range(30)

# An enumeration of the frequency at which the server should send errors
# to the client. This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype. Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS['synced_notification_app_info'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
}
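
# Illustrative example (added for clarity, not part of the original file):
# the descriptor for a type exposes both the EntitySpecifics field name and
# its protocol field number, which is what SyncTypeToProtocolDataTypeId()
# below returns:
#   SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].name    -> 'bookmark'
#   SYNC_TYPE_TO_DESCRIPTOR[BOOKMARK].number  -> the 'bookmark' field number
#                                                declared in sync.proto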

# The parent ID used to indicate a top-level node.
ROOT_ID = '0'

# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after start of epoch, since in timezones with positive
# UTC offset time.mktime throws an OverflowError,
# rather than returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="


class Error(Exception):
  """Error class for this module."""


class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""


class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""


class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    self.datatypes = datatypes


class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""


class TransientError(Error):
  """The client would be sent a transient error."""


class SyncInducedError(Error):
  """The client would be sent an error."""


class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""


class ClientNotConnectedError(Error):
  """The client is not connected to the server."""


def GetEntryType(entry):
  """Extract the sync type from a SyncEntry.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or None
    if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]


def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned. Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: An EntitySpecifics protobuf message whose extensions to
      enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]


def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].number


def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum)."""
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      return data_type
  raise DataTypeIdNotRecognized


def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      return data_type
  raise DataTypeIdNotRecognized
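
# Illustrative examples (added for clarity, not part of the original file);
# the lookup is case-insensitive and ignores a trailing 's':
#   DataTypeStringToSyncTypeLoose('Bookmarks')  -> BOOKMARK
#   DataTypeStringToSyncTypeLoose('session')    -> SESSION
#   DataTypeStringToSyncTypeLoose('nonsense')   -> raises DataTypeIdNotRecognized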


def MakeNewKeystoreKey():
  """Returns a new random keystore key."""
  return ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for x in xrange(KEYSTORE_KEY_LENGTH))


def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].name


def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
      .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
      .values_by_number[caller_info_source].name


def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging. If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
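
# Illustrative examples (added for clarity, not part of the original file):
#   ShortDatatypeListSummary([BOOKMARK, THEME])  -> 'bookmark+theme'
#   ShortDatatypeListSummary(ALL_TYPES)          -> 'everything'
#   ShortDatatypeListSummary(set(ALL_TYPES) - set([NIGORI]))
#                                                -> 'all except nigori'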


def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
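
# Illustrative example (added for clarity, not part of the original file):
# SetInParent() marks the submessage as present without populating any of its
# fields, so the result carries exactly one recognizable type field:
#   specifics = GetDefaultEntitySpecifics(BOOKMARK)
#   specifics.HasField('bookmark')          -> True
#   GetEntryTypesFromSpecifics(specifics)   -> [BOOKMARK]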


class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a server-created
      permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent. If ROOT_ID, indicates
      a top-level item. Otherwise, this must be the tag value of some other
      server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
      item. This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup or not.
      This value is set to True in the default case. Non-default permanent
      items are those that are created only when a client explicitly tells
      the server to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default


class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event. Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally returns migrations in the order that they were
    triggered. Doing it this way allows the client to queue up two migrations
    in a row, so the second one is received while responding to the first.

    Arguments:
      versions_map: a map whose keys are datatypes and whose values are
        versions.
    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
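
# Illustrative usage (added for clarity, not part of the original file):
# after a Bump, a client still reporting the old migration version for that
# type gets a MigrationDoneError until it re-syncs.
#   history = MigrationHistory()
#   history.Bump([BOOKMARK])                # bookmarks now at version 2
#   history.CheckAllCurrent({BOOKMARK: 1})  # raises MigrationDoneError
#   history.CheckAllCurrent({BOOKMARK: 2})  # passes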


class UpdateSieve(object):
  """A filter to remove items the client has already seen."""
  def __init__(self, request, migration_history=None):
    self._original_request = request
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile Bookmarks'
    folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
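
  # Illustrative note (added for clarity, not part of the original file):
  # the progress-marker token is an opaque pickled (timestamp,
  # migration_version) pair, so a round trip looks like:
  #   token = pickle.dumps((15, 1))
  #   pickle.loads(token)  -> (15, 1)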


class SyncDataModel(object):
  """Models the account state of one sync user."""
  _BATCH_SIZE = 100

  # Specify all the permanent items that a model might need.
  _PERMANENT_ITEM_SPECS = [
      PermanentItem('google_chrome_apps', name='Apps',
                    parent_tag=ROOT_ID, sync_type=APPS),
      PermanentItem('google_chrome_app_list', name='App List',
                    parent_tag=ROOT_ID, sync_type=APP_LIST),
      PermanentItem('google_chrome_app_notifications',
                    name='App Notifications',
                    parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
      PermanentItem('google_chrome_app_settings',
                    name='App Settings',
                    parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
      PermanentItem('google_chrome_bookmarks', name='Bookmarks',
                    parent_tag=ROOT_ID, sync_type=BOOKMARK),
      PermanentItem('bookmark_bar', name='Bookmark Bar',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('other_bookmarks', name='Other Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('synced_bookmarks', name='Synced Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
                    create_by_default=False),
      PermanentItem('google_chrome_autofill', name='Autofill',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL),
      PermanentItem('google_chrome_autofill_profiles',
                    name='Autofill Profiles',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
      PermanentItem('google_chrome_device_info', name='Device Info',
                    parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
      PermanentItem('google_chrome_experiments', name='Experiments',
                    parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
      PermanentItem('google_chrome_extension_settings',
                    name='Extension Settings',
                    parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
      PermanentItem('google_chrome_extensions', name='Extensions',
                    parent_tag=ROOT_ID, sync_type=EXTENSIONS),
      PermanentItem('google_chrome_history_delete_directives',
                    name='History Delete Directives',
                    parent_tag=ROOT_ID,
                    sync_type=HISTORY_DELETE_DIRECTIVE),
      PermanentItem('google_chrome_favicon_images',
                    name='Favicon Images',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_IMAGES),
      PermanentItem('google_chrome_favicon_tracking',
                    name='Favicon Tracking',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_TRACKING),
      PermanentItem('google_chrome_managed_user_settings',
                    name='Managed User Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
      PermanentItem('google_chrome_managed_users',
                    name='Managed Users',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER),
      PermanentItem('google_chrome_managed_user_shared_settings',
                    name='Managed User Shared Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
      PermanentItem('google_chrome_nigori', name='Nigori',
                    parent_tag=ROOT_ID, sync_type=NIGORI),
      PermanentItem('google_chrome_passwords', name='Passwords',
                    parent_tag=ROOT_ID, sync_type=PASSWORD),
      PermanentItem('google_chrome_preferences', name='Preferences',
                    parent_tag=ROOT_ID, sync_type=PREFERENCE),
      PermanentItem('google_chrome_priority_preferences',
                    name='Priority Preferences',
                    parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
      PermanentItem('google_chrome_synced_notifications',
                    name='Synced Notifications',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
      PermanentItem('google_chrome_synced_notification_app_info',
                    name='Synced Notification App Info',
                    parent_tag=ROOT_ID,
                    sync_type=SYNCED_NOTIFICATION_APP_INFO),
      PermanentItem('google_chrome_search_engines', name='Search Engines',
                    parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
      PermanentItem('google_chrome_sessions', name='Sessions',
                    parent_tag=ROOT_ID, sync_type=SESSION),
      PermanentItem('google_chrome_themes', name='Themes',
                    parent_tag=ROOT_ID, sync_type=THEME),
      PermanentItem('google_chrome_typed_urls', name='Typed URLs',
                    parent_tag=ROOT_ID, sync_type=TYPED_URL),
      PermanentItem('google_chrome_dictionary', name='Dictionary',
                    parent_tag=ROOT_ID, sync_type=DICTIONARY),
      PermanentItem('google_chrome_articles', name='Articles',
                    parent_tag=ROOT_ID, sync_type=ARTICLE),
  ]

  def __init__(self):
    # Monotonically increasing version number. The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    self.ResetStoreBirthday()
    self.migration_history = MigrationHistory()
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    self._keys = [MakeNewKeystoreKey()]

  def _SaveEntry(self, entry):
    """Insert or update an entry in the change log, and give it a new version.

    The ID fields of this entry are assumed to be valid server IDs. This
    entry will be updated with a new version number and sync_timestamp.

    Args:
      entry: The entry to be added or updated.
    """
    self._version += 1
    # Maintain a global (rather than per-item) sequence number and use it
    # both as the per-entry version as well as the update-progress timestamp.
    # This simulates the behavior of the original server implementation.
    entry.version = self._version
    entry.sync_timestamp = self._version

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry:
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    self._entries[entry.id_string] = copy.deepcopy(entry)

  def _ServerTagToId(self, tag):
    """Determine the server ID from a server-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      tag: The unique, known-to-the-client tag of a server-generated item.
    Returns:
      The string value of the computed server ID.
    """
    if not tag or tag == ROOT_ID:
      return tag
    spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)

  def _TypeToTypeRootId(self, model_type):
    """Returns the server ID for the type root node of the given type."""
    tag = [x.tag for x in self._PERMANENT_ITEM_SPECS
           if x.sync_type == model_type][0]
    return self._ServerTagToId(tag)

  def _ClientTagToId(self, datatype, tag):
    """Determine the server ID from a client-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      tag: The unique, opaque-to-the-server tag of a client-tagged item.
    Returns:
      The string value of the computed server ID.
    """
    return self._MakeCurrentId(datatype, '<client tag>%s' % tag)

  def _ClientIdToId(self, datatype, client_guid, client_item_id):
    """Compute a unique server ID from a client-local ID tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      client_guid: A globally unique ID that identifies the client which
        created this item.
      client_item_id: An ID that uniquely identifies this item on the client
        which created it.
    Returns:
      The string value of the computed server ID.
    """
    # Using the client ID info is not required here (we could instead generate
    # a random ID), but it's useful for debugging.
    return self._MakeCurrentId(datatype,
        '<server ID originally>%s/%s' % (client_guid, client_item_id))

  def _MakeCurrentId(self, datatype, inner_id):
    return '%d^%d^%s' % (datatype,
                         self.migration_history.GetLatestVersion(datatype),
                         inner_id)

  def _ExtractIdInfo(self, id_string):
    if not id_string or id_string == ROOT_ID:
      return None
    datatype_string, separator, remainder = id_string.partition('^')
    migration_version_string, separator, inner_id = remainder.partition('^')
    return (int(datatype_string), int(migration_version_string), inner_id)
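
  # Illustrative example (added for clarity, not part of the original file):
  # server IDs have the form '<datatype>^<migration version>^<inner id>', so
  # for a bookmark (BOOKMARK == 8) with client tag 'xyz', before any
  # migrations:
  #   self._ClientTagToId(BOOKMARK, 'xyz')        -> '8^1^<client tag>xyz'
  #   self._ExtractIdInfo('8^1^<client tag>xyz')  -> (8, 1, '<client tag>xyz')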

  def _WritePosition(self, entry, parent_id):
    """Ensure the entry has an absolute, numeric position and parent_id.

    Historically, clients would specify positions using the predecessor-based
    references in the insert_after_item_id field; starting July 2011, this
    was changed and Chrome now sends up the absolute position. The server
    must store a position_in_parent value and must not maintain
    insert_after_item_id.
    Starting in Jan 2013, the client will also send up a unique_position field
    which should be saved and returned on subsequent GetUpdates.

    Args:
      entry: The entry for which to write a position. Its ID fields are
        assumed to be server IDs. This entry will have its parent_id_string,
        position_in_parent and unique_position fields updated; its
        insert_after_item_id field will be cleared.
      parent_id: The ID of the entry intended as the new parent.
    """
    entry.parent_id_string = parent_id
    if not entry.HasField('position_in_parent'):
      entry.position_in_parent = 1337  # A debuggable, distinctive default.
    entry.ClearField('insert_after_item_id')

  def _ItemExists(self, id_string):
    """Determine whether an item exists in the changelog."""
    return id_string in self._entries

  def _CreatePermanentItem(self, spec):
    """Create one permanent item from its spec, if it doesn't exist.

    The resulting item is added to the changelog.

    Args:
      spec: A PermanentItem object holding the properties of the item to
        create.
    """
    id_string = self._ServerTagToId(spec.tag)
    if self._ItemExists(id_string):
      return
    print 'Creating permanent item: %s' % spec.name
    entry = sync_pb2.SyncEntity()
    entry.id_string = id_string
    entry.non_unique_name = spec.name
    entry.name = spec.name
    entry.server_defined_unique_tag = spec.tag
    entry.folder = True
    entry.deleted = False
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    self._SaveEntry(entry)

  def _CreateDefaultPermanentItems(self, requested_types):
    """Ensure creation of all default permanent items for a given set of types.

    Args:
      requested_types: A list of sync data types from ALL_TYPES.
        All default permanent items of only these types will be created.
    """
    for spec in self._PERMANENT_ITEM_SPECS:
      if spec.sync_type in requested_types and spec.create_by_default:
        self._CreatePermanentItem(spec)

  def ResetStoreBirthday(self):
    """Resets the store birthday to a random value."""
    # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    self.store_birthday = '%0.30f' % random.random()

  def StoreBirthday(self):
    """Gets the store birthday."""
    return self.store_birthday

  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    At most _BATCH_SIZE entries are returned, in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
        has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining). Version is a new
      timestamp value, which should be used as the starting point for the
      next query. Entries is the batch of entries meeting the current
      timestamp query. Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # The Synced Bookmarks folder is not created by default; create it only
    # when the client has requested it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick batch_size new changes, and then filter them. This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict batch to requested types. Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))

  def GetKeystoreKeys(self):
    """Returns the encryption keys for this account."""
    print "Returning encryption keys: %s" % self._keys
    return self._keys

  def _CopyOverImmutableFields(self, entry):
    """Preserve immutable fields by copying pre-commit state.

    Args:
      entry: A sync entity from the client.
    """
    if entry.id_string in self._entries:
      if self._entries[entry.id_string].HasField(
          'server_defined_unique_tag'):
        entry.server_defined_unique_tag = (
            self._entries[entry.id_string].server_defined_unique_tag)

  def _CheckVersionForCommit(self, entry):
    """Perform an optimistic concurrency check on the version number.

    Clients are only allowed to commit if they report having seen the most
    recent version of an object.

    Args:
      entry: A sync entity from the client. It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the client's version matches the
      newest server version for the given entry.
    """
    if entry.id_string in self._entries:
      # Allow edits/deletes if the version matches, and any undeletion.
      return (self._entries[entry.id_string].version == entry.version or
              self._entries[entry.id_string].deleted)
    else:
      # Allow unknown ID only if the client thinks it's new too.
      return entry.version == 0
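
  # Illustrative example (added for clarity, not part of the original file):
  # if the server holds an entry at version 7, a commit carrying version 7
  # passes the check (the client has seen the latest state), a commit
  # carrying version 6 fails (some other client committed in between), and a
  # brand-new item must carry version 0.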

  def _CheckParentIdForCommit(self, entry):
    """Check that the parent ID referenced in a SyncEntity actually exists.

    Args:
      entry: A sync entity from the client. It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the entity's parent ID is an object
      that actually exists (and is not deleted) in the current account state.
    """
    if entry.parent_id_string == ROOT_ID:
      # This is generally allowed.
      return True
    if (not entry.HasField('parent_id_string') and
        entry.HasField('client_defined_unique_tag')):
      return True  # Unique client tag items do not need to specify a parent.
    if entry.parent_id_string not in self._entries:
      print 'Warning: Client sent unknown ID. Should never happen.'
      return False
    if entry.parent_id_string == entry.id_string:
      print 'Warning: Client sent circular reference. Should never happen.'
      return False
    if self._entries[entry.parent_id_string].deleted:
      # This can happen in a race condition between two clients.
      return False
    if not self._entries[entry.parent_id_string].folder:
      print 'Warning: Client sent non-folder parent. Should never happen.'
      return False
    return True

  def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
    """Convert ID fields in a client sync entry to server IDs.

    A commit batch sent by a client may contain new items for which the
    server has not generated IDs yet. And within a commit batch, later
    items are allowed to refer to earlier items. This method will
    generate server IDs for new items, as well as rewrite references
    to items whose server IDs were generated earlier in the batch.

    Args:
      entry: The client sync entry to modify.
      cache_guid: The globally unique ID of the client that sent this
        commit request.
      commit_session: A dictionary mapping the original IDs to the new server
        IDs, for any items committed earlier in the batch.
    """
    if entry.version == 0:
      data_type = GetEntryType(entry)
      if entry.HasField('client_defined_unique_tag'):
        # When present, this should determine the item's ID.
        new_id = self._ClientTagToId(data_type,
                                     entry.client_defined_unique_tag)
      else:
        new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
        entry.originator_cache_guid = cache_guid
        entry.originator_client_item_id = entry.id_string
      commit_session[entry.id_string] = new_id  # Remember the remapping.
      entry.id_string = new_id
    if entry.parent_id_string in commit_session:
      entry.parent_id_string = commit_session[entry.parent_id_string]
    if entry.insert_after_item_id in commit_session:
      entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
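
  # Illustrative example (added for clarity, not part of the original file):
  # a client commits a new folder with local ID 'c1' and a new child whose
  # parent_id_string is 'c1'. After the folder is processed, commit_session
  # maps 'c1' to its freshly generated server ID, so the child's parent
  # reference is rewritten to that server ID when the child is processed
  # later in the same batch.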

  def ValidateCommitEntries(self, entries):
    """Raise an exception if a commit batch contains any global errors.

    Arguments:
      entries: an iterable containing commit-form SyncEntity protocol buffers.

    Raises:
      MigrationDoneError: if any of the entries reference a recently-migrated
        datatype.
    """
    server_ids_in_commit = set()
    local_ids_in_commit = set()
    for entry in entries:
      if entry.version:
        server_ids_in_commit.add(entry.id_string)
      else:
        local_ids_in_commit.add(entry.id_string)
      if entry.HasField('parent_id_string'):
        if entry.parent_id_string not in local_ids_in_commit:
          server_ids_in_commit.add(entry.parent_id_string)

    versions_present = {}
    for server_id in server_ids_in_commit:
      parsed = self._ExtractIdInfo(server_id)
      if parsed:
        datatype, version, _ = parsed
        versions_present.setdefault(datatype, []).append(version)

    self.migration_history.CheckAllCurrent(
        dict((k, min(v)) for k, v in versions_present.iteritems()))

  def CommitEntry(self, entry, cache_guid, commit_session):
    """Attempt to commit one entry to the user's account.

    Args:
      entry: A SyncEntity protobuf representing desired object changes.
      cache_guid: A string value uniquely identifying the client; this
        is used for ID generation and will determine the originator_cache_guid
        if the entry is new.
      commit_session: A dictionary mapping client IDs to server IDs for any
        objects committed earlier this session. If the entry gets a new ID
        during commit, the change will be recorded here.
    Returns:
      A SyncEntity reflecting the post-commit value of the entry, or None
      if the entry was not committed due to an error.
    """
    entry = copy.deepcopy(entry)

    # Generate server IDs for this entry, and write generated server IDs
    # from earlier entries into the message's fields, as appropriate. The
    # ID generation state is stored in 'commit_session'.
    self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

    # Sets the parent ID field for a client-tagged item. The client is allowed
    # to not specify parents for these types of items. The server can figure
    # out on its own what the parent ID for this entry should be.
    self._RewriteParentIdForUniqueClientEntry(entry)

    # Perform the optimistic concurrency check on the entry's version number.
    # Clients are not allowed to commit unless they indicate that they've seen
    # the most recent version of an object.
    if not self._CheckVersionForCommit(entry):
      return None

    # Check the validity of the parent ID; it must exist at this point.
    # TODO(nick): Implement cycle detection and resolution.
    if not self._CheckParentIdForCommit(entry):
      return None

    self._CopyOverImmutableFields(entry)

    # At this point, the commit is definitely going to happen.

    # Deletion works by storing a limited record for an entry, called a
    # tombstone. A sync server must track deleted IDs forever, since it does
    # not keep track of client knowledge (there's no deletion ACK event).
    if entry.deleted:
      def MakeTombstone(id_string, datatype):
        """Make a tombstone entry that will replace the entry being deleted.

        Args:
          id_string: Index of the SyncEntity to be deleted.
          datatype: The sync type (python enum) of the entry being deleted.
        Returns:
          A new SyncEntity reflecting the fact that the entry is deleted.
        """
        # Only the ID, version and deletion state are preserved on a tombstone.
        tombstone = sync_pb2.SyncEntity()
        tombstone.id_string = id_string
        tombstone.deleted = True
        tombstone.name = ''
        tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
        return tombstone

      def IsChild(child_id):
        """Check if a SyncEntity is a child of entry, or any of its children.

        Args:
          child_id: Index of the SyncEntity that is a possible child of entry.
        Returns:
          True if it is a child; false otherwise.
        """
        if child_id not in self._entries:
          return False
        if self._entries[child_id].parent_id_string == entry.id_string:
          return True
        return IsChild(self._entries[child_id].parent_id_string)

      # Identify any children entry might have.
      child_ids = [child.id_string for child in self._entries.itervalues()
                   if IsChild(child.id_string)]

      # Mark all children that were identified as deleted.
      for child_id in child_ids:
        datatype = GetEntryType(self._entries[child_id])
        self._SaveEntry(MakeTombstone(child_id, datatype))

      # Delete entry itself.
      datatype = GetEntryType(self._entries[entry.id_string])
      entry = MakeTombstone(entry.id_string, datatype)
    else:
      # Comments in sync.proto detail how the representation of positional
      # ordering works.
      #
      # We've almost fully deprecated the 'insert_after_item_id' field.
      # The 'position_in_parent' field is also deprecated, but as of Jan 2013
      # is still in common use. The 'unique_position' field is the latest
      # and greatest in positioning technology.
      #
      # This server supports 'position_in_parent' and 'unique_position'.
      self._WritePosition(entry, entry.parent_id_string)

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry and not entry.HasField('originator_cache_guid'):
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Store the current time since the Unix epoch in milliseconds.
    entry.mtime = (int((time.mktime(time.gmtime()) -
        (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS)) * 1000))

    # Commit the change. This also updates the version number.
    self._SaveEntry(entry)
    return entry

  def _RewriteVersionInId(self, id_string):
    """Rewrites an ID so that its migration version becomes current."""
    parsed_id = self._ExtractIdInfo(id_string)
    if not parsed_id:
      return id_string
    datatype, old_migration_version, inner_id = parsed_id
    return self._MakeCurrentId(datatype, inner_id)

  def _RewriteParentIdForUniqueClientEntry(self, entry):
    """Sets the entry's parent ID field to the appropriate value.

    The client must always set enough of the specifics of the entries it sends
    up such that the server can identify its type. (See crbug.com/373859)

    The client is under no obligation to set the parent ID field. The server
    can always infer what the appropriate parent for this model type should be.
    Having the client not send the parent ID is a step towards the removal of
    type root nodes. (See crbug.com/373869)

    This server implements these features by "faking" the existence of a
    parent ID early on in the commit processing.

    This function has no effect on non-client-tagged items.
    """
    if not entry.HasField('client_defined_unique_tag'):
      return  # Skip this processing for non-client-tagged types.
    data_type = GetEntryType(entry)
    entry.parent_id_string = self._TypeToTypeRootId(data_type)

  def TriggerMigration(self, datatypes):
    """Cause a migration to occur for a set of datatypes on this account.

    Clients will see the MIGRATION_DONE error for these datatypes until they
    resync them.
    """
    self.migration_history.Bump(datatypes)
    all_entries = self._entries.values()
    self._entries.clear()
    for entry in all_entries:
      new_id = self._RewriteVersionInId(entry.id_string)
      entry.id_string = new_id
      if entry.HasField('parent_id_string'):
        entry.parent_id_string = self._RewriteVersionInId(
            entry.parent_id_string)
      self._entries[entry.id_string] = entry

  def TriggerSyncTabFavicons(self):
    """Set the 'sync_tab_favicons' field on this account's nigori node.

    If the field is not currently set, will write a new nigori node entry
    with the field set. Else does nothing.
    """

    nigori_tag = "google_chrome_nigori"
    nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
    if (nigori_original.specifics.nigori.sync_tab_favicons):
      return
    nigori_new = copy.deepcopy(nigori_original)
    nigori_new.specifics.nigori.sync_tab_favicons = True
    self._SaveEntry(nigori_new)

  def TriggerCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under the Bookmarks permanent item.

    Clients will then receive the Synced Bookmarks folder on future
    GetUpdates, and new bookmarks can be added within the Synced Bookmarks
    folder.
    """

    synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
                              if spec.name == "Synced Bookmarks"]
    self._CreatePermanentItem(synced_bookmarks_spec)

  def TriggerEnableKeystoreEncryption(self):
    """Create the keystore_encryption experiment entity and enable it.

    A new entity within the EXPERIMENTS datatype is created with the unique
    client tag "keystore_encryption" if it doesn't already exist. The
    keystore_encryption message is then filled with |enabled| set to true.
    """

    experiment_id = self._ServerTagToId("google_chrome_experiments")
    keystore_encryption_id = self._ClientTagToId(
        EXPERIMENTS,
        KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
    keystore_entry = self._entries.get(keystore_encryption_id)
    if keystore_entry is None:
      keystore_entry = sync_pb2.SyncEntity()
      keystore_entry.id_string = keystore_encryption_id
      keystore_entry.name = "Keystore Encryption"
      keystore_entry.client_defined_unique_tag = (
          KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
      keystore_entry.folder = False
      keystore_entry.deleted = False
      keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(keystore_entry, experiment_id)

    keystore_entry.specifics.experiments.keystore_encryption.enabled = True

    self._SaveEntry(keystore_entry)

  def TriggerRotateKeystoreKeys(self):
    """Rotate the current set of keystore encryption keys.

    |self._keys| will have a new random encryption key appended to it. We touch
    the nigori node so that each client will receive the new encryption keys
    only once.
    """

    # Add a new encryption key.
    self._keys += [MakeNewKeystoreKey(), ]

    # Increment the nigori node's timestamp, so clients will get the new keys
    # on their next GetUpdates (any time the nigori node is sent back, we also
    # send back the keystore keys).
    nigori_tag = "google_chrome_nigori"
    self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))

  def TriggerAcknowledgeManagedUsers(self):
    """Set the "acknowledged" flag for any managed user entities that don't
    have it set already.
    """

    if not self.acknowledge_managed_users:
      return

    managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
                     if entry.specifics.HasField('managed_user')
                     and not entry.specifics.managed_user.acknowledged]
    for user in managed_users:
      user.specifics.managed_user.acknowledged = True
      self._SaveEntry(user)

  def TriggerEnablePreCommitGetUpdateAvoidance(self):
    """Sets the experiment to enable pre-commit GetUpdate avoidance."""
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    pre_commit_gu_avoidance_id = self._ClientTagToId(
        EXPERIMENTS,
        PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
    entry = self._entries.get(pre_commit_gu_avoidance_id)
    if entry is None:
      entry = sync_pb2.SyncEntity()
      entry.id_string = pre_commit_gu_avoidance_id
      entry.name = "Pre-commit GU avoidance"
      entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
      entry.folder = False
      entry.deleted = False
      entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(entry, experiment_id)
    entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
    self._SaveEntry(entry)

  def SetInducedError(self, error, error_frequency,
                      sync_count_before_errors):
    self.induced_error = error
    self.induced_error_frequency = error_frequency
    self.sync_count_before_errors = sync_count_before_errors

  def GetInducedError(self):
    return self.induced_error

  def AddSyncedNotification(self, serialized_notification):
    """Adds a synced notification to the server data.

    The notification will be delivered to the client on the next GetUpdates
    call.

    Args:
      serialized_notification: A serialized CoalescedSyncedNotification.

    Returns:
      The string representation of the added SyncEntity.

    Raises:
      ClientNotConnectedError: if the client has not yet connected to this
        server.
    """
    # A unique string used wherever a unique ID for this notification is
    # required.
    unique_notification_id = str(uuid.uuid4())

    specifics = self._CreateSyncedNotificationEntitySpecifics(
        unique_notification_id, serialized_notification)

    # Create the root SyncEntity representing a single notification.
    entity = sync_pb2.SyncEntity()
    entity.specifics.CopyFrom(specifics)
    entity.parent_id_string = self._ServerTagToId(
        'google_chrome_synced_notifications')
    entity.name = 'Synced notification added for testing'
    entity.version = self._GetNextVersionNumber()

    entity.client_defined_unique_tag = self._CreateSyncedNotificationClientTag(
        specifics.synced_notification.coalesced_notification.key)
    entity.id_string = self._ClientTagToId(GetEntryType(entity),
                                           entity.client_defined_unique_tag)

    self._entries[entity.id_string] = copy.deepcopy(entity)

    return google.protobuf.text_format.MessageToString(entity)

  def _GetNextVersionNumber(self):
    """Returns one more than the greatest version number seen so far."""
    entries = sorted(self._entries.values(),
                     key=operator.attrgetter('version'))
    if len(entries) < 1:
      raise ClientNotConnectedError
    return entries[-1].version + 1

  def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
                                               serialized_notification):
    """Create the EntitySpecifics proto for a synced notification."""
    coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
    google.protobuf.text_format.Merge(serialized_notification, coalesced)

    # Override the provided key so that we have a unique one.
    coalesced.key = unique_id

    specifics = sync_pb2.EntitySpecifics()
    notification_specifics = \
        synced_notification_specifics_pb2.SyncedNotificationSpecifics()
    notification_specifics.coalesced_notification.CopyFrom(coalesced)
    specifics.synced_notification.CopyFrom(notification_specifics)

    return specifics

  def _CreateSyncedNotificationClientTag(self, key):
    """Create the client_defined_unique_tag value for a SyncedNotification.

    Args:
      key: The entity used to create the client tag.

    Returns:
      The string value to be used as the client_defined_unique_tag.
    """
    serialized_type = sync_pb2.EntitySpecifics()
    specifics = synced_notification_specifics_pb2.SyncedNotificationSpecifics()
    serialized_type.synced_notification.CopyFrom(specifics)
    hash_input = serialized_type.SerializeToString() + key
    return base64.b64encode(hashlib.sha1(hash_input).digest())
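
  # Illustrative note (added for clarity, not part of the original file):
  # the tag is derived by hashing a serialized EntitySpecifics containing an
  # empty synced_notification field (which pins the data type) concatenated
  # with the notification key, i.e. base64(sha1(type_marker + key)); this
  # parallels how hashed client tags are computed for other datatypes.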

  def AddSyncedNotificationAppInfo(self, app_info):
    """Adds an app info struct to the server data.

    The notification will be delivered to the client on the next GetUpdates
    call.

    Args:
      app_info: A serialized AppInfo.

    Returns:
      The string representation of the added SyncEntity.

    Raises:
      ClientNotConnectedError: if the client has not yet connected to this
        server.
    """
    specifics = self._CreateSyncedNotificationAppInfoEntitySpecifics(app_info)

    # Create the root SyncEntity representing a single app info protobuf.
    entity = sync_pb2.SyncEntity()
    entity.specifics.CopyFrom(specifics)
    entity.parent_id_string = self._ServerTagToId(
        'google_chrome_synced_notification_app_info')
    entity.name = 'App info added for testing'
    entity.version = self._GetNextVersionNumber()

    # App infos do not have a strong ID; the tag only needs to be unique.
    entity.client_defined_unique_tag = "foo"
    entity.id_string = "foo"

    self._entries[entity.id_string] = copy.deepcopy(entity)

    print "entity before exit is ", entity

    return google.protobuf.text_format.MessageToString(entity)

  def _CreateSyncedNotificationAppInfoEntitySpecifics(
      self, synced_notification_app_info):
    """Create the EntitySpecifics proto for a synced notification app info."""
    # Create a single, empty app_info object.
    app_info = \
        synced_notification_app_info_specifics_pb2.SyncedNotificationAppInfo()
    # Fill the app_info object from the text format protobuf.
    google.protobuf.text_format.Merge(synced_notification_app_info, app_info)

    # Create a new specifics object with a contained app_info.
    specifics = sync_pb2.EntitySpecifics()
    app_info_specifics = \
        synced_notification_app_info_specifics_pb2.\
        SyncedNotificationAppInfoSpecifics()

    # Copy the app info from the text format protobuf.
    contained_app_info = app_info_specifics.synced_notification_app_info.add()
    contained_app_info.CopyFrom(app_info)

    # And put the new app_info_specifics into the specifics before returning.
    specifics.synced_notification_app_info.CopyFrom(app_info_specifics)

    return specifics


class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None

  def GetShortClientName(self, query):
    parsed = cgi.parse_qs(query[query.find('?')+1:])
    client_id = parsed.get('client_id')
    if not client_id:
      return '?'
    client_id = client_id[0]
    if client_id not in self.clients:
      self.clients[client_id] = self.client_name_generator.next()
    return self.clients[client_id]

  def CheckStoreBirthday(self, request):
    """Raises StoreBirthdayError if the request's birthday is a mismatch."""
    if not request.HasField('store_birthday'):
      return
    if self.account.StoreBirthday() != request.store_birthday:
      raise StoreBirthdayError

  def CheckTransientError(self):
    """Raises TransientError if transient_error variable is set."""
    if self.transient_error:
      raise TransientError

  def CheckSendError(self):
    """Raises SyncInducedError if needed."""
    if (self.account.induced_error.error_type !=
        sync_enums_pb2.SyncEnums.UNKNOWN):
      # ERROR_FREQUENCY_ALWAYS means return the given error for all requests.
      if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
        raise SyncInducedError
      # ERROR_FREQUENCY_TWO_THIRDS means the FIRST 2 requests of every 3
      # requests return an error. Don't switch the order of failures. There
      # are test cases that rely on the first 2 being the failure rather than
      # the last 2.
      elif (self.account.induced_error_frequency ==
            ERROR_FREQUENCY_TWO_THIRDS):
        if (((self.sync_count -
              self.account.sync_count_before_errors) % 3) != 0):
          raise SyncInducedError
      else:
        raise InducedErrorFrequencyNotDefined

  def HandleMigrate(self, path):
    query = urlparse.urlparse(path)[4]
    code = 200
    self.account_lock.acquire()
    try:
      datatypes = [DataTypeStringToSyncTypeLoose(x)
                   for x in urlparse.parse_qs(query).get('type', [])]
      if datatypes:
        self.account.TriggerMigration(datatypes)
        response = 'Migrated datatypes %s' % (
            ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
      else:
        response = 'Please specify one or more <i>type=name</i> parameters'
        code = 400
    except DataTypeIdNotRecognized, error:
      response = 'Could not interpret datatype name'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))

  def HandleSetInducedError(self, path):
    query = urlparse.urlparse(path)[4]
    self.account_lock.acquire()
    code = 200
    response = 'Success'
    error = sync_pb2.ClientToServerResponse.Error()
    try:
      params = urlparse.parse_qs(query)
      error.error_type = int(params['error'][0])
      error.action = int(params['action'][0])
      try:
        error.url = params['url'][0]
      except KeyError:
        error.url = ''
      try:
        error.error_description = params['error_description'][0]
      except KeyError:
        error.error_description = ''
      try:
        error_frequency = int(params['frequency'][0])
      except KeyError:
        error_frequency = ERROR_FREQUENCY_ALWAYS
      self.account.SetInducedError(error, error_frequency, self.sync_count)
      response = ('Error = %d, action = %d, url = %s, description = %s' %
                  (error.error_type, error.action,
                   error.url,
                   error.error_description))
    except (KeyError, ValueError):
      # Raised when a required parameter is missing or not an integer.
      response = 'Could not parse url'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))
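
  # Illustrative trigger: a query string of the form
  # '?error=<int>&action=<int>&url=u&error_description=d&frequency=<int>',
  # where 'error' and 'action' are required and the remaining parameters
  # fall back to '' / ERROR_FREQUENCY_ALWAYS as handled above.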

  def HandleCreateBirthdayError(self):
    self.account.ResetStoreBirthday()
    return (
        200,
        '<html><title>Birthday error</title><H1>Birthday error</H1></html>')

  def HandleSetTransientError(self):
    self.transient_error = True
    return (
        200,
        '<html><title>Transient error</title><H1>Transient error</H1></html>')

  def HandleSetSyncTabFavicons(self):
    """Set 'sync_tab_favicons' field of the nigori node for this account."""
    self.account.TriggerSyncTabFavicons()
    return (
        200,
        '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')

  def HandleCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under Bookmarks."""
    self.account.TriggerCreateSyncedBookmarks()
    return (
        200,
        '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')

  def HandleEnableKeystoreEncryption(self):
    """Enables the keystore encryption experiment."""
    self.account.TriggerEnableKeystoreEncryption()
    return (
        200,
        '<html><title>Enable Keystore Encryption</title>'
        '<H1>Enable Keystore Encryption</H1></html>')

  def HandleRotateKeystoreKeys(self):
    """Rotate the keystore encryption keys."""
    self.account.TriggerRotateKeystoreKeys()
    return (
        200,
        '<html><title>Rotate Keystore Keys</title>'
        '<H1>Rotate Keystore Keys</H1></html>')

  def HandleEnableManagedUserAcknowledgement(self):
    """Enable acknowledging newly created managed users."""
    self.account.acknowledge_managed_users = True
    return (
        200,
        '<html><title>Enable Managed User Acknowledgement</title>'
        '<H1>Enable Managed User Acknowledgement</H1></html>')

  def HandleEnablePreCommitGetUpdateAvoidance(self):
    """Enables the pre-commit GU avoidance experiment."""
    self.account.TriggerEnablePreCommitGetUpdateAvoidance()
    return (
        200,
        '<html><title>Enable pre-commit GU avoidance</title>'
        '<H1>Enable pre-commit GU avoidance</H1></html>')

  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class. It is safe to call this
    method from multiple threads.

    Args:
      query: The query string of the request URL, used here only to identify
        the client in log output.
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
        result code, while the second value is a string of bytes which is the
        serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    def print_context(direction):
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query),
                                      direction, __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError, error:
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError, error:
      # This handler is deprecated; it will be removed once the test cases
      # that rely on it are removed.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError, error:
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()
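
  # Note that every recognized-but-failing request above still returns HTTP
  # 200 with a serialized ClientToServerResponse whose error_code describes
  # the failure; only an unrecognizable message_contents produces a bare
  # HTTP 400.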

  def HandleCommit(self, commit_message, commit_response):
    """Respond to a Commit request by updating the user's account state.

    Commit attempts stop after the first error, returning a CONFLICT result
    for any unattempted entries.

    Args:
      commit_message: A sync_pb.CommitMessage protobuf holding the content
        of the client's request.
      commit_response: A sync_pb.CommitResponse protobuf into which a reply
        to the client request will be written.
    """
    commit_response.SetInParent()
    batch_failure = False
    session = {}  # Tracks ID renaming during the commit operation.
    guid = commit_message.cache_guid

    self.account.ValidateCommitEntries(commit_message.entries)

    for entry in commit_message.entries:
      server_entry = None
      if not batch_failure:
        # Try to commit the change to the account.
        server_entry = self.account.CommitEntry(entry, guid, session)

      # An entryresponse is returned in both success and failure cases.
      reply = commit_response.entryresponse.add()
      if not server_entry:
        reply.response_type = sync_pb2.CommitResponse.CONFLICT
        reply.error_message = 'Conflict.'
        batch_failure = True  # One failure halts the batch.
      else:
        reply.response_type = sync_pb2.CommitResponse.SUCCESS
        # These are the properties that the server is allowed to override
        # during commit; the client wants to know their values at the end
        # of the operation.
        reply.id_string = server_entry.id_string
        if not server_entry.deleted:
          # Note: the production server doesn't actually send the
          # parent_id_string on commit responses, so we don't either.
          reply.position_in_parent = server_entry.position_in_parent
          reply.version = server_entry.version
          reply.name = server_entry.name
          reply.non_unique_name = server_entry.non_unique_name
        else:
          reply.version = entry.version + 1
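
  # Worked example: if the third of five entries fails to commit, entries
  # one and two receive SUCCESS replies while entries three, four and five
  # all receive CONFLICT, because batch_failure suppresses further
  # CommitEntry attempts.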

  def HandleGetUpdates(self, update_request, update_response):
    """Respond to a GetUpdates request by querying the user's account.

    Args:
      update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
        of the client's request.
      update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
        to the client request will be written.
    """
    update_response.SetInParent()
    update_sieve = UpdateSieve(update_request, self.account.migration_history)

    print CallerInfoToString(update_request.caller_info.source),
    print update_sieve.SummarizeRequest()

    update_sieve.CheckMigrationState()

    new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)

    update_response.changes_remaining = remaining
    sending_nigori_node = False
    for entry in entries:
      if entry.name == 'Nigori':
        sending_nigori_node = True
      reply = update_response.entries.add()
      reply.CopyFrom(entry)
    update_sieve.SaveProgress(new_timestamp, update_response)

    if update_request.need_encryption_key or sending_nigori_node:
      update_response.encryption_keys.extend(self.account.GetKeystoreKeys())
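
  # Note: keystore keys accompany a GetUpdates response whenever the client
  # requests them explicitly (need_encryption_key) or whenever the Nigori
  # node itself is among the returned entries, as checked above.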

  def HandleGetOauth2Token(self):
    return (int(self.response_code),
            '{\n'
            ' "refresh_token": "' + self.request_token + '",\n'
            ' "access_token": "' + self.access_token + '",\n'
            ' "expires_in": ' + str(self.expires_in) + ',\n'
            ' "token_type": "' + self.token_type + '"\n'
            '}')
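
  # Worked example: with the default values set in the constructor, this
  # returns status 200 with the body:
  #   {
  #    "refresh_token": "rt1",
  #    "access_token": "at1",
  #    "expires_in": 3600,
  #    "token_type": "Bearer"
  #   }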

  def HandleSetOauth2Token(self, response_code, request_token, access_token,
                           expires_in, token_type):
    if response_code != 0:
      self.response_code = response_code
    if request_token != '':
      self.request_token = request_token
    if access_token != '':
      self.access_token = access_token
    if expires_in != 0:
      self.expires_in = expires_in
    if token_type != '':
      self.token_type = token_type

    return (200,
            '<html><title>Set OAuth2 Token</title>'
            '<H1>This server will now return the OAuth2 Token:</H1>'
            '<p>response_code: ' + str(self.response_code) + '</p>'
            '<p>request_token: ' + self.request_token + '</p>'
            '<p>access_token: ' + self.access_token + '</p>'
            '<p>expires_in: ' + str(self.expires_in) + '</p>'
            '<p>token_type: ' + self.token_type + '</p>'
            '</html>')
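
  # Note: 0 and '' mean "leave unchanged" above, so a caller can override
  # any subset of the OAuth2 token fields while keeping the rest.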

  def CustomizeClientCommand(self, sessions_commit_delay_seconds):
    """Customizes the value of the ClientCommand of ServerToClientResponse.

    Currently, this only allows for changing the sessions_commit_delay_seconds
    field. This is useful for testing in conjunction with
    AddSyncedNotification so that synced notifications are seen immediately
    after triggering them with an HTTP call to the test server.

    Args:
      sessions_commit_delay_seconds: The desired sync delay time for sessions.
    """
    if not self._client_command:
      self._client_command = client_commands_pb2.ClientCommand()

    self._client_command.sessions_commit_delay_seconds = (
        sessions_commit_delay_seconds)
    return self._client_command
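
  # Illustrative usage: CustomizeClientCommand(0) makes every subsequent
  # ServerToClientResponse carry a ClientCommand whose
  # sessions_commit_delay_seconds is 0, so session changes are committed
  # without the usual delay.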