Blink roll 25b6bd3a7a131ffe68d809546ad1a20707915cdc:3a503f41ae42e5b79cfcd2ff10e65afde...
[chromium-blink-merge.git] / sync / tools / testserver / chromiumsync.py
blob5487ef23aba0c1fb33c016eee39b6d39bf75b169
1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """An implementation of the server side of the Chromium sync protocol.
7 The details of the protocol are described mostly by comments in the protocol
8 buffer definition at chrome/browser/sync/protocol/sync.proto.
9 """
11 import base64
12 import cgi
13 import copy
14 import google.protobuf.text_format
15 import hashlib
16 import operator
17 import pickle
18 import random
19 import string
20 import sys
21 import threading
22 import time
23 import urlparse
24 import uuid
26 import app_list_specifics_pb2
27 import app_notification_specifics_pb2
28 import app_setting_specifics_pb2
29 import app_specifics_pb2
30 import article_specifics_pb2
31 import autofill_specifics_pb2
32 import bookmark_specifics_pb2
33 import client_commands_pb2
34 import dictionary_specifics_pb2
35 import get_updates_caller_info_pb2
36 import extension_setting_specifics_pb2
37 import extension_specifics_pb2
38 import favicon_image_specifics_pb2
39 import favicon_tracking_specifics_pb2
40 import history_delete_directive_specifics_pb2
41 import managed_user_setting_specifics_pb2
42 import managed_user_specifics_pb2
43 import managed_user_shared_setting_specifics_pb2
44 import nigori_specifics_pb2
45 import password_specifics_pb2
46 import preference_specifics_pb2
47 import priority_preference_specifics_pb2
48 import search_engine_specifics_pb2
49 import session_specifics_pb2
50 import sync_pb2
51 import sync_enums_pb2
52 import synced_notification_app_info_specifics_pb2
53 import synced_notification_data_pb2
54 import synced_notification_render_pb2
55 import synced_notification_specifics_pb2
56 import theme_specifics_pb2
57 import typed_url_specifics_pb2
58 import wifi_credential_specifics_pb2
# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has.  But in the context
# of a program, it is useful to have an enumeration.
# NOTE: the range(31) argument must equal the number of names in the tuple;
# keep the two in sync when adding a datatype.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    SYNCED_NOTIFICATION_APP_INFO,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING,
    WIFI_CREDENTIAL) = range(31)
# An enumeration on the frequency at which the server should send errors
# to the client.  This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such token.
# Maps protobuf field name -> FieldDescriptor for every EntitySpecifics field.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
# Maps each sync type (python enum from ALL_TYPES) to the FieldDescriptor of
# the corresponding EntitySpecifics field.  TOP_LEVEL intentionally has no
# entry here: the top-level folder is identified by its server tag, not by a
# specifics field.
# The dict literal in the scraped source was unterminated (closing brace
# lost); restored here.  Quote style normalized to single quotes to match the
# rest of the map.
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS['synced_notification_app_info'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    WIFI_CREDENTIAL: SYNC_TYPE_FIELDS['wifi_credential'],
}
# The parent ID used to indicate a top-level node.
ROOT_ID = '0'

# Unix time epoch +1 day in struct_time format.  The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after start of epoch, since in timezones with
# positive UTC offset time.mktime throws an OverflowError, rather than
# returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="
class Error(Exception):
  """Base error class for this module; all module errors derive from it."""
class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""
class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""
class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    # Forward to the base Exception so str(err)/err.args carry the affected
    # datatypes instead of being empty.
    super(MigrationDoneError, self).__init__(datatypes)
    self.datatypes = datatypes
class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""
class TransientError(Error):
  """The client would be sent a transient error."""
class SyncInducedError(Error):
  """The client would be sent an error."""
class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""
class ClientNotConnectedError(Error):
  """The client is not connected to the server."""
def GetEntryType(entry):
  """Extract the sync type from a SyncEntry.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or
    None if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  # The top-level folder is identified by a well-known server tag rather
  # than by any EntitySpecifics field, so check for it first.
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]
def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned.  Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: A EntitySpecifics protobuf message whose extensions to
      enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  present_types = []
  for sync_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if specifics.HasField(field_descriptor.name):
      present_types.append(sync_type)
  return present_types
def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  field_descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
  return field_descriptor.number
def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum)."""
  for sync_type, descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if descriptor.number == protocol_data_type_id:
      return sync_type
  # No EntitySpecifics field carries the requested protocol id.
  raise DataTypeIdNotRecognized
def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.
  """
  # A purely numeric string is interpreted as a protocol data type id.
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  normalized = data_type_string.lower().rstrip('s')
  for sync_type, descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if descriptor.name.lower().rstrip('s') == normalized:
      return sync_type
  raise DataTypeIdNotRecognized
def MakeNewKeystoreKey():
  """Returns a new random keystore key of KEYSTORE_KEY_LENGTH characters."""
  alphabet = string.ascii_uppercase + string.digits
  return ''.join([random.choice(alphabet)
                  for _ in xrange(KEYSTORE_KEY_LENGTH)])
def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
  return descriptor.name
def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  # Look the numeric value up in the protobuf enum descriptor so the
  # symbolic enum value name can be returned.
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
         .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
         .values_by_number[caller_info_source].name
def ShortDatatypeListSummary(data_types):
  """Formats compactly a list of sync types (python enums) for human eyes.

  This function is intended for use by logging.  If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  # TOP_LEVEL is implicit and never interesting in a summary.
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  # Prefer the positive form unless the negative form is strictly more
  # compact.
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    # SetInParent marks the (empty) message field as present, which is what
    # tags the specifics with the requested datatype.
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a
      server-created permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent.  If ROOT_ID, this is
      a top-level item; otherwise it must match the tag of another
      server-created permanent item.
    sync_type: A value from ALL_TYPES giving the datatype of this permanent
      item.  This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup
      (True, the default) or only when a client explicitly asks the server
      to create it.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.parent_tag = parent_tag
    self.name = name
    self.sync_type = sync_type
    self.create_by_default = create_by_default
class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event.  Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    # Maps datatype -> list of migration versions, oldest first.  Every
    # datatype starts out at version 1.
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    """Returns the most recent migration version for |datatype|."""
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally returns migrations in the order that they
    were triggered.  Doing it this way allows the client to queue up two
    migrations in a row, so the second one is received while responding to
    the first.

    Arguments:
      versions_map: a map whose keys are datatypes and whose values are
        versions.

    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      # Report the oldest outstanding migration first.
      raise MigrationDoneError(problems[min(problems.keys())])

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    # The original iterated with enumerate() but never used the index.
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1
class UpdateSieve(object):
  """A filter to remove items the client has already seen."""

  def __init__(self, request, migration_history=None):
    self._original_request = request
    # Maps datatype -> the timestamp the client claims to have synced up to.
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    # Maps datatype -> the migration version the client reports for it.
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            # Client is restarting after a migration; it is implicitly at
            # migration version 1 for this type.
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          # The token is an opaque (timestamp, migration_version) pair
          # produced by SaveProgress below.
          # NOTE(review): pickle.loads on client-supplied data is unsafe
          # anywhere but a test server.
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          # A present-but-empty token means "from the beginning of time".
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      # The top-level folder is implicitly requested at the oldest timestamp
      # of any requested type.
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    """Returns a compact, human-readable summary of the requested state."""
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    # Raises MigrationDoneError if the client is behind any migration.
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    # Types the client didn't request default to sys.maxint, so their items
    # are never wanted.
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile
    Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        # Round-trips through pickle; decoded in __init__ above.
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
class SyncDataModel(object):
  """Models the account state of one sync user."""

  # Maximum number of changelog entries returned by one GetChanges call.
  _BATCH_SIZE = 100
489 # Specify all the permanent items that a model might need.
490 _PERMANENT_ITEM_SPECS = [
491 PermanentItem('google_chrome_apps', name='Apps',
492 parent_tag=ROOT_ID, sync_type=APPS),
493 PermanentItem('google_chrome_app_list', name='App List',
494 parent_tag=ROOT_ID, sync_type=APP_LIST),
495 PermanentItem('google_chrome_app_notifications', name='App Notifications',
496 parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
497 PermanentItem('google_chrome_app_settings',
498 name='App Settings',
499 parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
500 PermanentItem('google_chrome_bookmarks', name='Bookmarks',
501 parent_tag=ROOT_ID, sync_type=BOOKMARK),
502 PermanentItem('bookmark_bar', name='Bookmark Bar',
503 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
504 PermanentItem('other_bookmarks', name='Other Bookmarks',
505 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
506 PermanentItem('synced_bookmarks', name='Synced Bookmarks',
507 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
508 create_by_default=False),
509 PermanentItem('google_chrome_autofill', name='Autofill',
510 parent_tag=ROOT_ID, sync_type=AUTOFILL),
511 PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles',
512 parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
513 PermanentItem('google_chrome_device_info', name='Device Info',
514 parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
515 PermanentItem('google_chrome_experiments', name='Experiments',
516 parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
517 PermanentItem('google_chrome_extension_settings',
518 name='Extension Settings',
519 parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
520 PermanentItem('google_chrome_extensions', name='Extensions',
521 parent_tag=ROOT_ID, sync_type=EXTENSIONS),
522 PermanentItem('google_chrome_history_delete_directives',
523 name='History Delete Directives',
524 parent_tag=ROOT_ID,
525 sync_type=HISTORY_DELETE_DIRECTIVE),
526 PermanentItem('google_chrome_favicon_images',
527 name='Favicon Images',
528 parent_tag=ROOT_ID,
529 sync_type=FAVICON_IMAGES),
530 PermanentItem('google_chrome_favicon_tracking',
531 name='Favicon Tracking',
532 parent_tag=ROOT_ID,
533 sync_type=FAVICON_TRACKING),
534 PermanentItem('google_chrome_managed_user_settings',
535 name='Managed User Settings',
536 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
537 PermanentItem('google_chrome_managed_users',
538 name='Managed Users',
539 parent_tag=ROOT_ID, sync_type=MANAGED_USER),
540 PermanentItem('google_chrome_managed_user_shared_settings',
541 name='Managed User Shared Settings',
542 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
543 PermanentItem('google_chrome_nigori', name='Nigori',
544 parent_tag=ROOT_ID, sync_type=NIGORI),
545 PermanentItem('google_chrome_passwords', name='Passwords',
546 parent_tag=ROOT_ID, sync_type=PASSWORD),
547 PermanentItem('google_chrome_preferences', name='Preferences',
548 parent_tag=ROOT_ID, sync_type=PREFERENCE),
549 PermanentItem('google_chrome_priority_preferences',
550 name='Priority Preferences',
551 parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
552 PermanentItem('google_chrome_synced_notifications',
553 name='Synced Notifications',
554 parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
555 PermanentItem('google_chrome_synced_notification_app_info',
556 name='Synced Notification App Info',
557 parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION_APP_INFO),
558 PermanentItem('google_chrome_search_engines', name='Search Engines',
559 parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
560 PermanentItem('google_chrome_sessions', name='Sessions',
561 parent_tag=ROOT_ID, sync_type=SESSION),
562 PermanentItem('google_chrome_themes', name='Themes',
563 parent_tag=ROOT_ID, sync_type=THEME),
564 PermanentItem('google_chrome_typed_urls', name='Typed URLs',
565 parent_tag=ROOT_ID, sync_type=TYPED_URL),
566 PermanentItem('google_chrome_wifi_credentials', name='WiFi Credentials',
567 parent_tag=ROOT_ID, sync_type=WIFI_CREDENTIAL),
568 PermanentItem('google_chrome_dictionary', name='Dictionary',
569 parent_tag=ROOT_ID, sync_type=DICTIONARY),
570 PermanentItem('google_chrome_articles', name='Articles',
571 parent_tag=ROOT_ID, sync_type=ARTICLE),
  def __init__(self):
    # Monotonically increasing version number.  The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    self.ResetStoreBirthday()
    self.migration_history = MigrationHistory()
    # Error (and the frequency at which to raise it) that tests may induce.
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    # Server-generated keystore encryption keys, oldest first.
    self._keys = [MakeNewKeystoreKey()]
  def _SaveEntry(self, entry):
    """Insert or update an entry in the change log, and give it a new version.

    The ID fields of this entry are assumed to be valid server IDs.  This
    entry will be updated with a new version number and sync_timestamp.

    Args:
      entry: The entry to be added or updated.
    """
    self._version += 1
    # Maintain a global (rather than per-item) sequence number and use it
    # both as the per-entry version as well as the update-progress timestamp.
    # This simulates the behavior of the original server implementation.
    entry.version = self._version
    entry.sync_timestamp = self._version

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry:
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Store a deep copy so later mutation of |entry| cannot alter the log.
    self._entries[entry.id_string] = copy.deepcopy(entry)
  def _ServerTagToId(self, tag):
    """Determine the server ID from a server-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      tag: The unique, known-to-the-client tag of a server-generated item.
    Returns:
      The string value of the computed server ID.
    """
    if not tag or tag == ROOT_ID:
      return tag
    # Raises IndexError if the tag names no permanent item spec.
    spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)
632 def _TypeToTypeRootId(self, model_type):
633 """Returns the server ID for the type root node of the given type."""
634 tag = [x.tag for x in self._PERMANENT_ITEM_SPECS
635 if x.sync_type == model_type][0]
636 return self._ServerTagToId(tag)
  def _ClientTagToId(self, datatype, tag):
    """Determine the server ID from a client-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      tag: The unique, opaque-to-the-server tag of a client-tagged item.
    Returns:
      The string value of the computed server ID.
    """
    return self._MakeCurrentId(datatype, '<client tag>%s' % tag)
  def _ClientIdToId(self, datatype, client_guid, client_item_id):
    """Compute a unique server ID from a client-local ID tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      client_guid: A globally unique ID that identifies the client which
        created this item.
      client_item_id: An ID that uniquely identifies this item on the client
        which created it.
    Returns:
      The string value of the computed server ID.
    """
    # Using the client ID info is not required here (we could instead generate
    # a random ID), but it's useful for debugging.
    return self._MakeCurrentId(datatype,
        '<server ID originally>%s/%s' % (client_guid, client_item_id))
672 def _MakeCurrentId(self, datatype, inner_id):
673 return '%d^%d^%s' % (datatype,
674 self.migration_history.GetLatestVersion(datatype),
675 inner_id)
677 def _ExtractIdInfo(self, id_string):
678 if not id_string or id_string == ROOT_ID:
679 return None
680 datatype_string, separator, remainder = id_string.partition('^')
681 migration_version_string, separator, inner_id = remainder.partition('^')
682 return (int(datatype_string), int(migration_version_string), inner_id)
  def _WritePosition(self, entry, parent_id):
    """Ensure the entry has an absolute, numeric position and parent_id.

    Historically, clients would specify positions using the predecessor-based
    references in the insert_after_item_id field; starting July 2011, this
    was changed and Chrome now sends up the absolute position.  The server
    must store a position_in_parent value and must not maintain
    insert_after_item_id.
    Starting in Jan 2013, the client will also send up a unique_position
    field which should be saved and returned on subsequent GetUpdates.

    Args:
      entry: The entry for which to write a position.  Its ID field are
        assumed to be server IDs.  This entry will have its parent_id_string,
        position_in_parent and unique_position fields updated; its
        insert_after_item_id field will be cleared.
      parent_id: The ID of the entry intended as the new parent.
    """
    entry.parent_id_string = parent_id
    if not entry.HasField('position_in_parent'):
      entry.position_in_parent = 1337  # A debuggable, distinctive default.
    entry.ClearField('insert_after_item_id')
  def _ItemExists(self, id_string):
    """Determine whether an item exists in the changelog."""
    return id_string in self._entries
  def _CreatePermanentItem(self, spec):
    """Create one permanent item from its spec, if it doesn't exist.

    The resulting item is added to the changelog.

    Args:
      spec: A PermanentItem object holding the properties of the item to
        create.
    """
    id_string = self._ServerTagToId(spec.tag)
    if self._ItemExists(id_string):
      return
    print 'Creating permanent item: %s' % spec.name
    entry = sync_pb2.SyncEntity()
    entry.id_string = id_string
    entry.non_unique_name = spec.name
    entry.name = spec.name
    entry.server_defined_unique_tag = spec.tag
    entry.folder = True
    entry.deleted = False
    # Tag the entry with its datatype via an empty-but-present specifics
    # field.
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    self._SaveEntry(entry)
735 def _CreateDefaultPermanentItems(self, requested_types):
736 """Ensure creation of all default permanent items for a given set of types.
738 Args:
739 requested_types: A list of sync data types from ALL_TYPES.
740 All default permanent items of only these types will be created.
742 for spec in self._PERMANENT_ITEM_SPECS:
743 if spec.sync_type in requested_types and spec.create_by_default:
744 self._CreatePermanentItem(spec)
  def ResetStoreBirthday(self):
    """Resets the store birthday to a random value."""
    # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    self.store_birthday = '%0.30f' % random.random()
  def StoreBirthday(self):
    """Gets the store birthday."""
    return self.store_birthday
  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    The returned entries are limited to being _BATCH_SIZE many.  The entries
    are returned in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
        has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining).  Version is a new
      timestamp value, which should be used as the starting point for the
      next query.  Entries is the batch of entries meeting the current
      timestamp query.  Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # Mobile bookmark folder is not created by default, create it only when
    # client requested it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    # Sort the changelog by version so the batch respects version order.
    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick batch_size new changes, and then filter them.  This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict batch to requested types.  Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))
  def GetKeystoreKeys(self):
    """Returns the encryption keys for this account."""
    print "Returning encryption keys: %s" % self._keys
    return self._keys
808 def _CopyOverImmutableFields(self, entry):
809 """Preserve immutable fields by copying pre-commit state.
811 Args:
812 entry: A sync entity from the client.
814 if entry.id_string in self._entries:
815 if self._entries[entry.id_string].HasField(
816 'server_defined_unique_tag'):
817 entry.server_defined_unique_tag = (
818 self._entries[entry.id_string].server_defined_unique_tag)
  def _CheckVersionForCommit(self, entry):
    """Perform an optimistic concurrency check on the version number.

    Clients are only allowed to commit if they report having seen the most
    recent version of an object.

    Args:
      entry: A sync entity from the client.  It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the client's version matches the
      newest server version for the given entry.
    """
    if entry.id_string in self._entries:
      # Allow edits/deletes if the version matches, and any undeletion.
      return (self._entries[entry.id_string].version == entry.version or
              self._entries[entry.id_string].deleted)
    else:
      # Allow unknown ID only if the client thinks it's new too.
      return entry.version == 0
841 def _CheckParentIdForCommit(self, entry):
842 """Check that the parent ID referenced in a SyncEntity actually exists.
844 Args:
845 entry: A sync entity from the client. It is assumed that ID fields
846 have been converted to server IDs.
847 Returns:
848 A boolean value indicating whether the entity's parent ID is an object
849 that actually exists (and is not deleted) in the current account state.
851 if entry.parent_id_string == ROOT_ID:
852 # This is generally allowed.
853 return True
854 if (not entry.HasField('parent_id_string') and
855 entry.HasField('client_defined_unique_tag')):
856 return True # Unique client tag items do not need to specify a parent.
857 if entry.parent_id_string not in self._entries:
858 print 'Warning: Client sent unknown ID. Should never happen.'
859 return False
860 if entry.parent_id_string == entry.id_string:
861 print 'Warning: Client sent circular reference. Should never happen.'
862 return False
863 if self._entries[entry.parent_id_string].deleted:
864 # This can happen in a race condition between two clients.
865 return False
866 if not self._entries[entry.parent_id_string].folder:
867 print 'Warning: Client sent non-folder parent. Should never happen.'
868 return False
869 return True
871 def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
872 """Convert ID fields in a client sync entry to server IDs.
874 A commit batch sent by a client may contain new items for which the
875 server has not generated IDs yet. And within a commit batch, later
876 items are allowed to refer to earlier items. This method will
877 generate server IDs for new items, as well as rewrite references
878 to items whose server IDs were generated earlier in the batch.
880 Args:
881 entry: The client sync entry to modify.
882 cache_guid: The globally unique ID of the client that sent this
883 commit request.
884 commit_session: A dictionary mapping the original IDs to the new server
885 IDs, for any items committed earlier in the batch.
887 if entry.version == 0:
888 data_type = GetEntryType(entry)
889 if entry.HasField('client_defined_unique_tag'):
890 # When present, this should determine the item's ID.
891 new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
892 else:
893 new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
894 entry.originator_cache_guid = cache_guid
895 entry.originator_client_item_id = entry.id_string
896 commit_session[entry.id_string] = new_id # Remember the remapping.
897 entry.id_string = new_id
898 if entry.parent_id_string in commit_session:
899 entry.parent_id_string = commit_session[entry.parent_id_string]
900 if entry.insert_after_item_id in commit_session:
901 entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
903 def ValidateCommitEntries(self, entries):
904 """Raise an exception if a commit batch contains any global errors.
906 Arguments:
907 entries: an iterable containing commit-form SyncEntity protocol buffers.
909 Raises:
910 MigrationDoneError: if any of the entries reference a recently-migrated
911 datatype.
913 server_ids_in_commit = set()
914 local_ids_in_commit = set()
915 for entry in entries:
916 if entry.version:
917 server_ids_in_commit.add(entry.id_string)
918 else:
919 local_ids_in_commit.add(entry.id_string)
920 if entry.HasField('parent_id_string'):
921 if entry.parent_id_string not in local_ids_in_commit:
922 server_ids_in_commit.add(entry.parent_id_string)
924 versions_present = {}
925 for server_id in server_ids_in_commit:
926 parsed = self._ExtractIdInfo(server_id)
927 if parsed:
928 datatype, version, _ = parsed
929 versions_present.setdefault(datatype, []).append(version)
931 self.migration_history.CheckAllCurrent(
932 dict((k, min(v)) for k, v in versions_present.iteritems()))
934 def CommitEntry(self, entry, cache_guid, commit_session):
935 """Attempt to commit one entry to the user's account.
937 Args:
938 entry: A SyncEntity protobuf representing desired object changes.
939 cache_guid: A string value uniquely identifying the client; this
940 is used for ID generation and will determine the originator_cache_guid
941 if the entry is new.
942 commit_session: A dictionary mapping client IDs to server IDs for any
943 objects committed earlier this session. If the entry gets a new ID
944 during commit, the change will be recorded here.
945 Returns:
946 A SyncEntity reflecting the post-commit value of the entry, or None
947 if the entry was not committed due to an error.
949 entry = copy.deepcopy(entry)
951 # Generate server IDs for this entry, and write generated server IDs
952 # from earlier entries into the message's fields, as appropriate. The
953 # ID generation state is stored in 'commit_session'.
954 self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)
956 # Sets the parent ID field for a client-tagged item. The client is allowed
957 # to not specify parents for these types of items. The server can figure
958 # out on its own what the parent ID for this entry should be.
959 self._RewriteParentIdForUniqueClientEntry(entry)
961 # Perform the optimistic concurrency check on the entry's version number.
962 # Clients are not allowed to commit unless they indicate that they've seen
963 # the most recent version of an object.
964 if not self._CheckVersionForCommit(entry):
965 return None
967 # Check the validity of the parent ID; it must exist at this point.
968 # TODO(nick): Implement cycle detection and resolution.
969 if not self._CheckParentIdForCommit(entry):
970 return None
972 self._CopyOverImmutableFields(entry);
974 # At this point, the commit is definitely going to happen.
976 # Deletion works by storing a limited record for an entry, called a
977 # tombstone. A sync server must track deleted IDs forever, since it does
978 # not keep track of client knowledge (there's no deletion ACK event).
979 if entry.deleted:
980 def MakeTombstone(id_string, datatype):
981 """Make a tombstone entry that will replace the entry being deleted.
983 Args:
984 id_string: Index of the SyncEntity to be deleted.
985 Returns:
986 A new SyncEntity reflecting the fact that the entry is deleted.
988 # Only the ID, version and deletion state are preserved on a tombstone.
989 tombstone = sync_pb2.SyncEntity()
990 tombstone.id_string = id_string
991 tombstone.deleted = True
992 tombstone.name = ''
993 tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
994 return tombstone
996 def IsChild(child_id):
997 """Check if a SyncEntity is a child of entry, or any of its children.
999 Args:
1000 child_id: Index of the SyncEntity that is a possible child of entry.
1001 Returns:
1002 True if it is a child; false otherwise.
1004 if child_id not in self._entries:
1005 return False
1006 if self._entries[child_id].parent_id_string == entry.id_string:
1007 return True
1008 return IsChild(self._entries[child_id].parent_id_string)
1010 # Identify any children entry might have.
1011 child_ids = [child.id_string for child in self._entries.itervalues()
1012 if IsChild(child.id_string)]
1014 # Mark all children that were identified as deleted.
1015 for child_id in child_ids:
1016 datatype = GetEntryType(self._entries[child_id])
1017 self._SaveEntry(MakeTombstone(child_id, datatype))
1019 # Delete entry itself.
1020 datatype = GetEntryType(self._entries[entry.id_string])
1021 entry = MakeTombstone(entry.id_string, datatype)
1022 else:
1023 # Comments in sync.proto detail how the representation of positional
1024 # ordering works.
1026 # We've almost fully deprecated the 'insert_after_item_id' field.
1027 # The 'position_in_parent' field is also deprecated, but as of Jan 2013
1028 # is still in common use. The 'unique_position' field is the latest
1029 # and greatest in positioning technology.
1031 # This server supports 'position_in_parent' and 'unique_position'.
1032 self._WritePosition(entry, entry.parent_id_string)
1034 # Preserve the originator info, which the client is not required to send
1035 # when updating.
1036 base_entry = self._entries.get(entry.id_string)
1037 if base_entry and not entry.HasField('originator_cache_guid'):
1038 entry.originator_cache_guid = base_entry.originator_cache_guid
1039 entry.originator_client_item_id = base_entry.originator_client_item_id
1041 # Store the current time since the Unix epoch in milliseconds.
1042 entry.mtime = (int((time.mktime(time.gmtime()) -
1043 (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))
1045 # Commit the change. This also updates the version number.
1046 self._SaveEntry(entry)
1047 return entry
1049 def _RewriteVersionInId(self, id_string):
1050 """Rewrites an ID so that its migration version becomes current."""
1051 parsed_id = self._ExtractIdInfo(id_string)
1052 if not parsed_id:
1053 return id_string
1054 datatype, old_migration_version, inner_id = parsed_id
1055 return self._MakeCurrentId(datatype, inner_id)
1057 def _RewriteParentIdForUniqueClientEntry(self, entry):
1058 """Sets the entry's parent ID field to the appropriate value.
1060 The client must always set enough of the specifics of the entries it sends
1061 up such that the server can identify its type. (See crbug.com/373859)
1063 The client is under no obligation to set the parent ID field. The server
1064 can always infer what the appropriate parent for this model type should be.
1065 Having the client not send the parent ID is a step towards the removal of
1066 type root nodes. (See crbug.com/373869)
1068 This server implements these features by "faking" the existing of a parent
1069 ID early on in the commit processing.
1071 This function has no effect on non-client-tagged items.
1073 if not entry.HasField('client_defined_unique_tag'):
1074 return # Skip this processing for non-client-tagged types.
1075 data_type = GetEntryType(entry)
1076 entry.parent_id_string = self._TypeToTypeRootId(data_type)
1078 def TriggerMigration(self, datatypes):
1079 """Cause a migration to occur for a set of datatypes on this account.
1081 Clients will see the MIGRATION_DONE error for these datatypes until they
1082 resync them.
1084 versions_to_remap = self.migration_history.Bump(datatypes)
1085 all_entries = self._entries.values()
1086 self._entries.clear()
1087 for entry in all_entries:
1088 new_id = self._RewriteVersionInId(entry.id_string)
1089 entry.id_string = new_id
1090 if entry.HasField('parent_id_string'):
1091 entry.parent_id_string = self._RewriteVersionInId(
1092 entry.parent_id_string)
1093 self._entries[entry.id_string] = entry
1095 def TriggerSyncTabFavicons(self):
1096 """Set the 'sync_tab_favicons' field to this account's nigori node.
1098 If the field is not currently set, will write a new nigori node entry
1099 with the field set. Else does nothing.
1102 nigori_tag = "google_chrome_nigori"
1103 nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
1104 if (nigori_original.specifics.nigori.sync_tab_favicons):
1105 return
1106 nigori_new = copy.deepcopy(nigori_original)
1107 nigori_new.specifics.nigori.sync_tabs = True
1108 self._SaveEntry(nigori_new)
1110 def TriggerCreateSyncedBookmarks(self):
1111 """Create the Synced Bookmarks folder under the Bookmarks permanent item.
1113 Clients will then receive the Synced Bookmarks folder on future
1114 GetUpdates, and new bookmarks can be added within the Synced Bookmarks
1115 folder.
1118 synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
1119 if spec.name == "Synced Bookmarks"]
1120 self._CreatePermanentItem(synced_bookmarks_spec)
1122 def TriggerEnableKeystoreEncryption(self):
1123 """Create the keystore_encryption experiment entity and enable it.
1125 A new entity within the EXPERIMENTS datatype is created with the unique
1126 client tag "keystore_encryption" if it doesn't already exist. The
1127 keystore_encryption message is then filled with |enabled| set to true.
1130 experiment_id = self._ServerTagToId("google_chrome_experiments")
1131 keystore_encryption_id = self._ClientTagToId(
1132 EXPERIMENTS,
1133 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
1134 keystore_entry = self._entries.get(keystore_encryption_id)
1135 if keystore_entry is None:
1136 keystore_entry = sync_pb2.SyncEntity()
1137 keystore_entry.id_string = keystore_encryption_id
1138 keystore_entry.name = "Keystore Encryption"
1139 keystore_entry.client_defined_unique_tag = (
1140 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
1141 keystore_entry.folder = False
1142 keystore_entry.deleted = False
1143 keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
1144 self._WritePosition(keystore_entry, experiment_id)
1146 keystore_entry.specifics.experiments.keystore_encryption.enabled = True
1148 self._SaveEntry(keystore_entry)
1150 def TriggerRotateKeystoreKeys(self):
1151 """Rotate the current set of keystore encryption keys.
1153 |self._keys| will have a new random encryption key appended to it. We touch
1154 the nigori node so that each client will receive the new encryption keys
1155 only once.
1158 # Add a new encryption key.
1159 self._keys += [MakeNewKeystoreKey(), ]
1161 # Increment the nigori node's timestamp, so clients will get the new keys
1162 # on their next GetUpdates (any time the nigori node is sent back, we also
1163 # send back the keystore keys).
1164 nigori_tag = "google_chrome_nigori"
1165 self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))
1167 def TriggerAcknowledgeManagedUsers(self):
1168 """Set the "acknowledged" flag for any managed user entities that don't have
1169 it set already.
1172 if not self.acknowledge_managed_users:
1173 return
1175 managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
1176 if entry.specifics.HasField('managed_user')
1177 and not entry.specifics.managed_user.acknowledged]
1178 for user in managed_users:
1179 user.specifics.managed_user.acknowledged = True
1180 self._SaveEntry(user)
1182 def TriggerEnablePreCommitGetUpdateAvoidance(self):
1183 """Sets the experiment to enable pre-commit GetUpdate avoidance."""
1184 experiment_id = self._ServerTagToId("google_chrome_experiments")
1185 pre_commit_gu_avoidance_id = self._ClientTagToId(
1186 EXPERIMENTS,
1187 PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
1188 entry = self._entries.get(pre_commit_gu_avoidance_id)
1189 if entry is None:
1190 entry = sync_pb2.SyncEntity()
1191 entry.id_string = pre_commit_gu_avoidance_id
1192 entry.name = "Pre-commit GU avoidance"
1193 entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
1194 entry.folder = False
1195 entry.deleted = False
1196 entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
1197 self._WritePosition(entry, experiment_id)
1198 entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
1199 self._SaveEntry(entry)
  def SetInducedError(self, error, error_frequency,
                      sync_count_before_errors):
    """Arrange for future sync requests to report an artificial error.

    Args:
      error: A sync_pb2.ClientToServerResponse.Error to return to clients.
      error_frequency: One of the ERROR_FREQUENCY_* constants controlling
        how often the error is raised (see TestServer.CheckSendError).
      sync_count_before_errors: The sync count at installation time; used by
        CheckSendError's modulo-3 bookkeeping for ERROR_FREQUENCY_TWO_THIRDS.
    """
    self.induced_error = error
    self.induced_error_frequency = error_frequency
    self.sync_count_before_errors = sync_count_before_errors
  def GetInducedError(self):
    """Return the error previously configured via SetInducedError."""
    return self.induced_error
1210 def AddSyncedNotification(self, serialized_notification):
1211 """Adds a synced notification to the server data.
1213 The notification will be delivered to the client on the next GetUpdates
1214 call.
1216 Args:
1217 serialized_notification: A serialized CoalescedSyncedNotification.
1219 Returns:
1220 The string representation of the added SyncEntity.
1222 Raises:
1223 ClientNotConnectedError: if the client has not yet connected to this
1224 server
1226 # A unique string used wherever a unique ID for this notification is
1227 # required.
1228 unique_notification_id = str(uuid.uuid4())
1230 specifics = self._CreateSyncedNotificationEntitySpecifics(
1231 unique_notification_id, serialized_notification)
1233 # Create the root SyncEntity representing a single notification.
1234 entity = sync_pb2.SyncEntity()
1235 entity.specifics.CopyFrom(specifics)
1236 entity.parent_id_string = self._ServerTagToId(
1237 'google_chrome_synced_notifications')
1238 entity.name = 'Synced notification added for testing'
1239 entity.version = self._GetNextVersionNumber()
1241 entity.client_defined_unique_tag = self._CreateSyncedNotificationClientTag(
1242 specifics.synced_notification.coalesced_notification.key)
1243 entity.id_string = self._ClientTagToId(GetEntryType(entity),
1244 entity.client_defined_unique_tag)
1246 self._entries[entity.id_string] = copy.deepcopy(entity)
1248 return google.protobuf.text_format.MessageToString(entity)
1250 def _GetNextVersionNumber(self):
1251 """Set the version to one more than the greatest version number seen."""
1252 entries = sorted(self._entries.values(), key=operator.attrgetter('version'))
1253 if len(entries) < 1:
1254 raise ClientNotConnectedError
1255 return entries[-1].version + 1
1257 def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
1258 serialized_notification):
1259 """Create the EntitySpecifics proto for a synced notification."""
1260 coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
1261 google.protobuf.text_format.Merge(serialized_notification, coalesced)
1263 # Override the provided key so that we have a unique one.
1264 coalesced.key = unique_id
1266 specifics = sync_pb2.EntitySpecifics()
1267 notification_specifics = \
1268 synced_notification_specifics_pb2.SyncedNotificationSpecifics()
1269 notification_specifics.coalesced_notification.CopyFrom(coalesced)
1270 specifics.synced_notification.CopyFrom(notification_specifics)
1272 return specifics
1274 def _CreateSyncedNotificationClientTag(self, key):
1275 """Create the client_defined_unique_tag value for a SyncedNotification.
1277 Args:
1278 key: The entity used to create the client tag.
1280 Returns:
1281 The string value of the to be used as the client_defined_unique_tag.
1283 serialized_type = sync_pb2.EntitySpecifics()
1284 specifics = synced_notification_specifics_pb2.SyncedNotificationSpecifics()
1285 serialized_type.synced_notification.CopyFrom(specifics)
1286 hash_input = serialized_type.SerializeToString() + key
1287 return base64.b64encode(hashlib.sha1(hash_input).digest())
  def AddSyncedNotificationAppInfo(self, app_info):
    """Adds an app info struct to the server data.

    The notification will be delivered to the client on the next GetUpdates
    call.

    Args:
      app_info: A serialized AppInfo.

    Returns:
      The string representation of the added SyncEntity.

    Raises:
      ClientNotConnectedError: if the client has not yet connected to this
      server
    """
    specifics = self._CreateSyncedNotificationAppInfoEntitySpecifics(app_info)

    # Create the root SyncEntity representing a single app info protobuf.
    entity = sync_pb2.SyncEntity()
    entity.specifics.CopyFrom(specifics)
    entity.parent_id_string = self._ServerTagToId(
        'google_chrome_synced_notification_app_info')
    entity.name = 'App info added for testing'
    entity.version = self._GetNextVersionNumber()

    # App Infos do not have a strong id, it only needs to be unique.
    # NOTE(review): the constant "foo" contradicts the uniqueness comment
    # above -- a second app info would overwrite the first under the same
    # id_string. Confirm single-entry semantics are intended, or generate a
    # unique value here.
    entity.client_defined_unique_tag = "foo"
    entity.id_string = "foo"

    self._entries[entity.id_string] = copy.deepcopy(entity)

    # Debug output; this is a test server, so printing the entity is fine.
    print "entity before exit is ", entity

    return google.protobuf.text_format.MessageToString(entity)
1325 def _CreateSyncedNotificationAppInfoEntitySpecifics(
1326 self, synced_notification_app_info):
1327 """Create the EntitySpecifics proto for a synced notification app info."""
1328 # Create a single, empty app_info object
1329 app_info = \
1330 synced_notification_app_info_specifics_pb2.SyncedNotificationAppInfo()
1331 # Fill the app_info object from the text format protobuf.
1332 google.protobuf.text_format.Merge(synced_notification_app_info, app_info)
1334 # Create a new specifics object with a contained app_info
1335 specifics = sync_pb2.EntitySpecifics()
1336 app_info_specifics = \
1337 synced_notification_app_info_specifics_pb2.\
1338 SyncedNotificationAppInfoSpecifics()
1340 # Copy the app info from the text format protobuf
1341 contained_app_info = app_info_specifics.synced_notification_app_info.add()
1342 contained_app_info.CopyFrom(app_info)
1344 # And put the new app_info_specifics into the specifics before returning.
1345 specifics.synced_notification_app_info.CopyFrom(app_info_specifics)
1347 return specifics
1349 class TestServer(object):
1350 """An object to handle requests for one (and only one) Chrome Sync account.
1352 TestServer consumes the sync command messages that are the outermost
1353 layers of the protocol, performs the corresponding actions on its
1354 SyncDataModel, and constructs an appropriate response message.
  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    # Serializes all account access; HandleCommand may run on many threads.
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    # Lazily yields short log labels: 'A'..'Y', then '+A'..'+Y', '++A', ...
    # NOTE(review): the upper bound xrange(ord('A'), ord('Z')) excludes 'Z';
    # likely ord('Z') + 1 was intended -- harmless, but worth confirming.
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    self.transient_error = False
    # Number of HandleCommand calls served so far; used by induced-error
    # frequency bookkeeping.
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None
1379 def GetShortClientName(self, query):
1380 parsed = cgi.parse_qs(query[query.find('?')+1:])
1381 client_id = parsed.get('client_id')
1382 if not client_id:
1383 return '?'
1384 client_id = client_id[0]
1385 if client_id not in self.clients:
1386 self.clients[client_id] = self.client_name_generator.next()
1387 return self.clients[client_id]
1389 def CheckStoreBirthday(self, request):
1390 """Raises StoreBirthdayError if the request's birthday is a mismatch."""
1391 if not request.HasField('store_birthday'):
1392 return
1393 if self.account.StoreBirthday() != request.store_birthday:
1394 raise StoreBirthdayError
1396 def CheckTransientError(self):
1397 """Raises TransientError if transient_error variable is set."""
1398 if self.transient_error:
1399 raise TransientError
1401 def CheckSendError(self):
1402 """Raises SyncInducedError if needed."""
1403 if (self.account.induced_error.error_type !=
1404 sync_enums_pb2.SyncEnums.UNKNOWN):
1405 # Always means return the given error for all requests.
1406 if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
1407 raise SyncInducedError
1408 # This means the FIRST 2 requests of every 3 requests
1409 # return an error. Don't switch the order of failures. There are
1410 # test cases that rely on the first 2 being the failure rather than
1411 # the last 2.
1412 elif (self.account.induced_error_frequency ==
1413 ERROR_FREQUENCY_TWO_THIRDS):
1414 if (((self.sync_count -
1415 self.account.sync_count_before_errors) % 3) != 0):
1416 raise SyncInducedError
1417 else:
1418 raise InducedErrorFrequencyNotDefined
1420 def HandleMigrate(self, path):
1421 query = urlparse.urlparse(path)[4]
1422 code = 200
1423 self.account_lock.acquire()
1424 try:
1425 datatypes = [DataTypeStringToSyncTypeLoose(x)
1426 for x in urlparse.parse_qs(query).get('type',[])]
1427 if datatypes:
1428 self.account.TriggerMigration(datatypes)
1429 response = 'Migrated datatypes %s' % (
1430 ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
1431 else:
1432 response = 'Please specify one or more <i>type=name</i> parameters'
1433 code = 400
1434 except DataTypeIdNotRecognized, error:
1435 response = 'Could not interpret datatype name'
1436 code = 400
1437 finally:
1438 self.account_lock.release()
1439 return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
1440 (code, code, response))
1442 def HandleSetInducedError(self, path):
1443 query = urlparse.urlparse(path)[4]
1444 self.account_lock.acquire()
1445 code = 200
1446 response = 'Success'
1447 error = sync_pb2.ClientToServerResponse.Error()
1448 try:
1449 error_type = urlparse.parse_qs(query)['error']
1450 action = urlparse.parse_qs(query)['action']
1451 error.error_type = int(error_type[0])
1452 error.action = int(action[0])
1453 try:
1454 error.url = (urlparse.parse_qs(query)['url'])[0]
1455 except KeyError:
1456 error.url = ''
1457 try:
1458 error.error_description =(
1459 (urlparse.parse_qs(query)['error_description'])[0])
1460 except KeyError:
1461 error.error_description = ''
1462 try:
1463 error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
1464 except KeyError:
1465 error_frequency = ERROR_FREQUENCY_ALWAYS
1466 self.account.SetInducedError(error, error_frequency, self.sync_count)
1467 response = ('Error = %d, action = %d, url = %s, description = %s' %
1468 (error.error_type, error.action,
1469 error.url,
1470 error.error_description))
1471 except error:
1472 response = 'Could not parse url'
1473 code = 400
1474 finally:
1475 self.account_lock.release()
1476 return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
1477 (code, code, response))
1479 def HandleCreateBirthdayError(self):
1480 self.account.ResetStoreBirthday()
1481 return (
1482 200,
1483 '<html><title>Birthday error</title><H1>Birthday error</H1></html>')
1485 def HandleSetTransientError(self):
1486 self.transient_error = True
1487 return (
1488 200,
1489 '<html><title>Transient error</title><H1>Transient error</H1></html>')
1491 def HandleSetSyncTabFavicons(self):
1492 """Set 'sync_tab_favicons' field of the nigori node for this account."""
1493 self.account.TriggerSyncTabFavicons()
1494 return (
1495 200,
1496 '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')
1498 def HandleCreateSyncedBookmarks(self):
1499 """Create the Synced Bookmarks folder under Bookmarks."""
1500 self.account.TriggerCreateSyncedBookmarks()
1501 return (
1502 200,
1503 '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')
1505 def HandleEnableKeystoreEncryption(self):
1506 """Enables the keystore encryption experiment."""
1507 self.account.TriggerEnableKeystoreEncryption()
1508 return (
1509 200,
1510 '<html><title>Enable Keystore Encryption</title>'
1511 '<H1>Enable Keystore Encryption</H1></html>')
1513 def HandleRotateKeystoreKeys(self):
1514 """Rotate the keystore encryption keys."""
1515 self.account.TriggerRotateKeystoreKeys()
1516 return (
1517 200,
1518 '<html><title>Rotate Keystore Keys</title>'
1519 '<H1>Rotate Keystore Keys</H1></html>')
1521 def HandleEnableManagedUserAcknowledgement(self):
1522 """Enable acknowledging newly created managed users."""
1523 self.account.acknowledge_managed_users = True
1524 return (
1525 200,
1526 '<html><title>Enable Managed User Acknowledgement</title>'
1527 '<h1>Enable Managed User Acknowledgement</h1></html>')
1529 def HandleEnablePreCommitGetUpdateAvoidance(self):
1530 """Enables the pre-commit GU avoidance experiment."""
1531 self.account.TriggerEnablePreCommitGetUpdateAvoidance()
1532 return (
1533 200,
1534 '<html><title>Enable pre-commit GU avoidance</title>'
1535 '<H1>Enable pre-commit GU avoidance</H1></html>')
  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class. It is safe to call this
    method from multiple threads.

    Args:
      query: The query string of the request URL; used only to derive a
        short client nickname for log output.
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
      result code, while the second value is a string of bytes which is the
      serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    def print_context(direction):
      # Log prefix identifying the client and message direction ('->'/'<-').
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction,
                                      __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      # These checks raise the corresponding protocol errors, handled below.
      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError, error:
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError, error:
      ### This is deprecated now. Would be removed once test cases are removed.
      # NOTE(review): unlike the other handlers, this branch reuses the
      # 'response' built inside the try body; safe today because
      # CheckTransientError() only raises after 'response' is created.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError, error:
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      # Replace the caught exception with the configured error payload.
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()
def HandleCommit(self, commit_message, commit_response):
  """Respond to a Commit request by updating the user's account state.

  Commit attempts stop after the first error, returning a CONFLICT result
  for any unattempted entries.

  Args:
    commit_message: A sync_pb.CommitMessage protobuf holding the content
        of the client's request.
    commit_response: A sync_pb.CommitResponse protobuf into which a reply
        to the client request will be written.
  """
  commit_response.SetInParent()
  guid = commit_message.cache_guid
  id_renames = {}  # Tracks ID renaming during the commit operation.
  halted = False

  self.account.ValidateCommitEntries(commit_message.entries)

  for entry in commit_message.entries:
    committed = None
    if not halted:
      # Try to commit the change to the account.
      committed = self.account.CommitEntry(entry, guid, id_renames)

    # An entryresponse is returned in both success and failure cases.
    reply = commit_response.entryresponse.add()
    if committed:
      reply.response_type = sync_pb2.CommitResponse.SUCCESS
      # These are the properties that the server is allowed to override
      # during commit; the client wants to know their values at the end
      # of the operation.
      reply.id_string = committed.id_string
      if committed.deleted:
        reply.version = entry.version + 1
      else:
        # Note: the production server doesn't actually send the
        # parent_id_string on commit responses, so we don't either.
        reply.position_in_parent = committed.position_in_parent
        reply.version = committed.version
        reply.name = committed.name
        reply.non_unique_name = committed.non_unique_name
    else:
      reply.response_type = sync_pb2.CommitResponse.CONFLICT
      reply.error_message = 'Conflict.'
      halted = True  # One failure halts the batch.
1676 def HandleGetUpdates(self, update_request, update_response):
1677 """Respond to a GetUpdates request by querying the user's account.
1679 Args:
1680 update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
1681 of the client's request.
1682 update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
1683 to the client request will be written.
1685 update_response.SetInParent()
1686 update_sieve = UpdateSieve(update_request, self.account.migration_history)
1688 print CallerInfoToString(update_request.caller_info.source),
1689 print update_sieve.SummarizeRequest()
1691 update_sieve.CheckMigrationState()
1693 new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)
1695 update_response.changes_remaining = remaining
1696 sending_nigori_node = False
1697 for entry in entries:
1698 if entry.name == 'Nigori':
1699 sending_nigori_node = True
1700 reply = update_response.entries.add()
1701 reply.CopyFrom(entry)
1702 update_sieve.SaveProgress(new_timestamp, update_response)
1704 if update_request.need_encryption_key or sending_nigori_node:
1705 update_response.encryption_keys.extend(self.account.GetKeystoreKeys())
def HandleGetOauth2Token(self):
  """Respond to an OAuth2 token request with the configured token data.

  Returns:
    A (response_code, body) tuple where body is a JSON object containing
    the refresh token, access token, expiry, and token type currently
    configured on this server (see HandleSetOauth2Token).
  """
  # Imported locally because the module's top-of-file import block is
  # outside this change.
  import collections
  import json
  # Build the body with the json module so token values containing quotes,
  # backslashes, or non-ASCII characters are escaped correctly; the old
  # hand-concatenated string produced invalid JSON for such values.
  # OrderedDict preserves the historical key order, and indent=1 reproduces
  # the previous one-space layout exactly for plain values.
  payload = collections.OrderedDict([
      ('refresh_token', self.request_token),
      ('access_token', self.access_token),
      ('expires_in', self.expires_in),
      ('token_type', self.token_type),
  ])
  return (int(self.response_code), json.dumps(payload, indent=1))
def HandleSetOauth2Token(self, response_code, request_token, access_token,
                         expires_in, token_type):
  """Overrides the OAuth2 token data served by HandleGetOauth2Token.

  Each argument replaces the stored value only when it is "set" (non-zero
  for numbers, non-empty for strings); unset arguments leave the previous
  value in place.

  Args:
    response_code: HTTP status to return from the token endpoint.
    request_token: New refresh token value.
    access_token: New access token value.
    expires_in: Token lifetime in seconds.
    token_type: OAuth2 token type (e.g. "Bearer").

  Returns:
    A (200, html) tuple echoing the now-effective token configuration.
  """
  updates = (('response_code', response_code, 0),
             ('request_token', request_token, ''),
             ('access_token', access_token, ''),
             ('expires_in', expires_in, 0),
             ('token_type', token_type, ''))
  for attr, value, unset in updates:
    if value != unset:
      setattr(self, attr, value)

  page = ('<html><title>Set OAuth2 Token</title>'
          '<H1>This server will now return the OAuth2 Token:</H1>'
          '<p>response_code: %s</p>'
          '<p>request_token: %s</p>'
          '<p>access_token: %s</p>'
          '<p>expires_in: %s</p>'
          '<p>token_type: %s</p>'
          '</html>' % (self.response_code, self.request_token,
                       self.access_token, self.expires_in, self.token_type))
  return (200, page)
def CustomizeClientCommand(self, sessions_commit_delay_seconds):
  """Customizes the value of the ClientCommand of ServerToClientResponse.

  Currently, this only allows for changing the sessions_commit_delay_seconds
  field. This is useful for testing in conjunction with
  AddSyncedNotification so that synced notifications are seen immediately
  after triggering them with an HTTP call to the test server.

  Args:
    sessions_commit_delay_seconds: The desired sync delay time for sessions.

  Returns:
    The (lazily created) ClientCommand protobuf with the delay applied.
  """
  command = self._client_command
  if not command:
    # First customization: create the shared ClientCommand message.
    command = client_commands_pb2.ClientCommand()
    self._client_command = command
  command.sessions_commit_delay_seconds = sessions_commit_delay_seconds
  return command