# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An implementation of the server side of the Chromium sync protocol.

The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""

import base64
import cgi
import copy
import google.protobuf.text_format
import hashlib
import operator
import pickle
import random
import string
import sys
import threading
import time
import urlparse
import uuid

import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import get_updates_caller_info_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_specifics_pb2
import managed_user_shared_setting_specifics_pb2
import managed_user_whitelist_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_pb2
import sync_enums_pb2
import synced_notification_app_info_specifics_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2
import wifi_credential_specifics_pb2

# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has. But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    AUTOFILL_WALLET_METADATA,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER_WHITELIST,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    SYNCED_NOTIFICATION_APP_INFO,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING,
    WIFI_CREDENTIAL) = range(33)

# An enumeration of the frequency at which the server should send errors
# to the client. This is specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype. Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    AUTOFILL_WALLET_METADATA: SYNC_TYPE_FIELDS['wallet_metadata'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER_WHITELIST: SYNC_TYPE_FIELDS['managed_user_whitelist'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS['synced_notification_app_info'],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    WIFI_CREDENTIAL: SYNC_TYPE_FIELDS['wifi_credential'],
    }

# The parent ID used to indicate a top-level node.
ROOT_ID = '0'

# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after the start of the epoch, since in timezones
# with a positive UTC offset time.mktime throws an OverflowError rather
# than returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="


class Error(Exception):
  """Error class for this module."""


class ProtobufDataTypeFieldNotUnique(Error):
  """An entry should not have more than one data type present."""


class DataTypeIdNotRecognized(Error):
  """The requested data type is not recognized."""


class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    self.datatypes = datatypes


class StoreBirthdayError(Error):
  """The client sent a birthday that doesn't correspond to this server."""


class TransientError(Error):
  """The client would be sent a transient error."""


class SyncInducedError(Error):
  """The client would be sent an error."""


class InducedErrorFrequencyNotDefined(Error):
  """The error frequency defined is not handled."""


class ClientNotConnectedError(Error):
  """The client is not connected to the server."""


def GetEntryType(entry):
  """Extract the sync type from a SyncEntity.

  Args:
    entry: A SyncEntity protobuf object whose type to determine.
  Returns:
    An enum value from ALL_TYPES if the entry's type can be determined, or
    None if the type cannot be determined.
  Raises:
    ProtobufDataTypeFieldNotUnique: More than one type was indicated by
      the entry.
  """
  if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    return TOP_LEVEL
  entry_types = GetEntryTypesFromSpecifics(entry.specifics)
  if not entry_types:
    return None

  # If there is more than one, either there's a bug, or else the caller
  # should use GetEntryTypes.
  if len(entry_types) > 1:
    raise ProtobufDataTypeFieldNotUnique
  return entry_types[0]


def GetEntryTypesFromSpecifics(specifics):
  """Determine the sync types indicated by an EntitySpecifics's field(s).

  If the specifics have more than one recognized data type field (as commonly
  happens with the requested_types field of GetUpdatesMessage), all types
  will be returned. Callers must handle the possibility of the returned
  value having more than one item.

  Args:
    specifics: An EntitySpecifics protobuf message whose extensions to
      enumerate.
  Returns:
    A list of the sync types (values from ALL_TYPES) associated with each
    recognized extension of the specifics message.
  """
  return [data_type for data_type, field_descriptor
          in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
          if specifics.HasField(field_descriptor.name)]


def SyncTypeToProtocolDataTypeId(data_type):
  """Convert from a sync type (python enum) to the protocol's data type id."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].number


def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
  """Convert from the protocol's data type id to a sync type (python enum)."""
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.number == protocol_data_type_id:
      return data_type
  raise DataTypeIdNotRecognized


def DataTypeStringToSyncTypeLoose(data_type_string):
  """Converts a human-readable string to a sync type (python enum).

  Capitalization and pluralization don't matter; this function is appropriate
  for values that might have been typed by a human being; e.g., command-line
  flags or query parameters.
  """
  if data_type_string.isdigit():
    return ProtocolDataTypeIdToSyncType(int(data_type_string))
  name = data_type_string.lower().rstrip('s')
  for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    if field_descriptor.name.lower().rstrip('s') == name:
      return data_type
  raise DataTypeIdNotRecognized
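# For example, DataTypeStringToSyncTypeLoose('Bookmarks'),
# DataTypeStringToSyncTypeLoose('BOOKMARK') and
# DataTypeStringToSyncTypeLoose('bookmark') all resolve to BOOKMARK, while
# an unrecognized name raises DataTypeIdNotRecognized. A purely numeric
# string is treated as a protocol data type id instead.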


def MakeNewKeystoreKey():
  """Returns a new random keystore key."""
  return ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for x in xrange(KEYSTORE_KEY_LENGTH))


def SyncTypeToString(data_type):
  """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
  return SYNC_TYPE_TO_DESCRIPTOR[data_type].name


def CallerInfoToString(caller_info_source):
  """Formats a GetUpdatesSource enum value to a readable string."""
  return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
      .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
      .values_by_number[caller_info_source].name


def ShortDatatypeListSummary(data_types):
  """Compactly formats a list of sync types (python enums) for human eyes.

  This function is intended for use by logging. If the list of datatypes
  contains almost all of the values, the return value will be expressed
  in terms of the datatypes that aren't set.
  """
  included = set(data_types) - set([TOP_LEVEL])
  if not included:
    return 'nothing'
  excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
  if not excluded:
    return 'everything'
  simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
  all_but_text = 'all except %s' % (
      '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
  if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    return simple_text
  else:
    return all_but_text
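# For example, a request for only bookmarks and preferences summarizes as
# 'bookmark+preference', while a request for every type except themes comes
# back as 'all except theme'.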


def GetDefaultEntitySpecifics(data_type):
  """Get an EntitySpecifics having a sync type's default field value."""
  specifics = sync_pb2.EntitySpecifics()
  if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    getattr(specifics, descriptor.name).SetInParent()
  return specifics
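# SetInParent marks the (otherwise empty) sub-message as present, so, for
# example, GetDefaultEntitySpecifics(BOOKMARK).HasField('bookmark') is True
# even though no bookmark fields are populated. This presence bit is what
# lets GetEntryTypesFromSpecifics recognize the entity's type.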


class PermanentItem(object):
  """A specification of one server-created permanent item.

  Attributes:
    tag: A known-to-the-client value that uniquely identifies a
      server-created permanent item.
    name: The human-readable display name for this item.
    parent_tag: The tag of the permanent item's parent. If ROOT_ID, indicates
      a top-level item. Otherwise, this must be the tag value of some other
      server-created permanent item.
    sync_type: A value from ALL_TYPES, giving the datatype of this permanent
      item. This controls which types of client GetUpdates requests will
      cause the permanent item to be created and returned.
    create_by_default: Whether the permanent item is created at startup or
      not. This value is set to True in the default case. Non-default
      permanent items are those that are created only when a client
      explicitly tells the server to do so.
  """

  def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    self.tag = tag
    self.name = name
    self.parent_tag = parent_tag
    self.sync_type = sync_type
    self.create_by_default = create_by_default


class MigrationHistory(object):
  """A record of the migration events associated with an account.

  Each migration event invalidates one or more datatypes on all clients
  that had synced the datatype before the event. Such clients will continue
  to receive MigrationDone errors until they throw away their progress and
  re-sync that datatype from the beginning.
  """

  def __init__(self):
    self._migrations = {}
    for datatype in ALL_TYPES:
      self._migrations[datatype] = [1]
    self._next_migration_version = 2

  def GetLatestVersion(self, datatype):
    return self._migrations[datatype][-1]

  def CheckAllCurrent(self, versions_map):
    """Raises an error if any of the provided versions are out of date.

    This function intentionally reports migrations in the order that they
    were triggered. Doing it this way allows the client to queue up two
    migrations in a row, so the second one is received while responding to
    the first.

    Arguments:
      versions_map: a map whose keys are datatypes and whose values are
        versions.

    Raises:
      MigrationDoneError: if a mismatch is found.
    """
    problems = {}
    for datatype, client_migration in versions_map.iteritems():
      for server_migration in self._migrations[datatype]:
        if client_migration < server_migration:
          problems.setdefault(server_migration, []).append(datatype)
    if problems:
      raise MigrationDoneError(problems[min(problems.keys())])
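  # For example, if bookmarks were migrated twice (bumped to versions 2 and
  # then 3) and a client still reports version 1, the error raised covers
  # the earliest pending migration (version 2); a client reporting version 2
  # is then told about version 3 on a later check, preserving trigger order.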

  def Bump(self, datatypes):
    """Add a record of a migration, to cause errors on future requests."""
    for datatype in datatypes:
      self._migrations[datatype].append(self._next_migration_version)
    self._next_migration_version += 1


class UpdateSieve(object):
  """A filter to remove items the client has already seen."""

  def __init__(self, request, migration_history=None):
    self._original_request = request
    self._state = {}
    self._migration_history = migration_history or MigrationHistory()
    self._migration_versions_to_check = {}
    if request.from_progress_marker:
      for marker in request.from_progress_marker:
        data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
        if marker.HasField('timestamp_token_for_migration'):
          timestamp = marker.timestamp_token_for_migration
          if timestamp:
            self._migration_versions_to_check[data_type] = 1
        elif marker.token:
          (timestamp, version) = pickle.loads(marker.token)
          self._migration_versions_to_check[data_type] = version
        elif marker.HasField('token'):
          timestamp = 0
        else:
          raise ValueError('No timestamp information in progress marker.')
        self._state[data_type] = timestamp
    elif request.HasField('from_timestamp'):
      for data_type in GetEntryTypesFromSpecifics(request.requested_types):
        self._state[data_type] = request.from_timestamp
        self._migration_versions_to_check[data_type] = 1
    if self._state:
      self._state[TOP_LEVEL] = min(self._state.itervalues())

  def SummarizeRequest(self):
    timestamps = {}
    for data_type, timestamp in self._state.iteritems():
      if data_type == TOP_LEVEL:
        continue
      timestamps.setdefault(timestamp, []).append(data_type)
    return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
                     for stamp, types in sorted(timestamps.iteritems()))

  def CheckMigrationState(self):
    self._migration_history.CheckAllCurrent(self._migration_versions_to_check)

  def ClientWantsItem(self, item):
    """Return true if the client hasn't already seen an item."""
    return self._state.get(GetEntryType(item), sys.maxint) < item.version

  def HasAnyTimestamp(self):
    """Return true if at least one datatype was requested."""
    return bool(self._state)

  def GetMinTimestamp(self):
    """Return the smallest timestamp requested across all datatypes."""
    return min(self._state.itervalues())

  def GetFirstTimeTypes(self):
    """Return a list of datatypes requesting updates from timestamp zero."""
    return [datatype for datatype, timestamp in self._state.iteritems()
            if timestamp == 0]

  def GetCreateMobileBookmarks(self):
    """Return true if the client has requested to create the 'Mobile
    Bookmarks' folder.
    """
    return (self._original_request.HasField('create_mobile_bookmarks_folder')
            and self._original_request.create_mobile_bookmarks_folder)

  def SaveProgress(self, new_timestamp, get_updates_response):
    """Write the new_timestamp or new_progress_marker fields to a response."""
    if self._original_request.from_progress_marker:
      for data_type, old_timestamp in self._state.iteritems():
        if data_type == TOP_LEVEL:
          continue
        new_marker = sync_pb2.DataTypeProgressMarker()
        new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
        final_stamp = max(old_timestamp, new_timestamp)
        final_migration = self._migration_history.GetLatestVersion(data_type)
        new_marker.token = pickle.dumps((final_stamp, final_migration))
        get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    elif self._original_request.HasField('from_timestamp'):
      if self._original_request.from_timestamp < new_timestamp:
        get_updates_response.new_timestamp = new_timestamp
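  # Note on the token format: this test server's progress-marker token is
  # simply a pickled (timestamp, migration_version) tuple, which is why
  # __init__ above can recover both values with pickle.loads. Progress
  # tokens are opaque to the client by design, so any server-side encoding
  # works here.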


class SyncDataModel(object):
  """Models the account state of one sync user."""

  _BATCH_SIZE = 100

  # Specify all the permanent items that a model might need.
  _PERMANENT_ITEM_SPECS = [
      PermanentItem('google_chrome_apps', name='Apps',
                    parent_tag=ROOT_ID, sync_type=APPS),
      PermanentItem('google_chrome_app_list', name='App List',
                    parent_tag=ROOT_ID, sync_type=APP_LIST),
      PermanentItem('google_chrome_app_notifications',
                    name='App Notifications',
                    parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
      PermanentItem('google_chrome_app_settings',
                    name='App Settings',
                    parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
      PermanentItem('google_chrome_bookmarks', name='Bookmarks',
                    parent_tag=ROOT_ID, sync_type=BOOKMARK),
      PermanentItem('bookmark_bar', name='Bookmark Bar',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('other_bookmarks', name='Other Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('synced_bookmarks', name='Synced Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
                    create_by_default=False),
      PermanentItem('google_chrome_autofill', name='Autofill',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL),
      PermanentItem('google_chrome_autofill_profiles',
                    name='Autofill Profiles',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
      PermanentItem('google_chrome_autofill_wallet_metadata',
                    name='Autofill Wallet Metadata', parent_tag=ROOT_ID,
                    sync_type=AUTOFILL_WALLET_METADATA),
      PermanentItem('google_chrome_device_info', name='Device Info',
                    parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
      PermanentItem('google_chrome_experiments', name='Experiments',
                    parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
      PermanentItem('google_chrome_extension_settings',
                    name='Extension Settings',
                    parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
      PermanentItem('google_chrome_extensions', name='Extensions',
                    parent_tag=ROOT_ID, sync_type=EXTENSIONS),
      PermanentItem('google_chrome_history_delete_directives',
                    name='History Delete Directives',
                    parent_tag=ROOT_ID,
                    sync_type=HISTORY_DELETE_DIRECTIVE),
      PermanentItem('google_chrome_favicon_images',
                    name='Favicon Images',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_IMAGES),
      PermanentItem('google_chrome_favicon_tracking',
                    name='Favicon Tracking',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_TRACKING),
      PermanentItem('google_chrome_managed_user_settings',
                    name='Managed User Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
      PermanentItem('google_chrome_managed_users',
                    name='Managed Users',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER),
      PermanentItem('google_chrome_managed_user_shared_settings',
                    name='Managed User Shared Settings',
                    parent_tag=ROOT_ID,
                    sync_type=MANAGED_USER_SHARED_SETTING),
      PermanentItem('google_chrome_managed_user_whitelists',
                    name='Managed User Whitelists', parent_tag=ROOT_ID,
                    sync_type=MANAGED_USER_WHITELIST),
      PermanentItem('google_chrome_nigori', name='Nigori',
                    parent_tag=ROOT_ID, sync_type=NIGORI),
      PermanentItem('google_chrome_passwords', name='Passwords',
                    parent_tag=ROOT_ID, sync_type=PASSWORD),
      PermanentItem('google_chrome_preferences', name='Preferences',
                    parent_tag=ROOT_ID, sync_type=PREFERENCE),
      PermanentItem('google_chrome_priority_preferences',
                    name='Priority Preferences',
                    parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
      PermanentItem('google_chrome_synced_notifications',
                    name='Synced Notifications',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
      PermanentItem('google_chrome_synced_notification_app_info',
                    name='Synced Notification App Info',
                    parent_tag=ROOT_ID,
                    sync_type=SYNCED_NOTIFICATION_APP_INFO),
      PermanentItem('google_chrome_search_engines', name='Search Engines',
                    parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
      PermanentItem('google_chrome_sessions', name='Sessions',
                    parent_tag=ROOT_ID, sync_type=SESSION),
      PermanentItem('google_chrome_themes', name='Themes',
                    parent_tag=ROOT_ID, sync_type=THEME),
      PermanentItem('google_chrome_typed_urls', name='Typed URLs',
                    parent_tag=ROOT_ID, sync_type=TYPED_URL),
      PermanentItem('google_chrome_wifi_credentials', name='WiFi Credentials',
                    parent_tag=ROOT_ID, sync_type=WIFI_CREDENTIAL),
      PermanentItem('google_chrome_dictionary', name='Dictionary',
                    parent_tag=ROOT_ID, sync_type=DICTIONARY),
      PermanentItem('google_chrome_articles', name='Articles',
                    parent_tag=ROOT_ID, sync_type=ARTICLE),
      ]

  def __init__(self):
    # Monotonically increasing version number. The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    self.ResetStoreBirthday()
    self.migration_history = MigrationHistory()
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    self._keys = [MakeNewKeystoreKey()]

  def _SaveEntry(self, entry):
    """Insert or update an entry in the change log, and give it a new version.

    The ID fields of this entry are assumed to be valid server IDs. This
    entry will be updated with a new version number and sync_timestamp.

    Args:
      entry: The entry to be added or updated.
    """
    self._version += 1
    # Maintain a global (rather than per-item) sequence number and use it
    # both as the per-entry version as well as the update-progress timestamp.
    # This simulates the behavior of the original server implementation.
    entry.version = self._version
    entry.sync_timestamp = self._version

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry:
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    self._entries[entry.id_string] = copy.deepcopy(entry)

  def _ServerTagToId(self, tag):
    """Determine the server ID from a server-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      tag: The unique, known-to-the-client tag of a server-generated item.
    Returns:
      The string value of the computed server ID.
    """
    if not tag or tag == ROOT_ID:
      return tag
    spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)

  def _TypeToTypeRootId(self, model_type):
    """Returns the server ID for the type root node of the given type."""
    tag = [x.tag for x in self._PERMANENT_ITEM_SPECS
           if x.sync_type == model_type][0]
    return self._ServerTagToId(tag)

  def _ClientTagToId(self, datatype, tag):
    """Determine the server ID from a client-unique tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      tag: The unique, opaque-to-the-server tag of a client-tagged item.
    Returns:
      The string value of the computed server ID.
    """
    return self._MakeCurrentId(datatype, '<client tag>%s' % tag)

  def _ClientIdToId(self, datatype, client_guid, client_item_id):
    """Compute a unique server ID from a client-local ID tag.

    The resulting value is guaranteed not to collide with the other ID
    generation methods.

    Args:
      datatype: The sync type (python enum) of the identified object.
      client_guid: A globally unique ID that identifies the client which
        created this item.
      client_item_id: An ID that uniquely identifies this item on the client
        which created it.
    Returns:
      The string value of the computed server ID.
    """
    # Using the client ID info is not required here (we could instead generate
    # a random ID), but it's useful for debugging.
    return self._MakeCurrentId(
        datatype,
        '<server ID originally>%s/%s' % (client_guid, client_item_id))

  def _MakeCurrentId(self, datatype, inner_id):
    return '%d^%d^%s' % (datatype,
                         self.migration_history.GetLatestVersion(datatype),
                         inner_id)

  def _ExtractIdInfo(self, id_string):
    if not id_string or id_string == ROOT_ID:
      return None
    datatype_string, separator, remainder = id_string.partition('^')
    migration_version_string, separator, inner_id = remainder.partition('^')
    return (int(datatype_string), int(migration_version_string), inner_id)
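  # Server IDs therefore look like '<datatype>^<migration version>^<inner
  # id>'. For example, a bookmark (enum value 9) committed with client tag
  # 'xyz' before any migrations would get the ID '9^1^<client tag>xyz', and
  # _ExtractIdInfo would return (9, 1, '<client tag>xyz') for it.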

  def _WritePosition(self, entry, parent_id):
    """Ensure the entry has an absolute, numeric position and parent_id.

    Historically, clients would specify positions using the predecessor-based
    references in the insert_after_item_id field; starting July 2011, this
    was changed and Chrome now sends up the absolute position. The server
    must store a position_in_parent value and must not maintain
    insert_after_item_id.
    Starting in Jan 2013, the client will also send up a unique_position
    field which should be saved and returned on subsequent GetUpdates.

    Args:
      entry: The entry for which to write a position. Its ID fields are
        assumed to be server IDs. This entry will have its parent_id_string,
        position_in_parent and unique_position fields updated; its
        insert_after_item_id field will be cleared.
      parent_id: The ID of the entry intended as the new parent.
    """
    entry.parent_id_string = parent_id
    if not entry.HasField('position_in_parent'):
      entry.position_in_parent = 1337  # A debuggable, distinctive default.
    entry.ClearField('insert_after_item_id')

  def _ItemExists(self, id_string):
    """Determine whether an item exists in the changelog."""
    return id_string in self._entries

  def _CreatePermanentItem(self, spec):
    """Create one permanent item from its spec, if it doesn't exist.

    The resulting item is added to the changelog.

    Args:
      spec: A PermanentItem object holding the properties of the item to
        create.
    """
    id_string = self._ServerTagToId(spec.tag)
    if self._ItemExists(id_string):
      return
    print 'Creating permanent item: %s' % spec.name
    entry = sync_pb2.SyncEntity()
    entry.id_string = id_string
    entry.non_unique_name = spec.name
    entry.name = spec.name
    entry.server_defined_unique_tag = spec.tag
    entry.folder = True
    entry.deleted = False
    entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    self._SaveEntry(entry)

  def _CreateDefaultPermanentItems(self, requested_types):
    """Ensure creation of all default permanent items for a set of types.

    Args:
      requested_types: A list of sync data types from ALL_TYPES.
        All default permanent items of only these types will be created.
    """
    for spec in self._PERMANENT_ITEM_SPECS:
      if spec.sync_type in requested_types and spec.create_by_default:
        self._CreatePermanentItem(spec)

  def ResetStoreBirthday(self):
    """Resets the store birthday to a random value."""
    # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    self.store_birthday = '%0.30f' % random.random()

  def StoreBirthday(self):
    """Gets the store birthday."""
    return self.store_birthday

  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    The returned entries are limited to being _BATCH_SIZE many. The entries
    are returned in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
        has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining). Version is a new
      timestamp value, which should be used as the starting point for the
      next query. Entries is the batch of entries meeting the current
      timestamp query. Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # The mobile bookmarks ('Synced Bookmarks') folder is not created by
    # default; create it only when the client requests it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick batch_size new changes, and then filter them. This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict the batch to the requested types. Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))
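  # For example, with _BATCH_SIZE = 100 and 250 changed entries, three
  # successive GetChanges calls (each resuming from the returned version)
  # would report changes_remaining of 150, 50 and 0 respectively.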

  def GetKeystoreKeys(self):
    """Returns the encryption keys for this account."""
    print "Returning encryption keys: %s" % self._keys
    return self._keys

  def _CopyOverImmutableFields(self, entry):
    """Preserve immutable fields by copying pre-commit state.

    Args:
      entry: A sync entity from the client.
    """
    if entry.id_string in self._entries:
      if self._entries[entry.id_string].HasField(
          'server_defined_unique_tag'):
        entry.server_defined_unique_tag = (
            self._entries[entry.id_string].server_defined_unique_tag)

  def _CheckVersionForCommit(self, entry):
    """Perform an optimistic concurrency check on the version number.

    Clients are only allowed to commit if they report having seen the most
    recent version of an object.

    Args:
      entry: A sync entity from the client. It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the client's version matches the
      newest server version for the given entry.
    """
    if entry.id_string in self._entries:
      # Allow edits/deletes if the version matches, and any undeletion.
      return (self._entries[entry.id_string].version == entry.version or
              self._entries[entry.id_string].deleted)
    else:
      # Allow an unknown ID only if the client thinks it's new too.
      return entry.version == 0

  def _CheckParentIdForCommit(self, entry):
    """Check that the parent ID referenced in a SyncEntity actually exists.

    Args:
      entry: A sync entity from the client. It is assumed that ID fields
        have been converted to server IDs.
    Returns:
      A boolean value indicating whether the entity's parent ID is an object
      that actually exists (and is not deleted) in the current account state.
    """
    if entry.parent_id_string == ROOT_ID:
      # This is generally allowed.
      return True
    if (not entry.HasField('parent_id_string') and
        entry.HasField('client_defined_unique_tag')):
      return True  # Unique client tag items do not need to specify a parent.
    if entry.parent_id_string not in self._entries:
      print 'Warning: Client sent unknown ID. Should never happen.'
      return False
    if entry.parent_id_string == entry.id_string:
      print 'Warning: Client sent circular reference. Should never happen.'
      return False
    if self._entries[entry.parent_id_string].deleted:
      # This can happen in a race condition between two clients.
      return False
    if not self._entries[entry.parent_id_string].folder:
      print 'Warning: Client sent non-folder parent. Should never happen.'
      return False
    return True

  def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
    """Convert ID fields in a client sync entry to server IDs.

    A commit batch sent by a client may contain new items for which the
    server has not generated IDs yet. And within a commit batch, later
    items are allowed to refer to earlier items. This method will
    generate server IDs for new items, as well as rewrite references
    to items whose server IDs were generated earlier in the batch.

    Args:
      entry: The client sync entry to modify.
      cache_guid: The globally unique ID of the client that sent this
        commit request.
      commit_session: A dictionary mapping the original IDs to the new server
        IDs, for any items committed earlier in the batch.
    """
    if entry.version == 0:
      data_type = GetEntryType(entry)
      if entry.HasField('client_defined_unique_tag'):
        # When present, this should determine the item's ID.
        new_id = self._ClientTagToId(data_type,
                                     entry.client_defined_unique_tag)
      else:
        new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
        entry.originator_cache_guid = cache_guid
        entry.originator_client_item_id = entry.id_string
      commit_session[entry.id_string] = new_id  # Remember the remapping.
      entry.id_string = new_id
    if entry.parent_id_string in commit_session:
      entry.parent_id_string = commit_session[entry.parent_id_string]
    if entry.insert_after_item_id in commit_session:
      entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
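  # For example, if a client commits a new folder with local ID 'local_1'
  # followed by a child whose parent_id_string is also 'local_1', the folder
  # is assigned a server ID first, commit_session records
  # {'local_1': <server id>}, and the child's parent reference is rewritten
  # to that server ID when it is processed later in the same batch.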

  def ValidateCommitEntries(self, entries):
    """Raise an exception if a commit batch contains any global errors.

    Arguments:
      entries: an iterable containing commit-form SyncEntity protocol
        buffers.

    Raises:
      MigrationDoneError: if any of the entries reference a recently-migrated
        datatype.
    """
    server_ids_in_commit = set()
    local_ids_in_commit = set()
    for entry in entries:
      if entry.version:
        server_ids_in_commit.add(entry.id_string)
      else:
        local_ids_in_commit.add(entry.id_string)
      if entry.HasField('parent_id_string'):
        if entry.parent_id_string not in local_ids_in_commit:
          server_ids_in_commit.add(entry.parent_id_string)

    versions_present = {}
    for server_id in server_ids_in_commit:
      parsed = self._ExtractIdInfo(server_id)
      if parsed:
        datatype, version, _ = parsed
        versions_present.setdefault(datatype, []).append(version)

    self.migration_history.CheckAllCurrent(
        dict((k, min(v)) for k, v in versions_present.iteritems()))

  def CommitEntry(self, entry, cache_guid, commit_session):
    """Attempt to commit one entry to the user's account.

    Args:
      entry: A SyncEntity protobuf representing desired object changes.
      cache_guid: A string value uniquely identifying the client; this
        is used for ID generation and will determine the
        originator_cache_guid if the entry is new.
      commit_session: A dictionary mapping client IDs to server IDs for any
        objects committed earlier this session. If the entry gets a new ID
        during commit, the change will be recorded here.
    Returns:
      A SyncEntity reflecting the post-commit value of the entry, or None
      if the entry was not committed due to an error.
    """
    entry = copy.deepcopy(entry)

    # Generate server IDs for this entry, and write generated server IDs
    # from earlier entries into the message's fields, as appropriate. The
    # ID generation state is stored in 'commit_session'.
    self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

    # Sets the parent ID field for a client-tagged item. The client is
    # allowed to not specify parents for these types of items. The server
    # can figure out on its own what the parent ID for this entry should be.
    self._RewriteParentIdForUniqueClientEntry(entry)

    # Perform the optimistic concurrency check on the entry's version number.
    # Clients are not allowed to commit unless they indicate that they've
    # seen the most recent version of an object.
    if not self._CheckVersionForCommit(entry):
      return None

    # Check the validity of the parent ID; it must exist at this point.
    # TODO(nick): Implement cycle detection and resolution.
    if not self._CheckParentIdForCommit(entry):
      return None

    self._CopyOverImmutableFields(entry)

    # At this point, the commit is definitely going to happen.

    # Deletion works by storing a limited record for an entry, called a
    # tombstone. A sync server must track deleted IDs forever, since it does
    # not keep track of client knowledge (there's no deletion ACK event).
    if entry.deleted:
      def MakeTombstone(id_string, datatype):
        """Make a tombstone entry that will replace the entry being deleted.

        Args:
          id_string: The ID of the SyncEntity to be deleted.
          datatype: The sync type (from ALL_TYPES) of that entity.
        Returns:
          A new SyncEntity reflecting the fact that the entry is deleted.
        """
        # Only the ID, version and deletion state are preserved on a
        # tombstone.
        tombstone = sync_pb2.SyncEntity()
        tombstone.id_string = id_string
        tombstone.deleted = True
        tombstone.name = ''
        tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
        return tombstone

      def IsChild(child_id):
        """Check if a SyncEntity is a child of entry, or any of its children.

        Args:
          child_id: The ID of the SyncEntity that is a possible child of
            entry.
        Returns:
          True if it is a child; false otherwise.
        """
        if child_id not in self._entries:
          return False
        if self._entries[child_id].parent_id_string == entry.id_string:
          return True
        return IsChild(self._entries[child_id].parent_id_string)

      # Identify any children entry might have.
      child_ids = [child.id_string for child in self._entries.itervalues()
                   if IsChild(child.id_string)]

      # Mark all children that were identified as deleted.
      for child_id in child_ids:
        datatype = GetEntryType(self._entries[child_id])
        self._SaveEntry(MakeTombstone(child_id, datatype))

      # Delete entry itself.
      datatype = GetEntryType(self._entries[entry.id_string])
      entry = MakeTombstone(entry.id_string, datatype)
    else:
      # Comments in sync.proto detail how the representation of positional
      # ordering works.
      #
      # We've almost fully deprecated the 'insert_after_item_id' field.
      # The 'position_in_parent' field is also deprecated, but as of Jan 2013
      # is still in common use. The 'unique_position' field is the latest
      # and greatest in positioning technology.
      #
      # This server supports 'position_in_parent' and 'unique_position'.
      self._WritePosition(entry, entry.parent_id_string)

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry and not entry.HasField('originator_cache_guid'):
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Store the current time since the Unix epoch in milliseconds.
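    # Note: time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS is the
    # local-time rendering of the Unix epoch itself, so subtracting it from
    # time.mktime(time.gmtime()) cancels the local UTC offset and leaves
    # plain seconds since the epoch. (See FIRST_DAY_UNIX_TIME_EPOCH above
    # for why the epoch is not fed to mktime directly.)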
    entry.mtime = (int((time.mktime(time.gmtime()) -
        (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS)) * 1000))

    # Commit the change. This also updates the version number.
    self._SaveEntry(entry)
    return entry

  def _RewriteVersionInId(self, id_string):
    """Rewrites an ID so that its migration version becomes current."""
    parsed_id = self._ExtractIdInfo(id_string)
    if not parsed_id:
      return id_string
    datatype, old_migration_version, inner_id = parsed_id
    return self._MakeCurrentId(datatype, inner_id)

  def _RewriteParentIdForUniqueClientEntry(self, entry):
    """Sets the entry's parent ID field to the appropriate value.

    The client must always set enough of the specifics of the entries it
    sends up such that the server can identify its type.
    (See crbug.com/373859)

    The client is under no obligation to set the parent ID field. The server
    can always infer what the appropriate parent for this model type should
    be. Having the client not send the parent ID is a step towards the
    removal of type root nodes. (See crbug.com/373869)

    This server implements these features by "faking" the existence of a
    parent ID early on in the commit processing.

    This function has no effect on non-client-tagged items.
    """
    if not entry.HasField('client_defined_unique_tag'):
      return  # Skip this processing for non-client-tagged types.
    data_type = GetEntryType(entry)
    entry.parent_id_string = self._TypeToTypeRootId(data_type)

  def TriggerMigration(self, datatypes):
    """Cause a migration to occur for a set of datatypes on this account.

    Clients will see the MIGRATION_DONE error for these datatypes until they
    resync them.
    """
    self.migration_history.Bump(datatypes)
    all_entries = self._entries.values()
    self._entries.clear()
    for entry in all_entries:
      new_id = self._RewriteVersionInId(entry.id_string)
      entry.id_string = new_id
      if entry.HasField('parent_id_string'):
        entry.parent_id_string = self._RewriteVersionInId(
            entry.parent_id_string)
      self._entries[entry.id_string] = entry

  def TriggerSyncTabFavicons(self):
    """Set the 'sync_tab_favicons' field of this account's nigori node.

    If the field is not currently set, writes a new nigori node entry
    with the field set. Otherwise does nothing.
    """
    nigori_tag = "google_chrome_nigori"
    nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
    if nigori_original.specifics.nigori.sync_tab_favicons:
      return
    nigori_new = copy.deepcopy(nigori_original)
    nigori_new.specifics.nigori.sync_tab_favicons = True
    self._SaveEntry(nigori_new)

  def TriggerCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under the Bookmarks permanent item.

    Clients will then receive the Synced Bookmarks folder on future
    GetUpdates, and new bookmarks can be added within the Synced Bookmarks
    folder.
    """
    synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
                              if spec.name == "Synced Bookmarks"]
    self._CreatePermanentItem(synced_bookmarks_spec)

  def TriggerEnableKeystoreEncryption(self):
    """Create the keystore_encryption experiment entity and enable it.

    A new entity within the EXPERIMENTS datatype is created with the unique
    client tag "keystore_encryption" if it doesn't already exist. The
    keystore_encryption message is then filled with |enabled| set to true.
    """
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    keystore_encryption_id = self._ClientTagToId(
        EXPERIMENTS,
        KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
    keystore_entry = self._entries.get(keystore_encryption_id)
    if keystore_entry is None:
      keystore_entry = sync_pb2.SyncEntity()
      keystore_entry.id_string = keystore_encryption_id
      keystore_entry.name = "Keystore Encryption"
      keystore_entry.client_defined_unique_tag = (
          KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
      keystore_entry.folder = False
      keystore_entry.deleted = False
      keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(keystore_entry, experiment_id)

    keystore_entry.specifics.experiments.keystore_encryption.enabled = True

    self._SaveEntry(keystore_entry)

  def TriggerRotateKeystoreKeys(self):
    """Rotate the current set of keystore encryption keys.

    |self._keys| will have a new random encryption key appended to it. We
    touch the nigori node so that each client will receive the new
    encryption keys only once.
    """
    # Add a new encryption key.
    self._keys.append(MakeNewKeystoreKey())

    # Increment the nigori node's timestamp, so clients will get the new
    # keys on their next GetUpdates (any time the nigori node is sent back,
    # we also send back the keystore keys).
    nigori_tag = "google_chrome_nigori"
    self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))

  def TriggerAcknowledgeManagedUsers(self):
    """Set the "acknowledged" flag for any managed user entities that don't
    have it set already.
    """
    if not self.acknowledge_managed_users:
      return

    managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
                     if entry.specifics.HasField('managed_user')
                     and not entry.specifics.managed_user.acknowledged]
    for user in managed_users:
      user.specifics.managed_user.acknowledged = True
      self._SaveEntry(user)

  def TriggerEnablePreCommitGetUpdateAvoidance(self):
    """Sets the experiment to enable pre-commit GetUpdate avoidance."""
    experiment_id = self._ServerTagToId("google_chrome_experiments")
    pre_commit_gu_avoidance_id = self._ClientTagToId(
        EXPERIMENTS,
        PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
    entry = self._entries.get(pre_commit_gu_avoidance_id)
    if entry is None:
      entry = sync_pb2.SyncEntity()
      entry.id_string = pre_commit_gu_avoidance_id
      entry.name = "Pre-commit GU avoidance"
      entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
      entry.folder = False
      entry.deleted = False
      entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
      self._WritePosition(entry, experiment_id)
    entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
    self._SaveEntry(entry)

  def SetInducedError(self, error, error_frequency,
                      sync_count_before_errors):
    self.induced_error = error
    self.induced_error_frequency = error_frequency
    self.sync_count_before_errors = sync_count_before_errors

  def GetInducedError(self):
    return self.induced_error

  def _GetNextVersionNumber(self):
    """Returns one more than the greatest version number seen."""
    entries = sorted(self._entries.values(),
                     key=operator.attrgetter('version'))
    if len(entries) < 1:
      raise ClientNotConnectedError
    return entries[-1].version + 1


class TestServer(object):
  """An object to handle requests for one (and only one) Chrome Sync account.

  TestServer consumes the sync command messages that are the outermost
  layers of the protocol, performs the corresponding actions on its
  SyncDataModel, and constructs an appropriate response message.
  """

  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    self.client_name_generator = (
        '+' * times + chr(c)
        for times in xrange(0, sys.maxint)
        for c in xrange(ord('A'), ord('Z')))
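    # The generator above yields short nicknames in the order 'A', 'B', ...,
    # 'Y', then '+A', '+B', ..., adding one '+' per pass. (Note that
    # xrange(ord('A'), ord('Z')) stops just short of 'Z' itself.)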
    self.transient_error = False
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set
    # to None, no ClientCommand should be sent.
    self._client_command = None

  def GetShortClientName(self, query):
    parsed = cgi.parse_qs(query[query.find('?')+1:])
    client_id = parsed.get('client_id')
    if not client_id:
      return '?'
    client_id = client_id[0]
    if client_id not in self.clients:
      self.clients[client_id] = self.client_name_generator.next()
    return self.clients[client_id]

  def CheckStoreBirthday(self, request):
    """Raises StoreBirthdayError if the request's birthday is a mismatch."""
    if not request.HasField('store_birthday'):
      return
    if self.account.StoreBirthday() != request.store_birthday:
      raise StoreBirthdayError

  def CheckTransientError(self):
    """Raises TransientError if the transient_error flag is set."""
    if self.transient_error:
      raise TransientError

  def CheckSendError(self):
    """Raises SyncInducedError if needed."""
    if (self.account.induced_error.error_type !=
        sync_enums_pb2.SyncEnums.UNKNOWN):
      # ERROR_FREQUENCY_ALWAYS means return the given error for all requests.
      if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
        raise SyncInducedError
      # ERROR_FREQUENCY_TWO_THIRDS means the FIRST 2 of every 3 requests
      # return an error. Don't switch the order of failures: there are test
      # cases that rely on the first 2 being the failures rather than the
      # last 2.
      elif (self.account.induced_error_frequency ==
            ERROR_FREQUENCY_TWO_THIRDS):
        if (((self.sync_count -
              self.account.sync_count_before_errors) % 3) != 0):
          raise SyncInducedError
      else:
        raise InducedErrorFrequencyNotDefined
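  # Worked example: if the error was induced when sync_count was 10, the
  # requests with sync_count 11 and 12 raise (11 - 10 = 1 and 12 - 10 = 2,
  # neither divisible by 3), while the request with sync_count 13 succeeds
  # (13 - 10 = 3).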

  def HandleMigrate(self, path):
    query = urlparse.urlparse(path)[4]
    code = 200
    self.account_lock.acquire()
    try:
      datatypes = [DataTypeStringToSyncTypeLoose(x)
                   for x in urlparse.parse_qs(query).get('type', [])]
      if datatypes:
        self.account.TriggerMigration(datatypes)
        response = 'Migrated datatypes %s' % (
            ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
      else:
        response = 'Please specify one or more <i>type=name</i> parameters'
        code = 400
    except DataTypeIdNotRecognized:
      response = 'Could not interpret datatype name'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))

  def HandleSetInducedError(self, path):
    query = urlparse.urlparse(path)[4]
    self.account_lock.acquire()
    code = 200
    response = 'Success'
    error = sync_pb2.ClientToServerResponse.Error()
    try:
      error_type = urlparse.parse_qs(query)['error']
      action = urlparse.parse_qs(query)['action']
      error.error_type = int(error_type[0])
      error.action = int(action[0])
      try:
        error.url = (urlparse.parse_qs(query)['url'])[0]
      except KeyError:
        error.url = ''
      try:
        error.error_description = (
            (urlparse.parse_qs(query)['error_description'])[0])
      except KeyError:
        error.error_description = ''
      try:
        error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
      except KeyError:
        error_frequency = ERROR_FREQUENCY_ALWAYS
      self.account.SetInducedError(error, error_frequency, self.sync_count)
      response = ('Error = %d, action = %d, url = %s, description = %s' %
                  (error.error_type, error.action,
                   error.url,
                   error.error_description))
    except (KeyError, ValueError):
      # The required 'error' and 'action' parameters were missing or
      # non-numeric.
      response = 'Could not parse url'
      code = 400
    finally:
      self.account_lock.release()
    return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
            (code, code, response))

  def HandleCreateBirthdayError(self):
    self.account.ResetStoreBirthday()
    return (
        200,
        '<html><title>Birthday error</title><H1>Birthday error</H1></html>')

  def HandleSetTransientError(self):
    self.transient_error = True
    return (
        200,
        '<html><title>Transient error</title><H1>Transient error</H1></html>')

  def HandleSetSyncTabFavicons(self):
    """Set 'sync_tab_favicons' field of the nigori node for this account."""
    self.account.TriggerSyncTabFavicons()
    return (
        200,
        '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')

  def HandleCreateSyncedBookmarks(self):
    """Create the Synced Bookmarks folder under Bookmarks."""
    self.account.TriggerCreateSyncedBookmarks()
    return (
        200,
        '<html><title>Synced Bookmarks</title>'
        '<H1>Synced Bookmarks</H1></html>')

  def HandleEnableKeystoreEncryption(self):
    """Enables the keystore encryption experiment."""
    self.account.TriggerEnableKeystoreEncryption()
    return (
        200,
        '<html><title>Enable Keystore Encryption</title>'
        '<H1>Enable Keystore Encryption</H1></html>')

  def HandleRotateKeystoreKeys(self):
    """Rotate the keystore encryption keys."""
    self.account.TriggerRotateKeystoreKeys()
    return (
        200,
        '<html><title>Rotate Keystore Keys</title>'
        '<H1>Rotate Keystore Keys</H1></html>')

  def HandleEnableManagedUserAcknowledgement(self):
    """Enable acknowledging newly created managed users."""
    self.account.acknowledge_managed_users = True
    return (
        200,
        '<html><title>Enable Managed User Acknowledgement</title>'
        '<h1>Enable Managed User Acknowledgement</h1></html>')

  def HandleEnablePreCommitGetUpdateAvoidance(self):
    """Enables the pre-commit GU avoidance experiment."""
    self.account.TriggerEnablePreCommitGetUpdateAvoidance()
    return (
        200,
        '<html><title>Enable pre-commit GU avoidance</title>'
        '<H1>Enable pre-commit GU avoidance</H1></html>')

  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class. It is safe to call this
    method from multiple threads.

    Args:
      query: The query string of the request URL; used only to identify the
        client for logging.
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
      result code, while the second value is a string of bytes which is the
      serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    def print_context(direction):
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query),
                                      direction, __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (
          ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError, error:
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError, error:
      ### This is deprecated and will be removed once the test cases that
      ### rely on it are removed.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError, error:
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()

  def HandleCommit(self, commit_message, commit_response):
    """Respond to a Commit request by updating the user's account state.

    Commit attempts stop after the first error, returning a CONFLICT result
    for any unattempted entries.

    Args:
      commit_message: A sync_pb.CommitMessage protobuf holding the content
        of the client's request.
      commit_response: A sync_pb.CommitResponse protobuf into which a reply
        to the client request will be written.
    """
    commit_response.SetInParent()
    batch_failure = False
    session = {}  # Tracks ID renaming during the commit operation.
    guid = commit_message.cache_guid

    self.account.ValidateCommitEntries(commit_message.entries)

    for entry in commit_message.entries:
      server_entry = None
      if not batch_failure:
        # Try to commit the change to the account.
        server_entry = self.account.CommitEntry(entry, guid, session)

      # An entryresponse is returned in both success and failure cases.
      reply = commit_response.entryresponse.add()
      if not server_entry:
        reply.response_type = sync_pb2.CommitResponse.CONFLICT
        reply.error_message = 'Conflict.'
        batch_failure = True  # One failure halts the batch.
      else:
        reply.response_type = sync_pb2.CommitResponse.SUCCESS
        # These are the properties that the server is allowed to override
        # during commit; the client wants to know their values at the end
        # of the operation.
        reply.id_string = server_entry.id_string
        if not server_entry.deleted:
          # Note: the production server doesn't actually send the
          # parent_id_string on commit responses, so we don't either.
          reply.position_in_parent = server_entry.position_in_parent
          reply.version = server_entry.version
          reply.name = server_entry.name
          reply.non_unique_name = server_entry.non_unique_name
        else:
          reply.version = entry.version + 1

  def HandleGetUpdates(self, update_request, update_response):
    """Respond to a GetUpdates request by querying the user's account.

    Args:
      update_request: A sync_pb.GetUpdatesMessage protobuf holding the
        content of the client's request.
      update_response: A sync_pb.GetUpdatesResponse protobuf into which a
        reply to the client request will be written.
    """
    update_response.SetInParent()
    update_sieve = UpdateSieve(update_request, self.account.migration_history)

    print CallerInfoToString(update_request.caller_info.source),
    print update_sieve.SummarizeRequest()

    update_sieve.CheckMigrationState()

    new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)

    update_response.changes_remaining = remaining
    sending_nigori_node = False
    for entry in entries:
      if entry.name == 'Nigori':
        sending_nigori_node = True
      reply = update_response.entries.add()
      reply.CopyFrom(entry)
    update_sieve.SaveProgress(new_timestamp, update_response)

    if update_request.need_encryption_key or sending_nigori_node:
      update_response.encryption_keys.extend(self.account.GetKeystoreKeys())

  def HandleGetOauth2Token(self):
    return (int(self.response_code),
            '{\n'
            '  "refresh_token": "' + self.request_token + '",\n'
            '  "access_token": "' + self.access_token + '",\n'
            '  "expires_in": ' + str(self.expires_in) + ',\n'
            '  "token_type": "' + self.token_type + '"\n'
            '}')

  def HandleSetOauth2Token(self, response_code, request_token, access_token,
                           expires_in, token_type):
    if response_code != 0:
      self.response_code = response_code
    if request_token != '':
      self.request_token = request_token
    if access_token != '':
      self.access_token = access_token
    if expires_in != 0:
      self.expires_in = expires_in
    if token_type != '':
      self.token_type = token_type

    return (200,
            '<html><title>Set OAuth2 Token</title>'
            '<H1>This server will now return the OAuth2 Token:</H1>'
            '<p>response_code: ' + str(self.response_code) + '</p>'
            '<p>request_token: ' + self.request_token + '</p>'
            '<p>access_token: ' + self.access_token + '</p>'
            '<p>expires_in: ' + str(self.expires_in) + '</p>'
            '<p>token_type: ' + self.token_type + '</p>'
            '</html>')

  def CustomizeClientCommand(self, sessions_commit_delay_seconds):
    """Customizes the value of the ClientCommand of ServerToClientResponse.

    Currently, this only allows for changing the
    sessions_commit_delay_seconds field.

    Args:
      sessions_commit_delay_seconds: The desired sync delay time for
        sessions.
    """
    if not self._client_command:
      self._client_command = client_commands_pb2.ClientCommand()

    self._client_command.sessions_commit_delay_seconds = \
        sessions_commit_delay_seconds
    return self._client_command