Adding the orphaned options pages to the navigation
[chromium-blink-merge.git] / chrome / common / extensions / docs / server2 / features_bundle.py
blob1cceafda61c2ae111ec284b9c5c4ffafbd496a1d
1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 from copy import copy
7 from branch_utility import BranchUtility
8 from compiled_file_system import SingleFile, Unicode
9 from docs_server_utils import StringIdentity
10 from extensions_paths import API_PATHS, JSON_TEMPLATES
11 from file_system import FileNotFoundError
12 from future import All, Future
13 from path_util import Join
14 from platform_util import GetExtensionTypes, PlatformToExtensionType
15 from third_party.json_schema_compiler.json_parse import Parse
# Names of the per-category feature JSON files looked up under each API path.
_API_FEATURES = '_api_features.json'
_MANIFEST_FEATURES = '_manifest_features.json'
_PERMISSION_FEATURES = '_permission_features.json'
def HasParent(feature_name, feature, all_feature_names):
  '''Returns True if |feature_name| denotes a child feature: its name is
  dotted, the would-be parent is a known feature, and the feature does not
  opt out of inheritance via 'noparent'.
  '''
  parent_name, separator, _ = feature_name.rpartition('.')
  return (separator == '.' and
          parent_name in all_feature_names and
          not feature.get('noparent'))
def GetParentName(feature_name, feature, all_feature_names):
  '''Returns the name of the parent feature, or None if it does not have a
  parent.
  '''
  if HasParent(feature_name, feature, all_feature_names):
    return feature_name.rsplit('.', 1)[0]
  return None
def _CreateFeaturesFromJSONFutures(json_futures):
  '''Returns a dict of features. The value of each feature is a list with
  all of its possible values, aggregated across every future that resolves.
  '''
  def ignore_feature(name, value):
    '''Returns true if this feature should be ignored. Features are ignored if
    they are only available to whitelisted apps or component extensions/apps,
    as in these cases the APIs are not available to public developers.

    Private APIs are also unavailable to public developers, but logic elsewhere
    makes sure they are not listed. So they shouldn't be ignored via this
    mechanism.
    '''
    if name.endswith('Private'):
      return False
    return value.get('location') == 'component' or 'whitelist' in value

  features = {}
  for json_future in json_futures:
    try:
      parsed = Parse(json_future.Get())
    except FileNotFoundError:
      # Not all file system configurations have the extra files.
      continue
    for name, raw in parsed.iteritems():
      # A feature entry may be a single dict or a list of alternative dicts.
      candidates = raw if isinstance(raw, list) else (raw,)
      bucket = features.setdefault(name, [])
      for candidate in candidates:
        if not ignore_feature(name, candidate):
          bucket.append(candidate)
  return features
75 def _CopyParentFeatureValues(child, parent):
76 '''Takes data from feature dict |parent| and copies/merges it
77 into feature dict |child|. Two passes are run over the features,
78 and on the first pass features are not resolved across caches,
79 so a None value for |parent| may be passed in.
80 '''
81 if parent is None:
82 return child
83 merged = copy(parent)
84 merged.pop('noparent', None)
85 merged.pop('name', None)
86 merged.update(child)
87 return merged
def _ResolveFeature(feature_name,
                    feature_values,
                    extra_feature_values,
                    platform,
                    features_type,
                    features_map):
  '''Filters and combines the possible values for a feature into one dict.

  It uses |features_map| to resolve dependencies for each value and inherit
  unspecified platform and channel data. |feature_values| is then filtered
  by platform and all values with the most stable platform are merged into one
  dict. All values in |extra_feature_values| get merged into this dict.

  Returns |resolve_successful| and |feature|. |resolve_successful| is False
  if the feature's dependencies have not been merged yet themselves, meaning
  that this feature can not be reliably resolved yet. |feature| is the
  resulting feature dict, or None if the feature does not exist on the
  platform specified.
  '''
  feature = None
  most_stable_channel = None
  for value in feature_values:
    # If 'extension_types' or 'channel' is unspecified, these values should
    # be inherited from dependencies. If they are specified, these values
    # should override anything specified by dependencies.
    inherit_valid_platform = 'extension_types' not in value
    if inherit_valid_platform:
      valid_platform = None
    else:
      valid_platform = (value['extension_types'] == 'all' or
                        platform in value['extension_types'])
    inherit_channel = 'channel' not in value
    channel = value.get('channel')

    # Copy the dependency list before extending it. |value| may be visited
    # again on a later resolution pass (the 'unresolved' dicts are shallow
    # copies that share these value dicts), so appending the parent directly
    # to value['dependencies'] would accumulate duplicates and leak the
    # mutation across caches.
    dependencies = list(value.get('dependencies', []))
    parent = GetParentName(
        feature_name, value, features_map[features_type]['all_names'])
    if parent is not None:
      # The parent data needs to be resolved so the child can inherit it.
      if parent in features_map[features_type].get('unresolved', ()):
        return False, None
      value = _CopyParentFeatureValues(
          value, features_map[features_type]['resolved'].get(parent))
      # Add the parent as a dependency to ensure proper platform filtering.
      dependencies.append(features_type + ':' + parent)

    for dependency in dependencies:
      dep_type, dep_name = dependency.split(':')
      if (dep_type not in features_map or
          dep_name in features_map[dep_type].get('unresolved', ())):
        # The dependency itself has not been merged yet or the features map
        # does not have the needed data. Fail to resolve.
        return False, None

      dep = features_map[dep_type]['resolved'].get(dep_name)
      if inherit_valid_platform and (valid_platform is None or valid_platform):
        # If dep is None, the dependency does not exist because it has been
        # filtered out by platform. This feature value does not explicitly
        # specify platform data, so filter this feature value out.
        # Only run this check if valid_platform is True or None so that it
        # can't be reset once it is False.
        valid_platform = dep is not None
      if inherit_channel and dep and 'channel' in dep:
        if channel is None or BranchUtility.NewestChannel(
            (dep['channel'], channel)) != channel:
          # Inherit the least stable channel from the dependencies.
          channel = dep['channel']

    # Default to stable on all platforms.
    if valid_platform is None:
      valid_platform = True
    if valid_platform and channel is None:
      channel = 'stable'

    if valid_platform:
      # The feature value is valid. Merge it into the feature dict.
      if feature is None or BranchUtility.NewestChannel(
          (most_stable_channel, channel)) != channel:
        # If this is the first feature value to be merged, copy the dict.
        # If this feature value has a more stable channel than the most stable
        # channel so far, replace the old dict so that it only merges values
        # from the most stable channel.
        feature = copy(value)
        most_stable_channel = channel
      elif channel == most_stable_channel:
        feature.update(value)

  if feature is None:
    # Nothing was left after filtering the values, but all dependency resolves
    # were successful. This feature does not exist on |platform|.
    return True, None

  # Merge in any extra values.
  for value in extra_feature_values:
    feature.update(value)

  # Cleanup, fill in missing fields.
  if 'name' not in feature:
    feature['name'] = feature_name
  feature['channel'] = most_stable_channel
  return True, feature
class _FeaturesCache(object):
  '''Loads and caches one category of features (api/manifest/permission) for
  a single platform, resolving as many features as possible using only this
  category's own data.
  '''
  def __init__(self,
               file_system,
               compiled_fs_factory,
               json_paths,
               extra_paths,
               platform,
               features_type):
    # |json_paths|: feature JSON files; the first is the cache-key file, the
    # rest are merged in. |extra_paths|: extra JSON merged in afterwards.
    self._cache = compiled_fs_factory.Create(
        file_system, self._CreateCache, type(self), category=platform)
    self._text_cache = compiled_fs_factory.ForUnicode(file_system)
    self._json_paths = json_paths
    self._extra_paths = extra_paths
    self._platform = platform
    self._features_type = features_type

  @Unicode
  def _CreateCache(self, _, features_json):
    '''Builds this cache's features dict from |features_json| plus the
    remaining JSON and extra paths. Returns a dict with 'resolved',
    'unresolved', 'extra' and 'all_names' entries.
    '''
    json_path_futures = [self._text_cache.GetFromFile(path)
                         for path in self._json_paths[1:]]
    extra_path_futures = [self._text_cache.GetFromFile(path)
                          for path in self._extra_paths]

    features_values = _CreateFeaturesFromJSONFutures(
        [Future(value=features_json)] + json_path_futures)

    extra_features_values = _CreateFeaturesFromJSONFutures(extra_path_futures)

    # NOTE: the closing brace of this literal was missing in the corrupted
    # source; restored here so the module parses.
    features = {
      'resolved': {},
      'unresolved': copy(features_values),
      'extra': extra_features_values,
      'all_names': set(features_values.keys())
    }

    # Merges as many feature values as possible without resolving dependencies
    # from other FeaturesCaches. Pass in a features_map with just this
    # FeatureCache's features_type. Makes repeated passes until no new
    # resolves are successful.
    new_resolves = True
    while new_resolves:
      new_resolves = False
      for feature_name, feature_values in features_values.iteritems():
        if feature_name not in features['unresolved']:
          continue
        resolve_successful, feature = _ResolveFeature(
            feature_name,
            feature_values,
            extra_features_values.get(feature_name, ()),
            self._platform,
            self._features_type,
            {self._features_type: features})
        if resolve_successful:
          del features['unresolved'][feature_name]
          new_resolves = True
          if feature is not None:
            features['resolved'][feature_name] = feature

    return features

  def GetFeatures(self):
    '''Returns a Future for this cache's features dict, keyed off the first
    JSON path. Returns an empty-dict Future when no paths are configured.
    '''
    if not self._json_paths:
      return Future(value={})
    return self._cache.GetFromFile(self._json_paths[0])
class FeaturesBundle(object):
  '''Provides access to properties of API, Manifest, and Permission features.
  '''
  def __init__(self,
               file_system,
               compiled_fs_factory,
               object_store_creator,
               platform):
    def create_features_cache(features_type, feature_file, *extra_paths):
      # One _FeaturesCache per category; |feature_file| is looked up under
      # every known API path.
      return _FeaturesCache(
          file_system,
          compiled_fs_factory,
          [Join(path, feature_file) for path in API_PATHS],
          extra_paths,
          self._platform,
          features_type)

    # Normalize |platform| to an extension type before building the caches.
    if platform not in GetExtensionTypes():
      self._platform = PlatformToExtensionType(platform)
    else:
      self._platform = platform

    # NOTE: the closing brace of this literal was missing in the corrupted
    # source; restored here so the module parses.
    self._caches = {
      'api': create_features_cache('api', _API_FEATURES),
      'manifest': create_features_cache(
          'manifest',
          _MANIFEST_FEATURES,
          Join(JSON_TEMPLATES, 'manifest.json')),
      'permission': create_features_cache(
          'permission',
          _PERMISSION_FEATURES,
          Join(JSON_TEMPLATES, 'permissions.json'))
    }
    # Namespace the object store by the file system ID because this class is
    # used by the availability finder cross-channel.
    self._object_store = object_store_creator.Create(
        _FeaturesCache,
        category=StringIdentity(file_system.GetIdentity(), self._platform))

  def GetPermissionFeatures(self):
    return self.GetFeatures('permission', ('permission',))

  def GetManifestFeatures(self):
    return self.GetFeatures('manifest', ('manifest',))

  def GetAPIFeatures(self):
    return self.GetFeatures('api', ('api', 'manifest', 'permission'))

  def GetFeatures(self, features_type, dependencies):
    '''Resolves all dependencies in the categories specified by |dependencies|.
    Returns a Future for the features in the |features_type| category.
    '''
    def next_(features):
      # Object-store hit: everything was already resolved on a prior call.
      if features is not None:
        return Future(value=features)

      dependency_futures = []
      cache_types = []
      for cache_type in dependencies:
        cache_types.append(cache_type)
        dependency_futures.append(self._object_store.Get(cache_type))

      def load_features(dependency_features_list):
        futures = []
        for dependency_features, cache_type in zip(dependency_features_list,
                                                   cache_types):
          if dependency_features is not None:
            # Get cached dependencies if possible. If it has been cached, all
            # of its features have been resolved, so the other fields are
            # unnecessary.
            futures.append(Future(value={'resolved': dependency_features}))
          else:
            futures.append(self._caches[cache_type].GetFeatures())

        def resolve(features):
          features_map = {}
          for cache_type, feature in zip(cache_types, features):
            # Copy down to features_map level because the 'resolved' and
            # 'unresolved' dicts will be modified.
            features_map[cache_type] = dict((c, copy(d))
                                            for c, d in feature.iteritems())

          def has_unresolved():
            '''Determines if there are any unresolved features left over in
            any of the categories in |dependencies|.
            '''
            return any(cache.get('unresolved')
                       for cache in features_map.itervalues())

          # Iterate until everything is resolved. If dependencies are multiple
          # levels deep, it might take multiple passes to inherit data to the
          # topmost feature.
          while has_unresolved():
            for cache_type, cache in features_map.iteritems():
              if 'unresolved' not in cache:
                continue
              to_remove = []
              for name, values in cache['unresolved'].iteritems():
                resolve_successful, feature = _ResolveFeature(
                    name,
                    values,
                    cache['extra'].get(name, ()),
                    self._platform,
                    cache_type,
                    features_map)
                if not resolve_successful:
                  continue  # Try again on the next iteration of the while loop

                # When successfully resolved, remove it from the unresolved
                # dict. Add it to the resolved dict if it didn't get deleted.
                to_remove.append(name)
                if feature is not None:
                  cache['resolved'][name] = feature

              for key in to_remove:
                del cache['unresolved'][key]

          # Persist the resolved features so later calls hit the object store.
          for cache_type, cache in features_map.iteritems():
            self._object_store.Set(cache_type, cache['resolved'])
          return features_map[features_type]['resolved']
        return All(futures).Then(resolve)
      return All(dependency_futures).Then(load_features)
    return self._object_store.Get(features_type).Then(next_)