# mediagoblin/storage/cloudfiles.py
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

'''
Make it so that ``import cloudfiles`` does not pick THIS file, but the
python-cloudfiles one.

http://docs.python.org/whatsnew/2.5.html#pep-328-absolute-and-relative-imports
'''
from __future__ import absolute_import
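
# Note (illustrative): without the absolute_import above, Python 2's
# implicit relative imports would resolve ``import cloudfiles`` below to
# this very module (mediagoblin.storage.cloudfiles) rather than to the
# third-party python-cloudfiles package.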

from mediagoblin.storage import StorageInterface, clean_listy_filepath

import cloudfiles
import mimetypes
import logging

_log = logging.getLogger(__name__)


class CloudFilesStorage(StorageInterface):
    '''
    OpenStack/Rackspace Cloud's Swift/CloudFiles support
    '''

    local_storage = False
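
    # A minimal usage sketch (hypothetical values; the kwargs mirror the
    # ``cloudfiles_*`` config keys that __init__ reads below):
    #
    #   storage = CloudFilesStorage(
    #       cloudfiles_user='myuser',
    #       cloudfiles_api_key='0123456789abcdef',
    #       cloudfiles_container='mediagoblin',
    #       cloudfiles_use_servicenet='false')
    #   url = storage.file_url(['media_entries', '1', 'thumb.jpg'])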

    def __init__(self, **kwargs):
        self.param_container = kwargs.get('cloudfiles_container')
        self.param_user = kwargs.get('cloudfiles_user')
        self.param_api_key = kwargs.get('cloudfiles_api_key')
        self.param_host = kwargs.get('cloudfiles_host')
        self.param_use_servicenet = kwargs.get('cloudfiles_use_servicenet')

        # The webm MIME type isn't registered by default, so add it.
        # (mimetypes expects the extension with its leading dot; the old
        # bare "webm" form was never matched by guess_type().)
        mimetypes.add_type("video/webm", ".webm")
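
        # Sanity check (illustrative): after the registration above,
        #   mimetypes.guess_type('video.webm') == ('video/webm', None)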

        if not self.param_host:
            _log.info('No CloudFiles host URL specified, '
                      'defaulting to Rackspace US')

        self.connection = cloudfiles.get_connection(
            username=self.param_user,
            api_key=self.param_api_key,
            # Accept both the config string 'true' and a real boolean
            servicenet=self.param_use_servicenet in ('true', True))

        _log.debug('Connected to {0} (auth: {1})'.format(
            self.connection.connection.host,
            self.connection.auth.host))

        # Fetch the container if it already exists; otherwise create it
        # and make it public. (The old code compared the container *name*
        # against a Container object, which is never equal, so the create
        # branch ran unconditionally.)
        try:
            self.container = self.connection.get_container(
                self.param_container)
        except cloudfiles.errors.NoSuchContainer:
            self.container = self.connection.create_container(
                self.param_container)
            self.container.make_public(
                ttl=60 * 60 * 2)

        _log.debug('Container: {0}'.format(
            self.container.name))

        self.container_uri = self.container.public_ssl_uri()

    def _resolve_filepath(self, filepath):
        return '/'.join(
            clean_listy_filepath(filepath))
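
    # _resolve_filepath example (illustrative):
    #   ['media_entries', '1', 'thumb.jpg'] -> 'media_entries/1/thumb.jpg'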

    def file_exists(self, filepath):
        try:
            self.container.get_object(self._resolve_filepath(filepath))
            return True
        except cloudfiles.errors.NoSuchObject:
            return False

    def get_file(self, filepath, *args, **kwargs):
        """
        Return a wrapped file-like object; the "mode" argument is ignored.
        """
        try:
            obj = self.container.get_object(
                self._resolve_filepath(filepath))
        except cloudfiles.errors.NoSuchObject:
            obj = self.container.create_object(
                self._resolve_filepath(filepath))

            # Detect the mimetype ourselves, since some extensions (webm)
            # may not be universally accepted as video/webm
            mimetype = mimetypes.guess_type(
                filepath[-1])

            if mimetype[0]:
                # Set the mimetype on the CloudFiles object
                obj.content_type = mimetype[0]
                obj.metadata = {'mime-type': mimetype[0]}
            else:
                obj.content_type = 'application/octet-stream'
                obj.metadata = {'mime-type': 'application/octet-stream'}

        return CloudFilesStorageObjectWrapper(obj, *args, **kwargs)

    def delete_file(self, filepath):
        # TODO: Also delete unused directories if empty (safely, with
        # checks to avoid race conditions).
        try:
            self.container.delete_object(
                self._resolve_filepath(filepath))
        except cloudfiles.container.ResponseError:
            # Deleting a missing object is treated as a no-op
            pass

    def file_url(self, filepath):
        return '/'.join([
            self.container_uri,
            self._resolve_filepath(filepath)])
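
    # file_url example (illustrative; the host part comes from
    # container.public_ssl_uri() above):
    #   https://c1234.ssl.cf0.rackcdn.com/media_entries/1/thumb.jpg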

    def copy_locally(self, filepath, dest_path):
        """
        Copy this file locally.

        A basic working method for this is provided that should
        function both for local_storage systems and remote storage
        systems, but if more efficient systems for copying locally
        apply to your system, override this method with something more
        appropriate.
        """
        # Override this method, using the "stream" iterator for
        # efficient streaming
        with self.get_file(filepath, 'rb') as source_file:
            with open(dest_path, 'wb') as dest_file:
                for data in source_file:
                    dest_file.write(data)

    def copy_local_to_storage(self, filename, filepath):
        """
        Copy this file from locally to the storage system.

        This is kind of the opposite of copy_locally. It's likely you
        could override this method with something more appropriate to
        your storage system.
        """
        # It seems that (our implementation of) cloudfiles.write() takes
        # all existing data and appends write(data) to it, sending the
        # full monty over the wire every time. This would of course
        # absolutely kill chunked writes with quadratic (O(n^2))
        # performance and bandwidth usage. So, override this method and
        # use the cloudfiles "send" interface instead.
        # TODO: Fixing write() still seems worthwhile though.
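        # (Illustrative arithmetic: n chunk writes of size c would
        # transfer c + 2c + ... + nc = c * n * (n + 1) / 2 bytes in
        # total, hence O(n^2) rather than the O(n) that send() gives.)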
        _log.debug('Sending {0} to cloudfiles...'.format(filepath))
        with self.get_file(filepath, 'wb') as dest_file:
            with open(filename, 'rb') as source_file:
                # Copy to storage system in 4096 byte chunks
                dest_file.send(source_file)

    def get_file_size(self, filepath):
        """Returns the file size in bytes"""
        obj = self.container.get_object(
            self._resolve_filepath(filepath))
        return obj.total_bytes


class CloudFilesStorageObjectWrapper(object):
    """
    Wrapper for python-cloudfiles's cloudfiles.storage_object.Object
    used to circumvent the mystic `medium.jpg` corruption issue, where
    we had both python-cloudfiles and PIL doing buffering on both
    ends and causing breakage.

    This wrapper currently meets mediagoblin's needs for a public_store
    file-like object.
    """
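
    # Typical use, via CloudFilesStorage.get_file() (sketch):
    #
    #   with storage.get_file(['media_entries', '1', 'thumb.jpg'], 'rb') as f:
    #       data = f.read()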

    def __init__(self, storage_object, *args, **kwargs):
        self.storage_object = storage_object

    def read(self, *args, **kwargs):
        _log.debug('Reading {0}'.format(
            self.storage_object.name))
        return self.storage_object.read(*args, **kwargs)

    def write(self, data, *args, **kwargs):
        self.storage_object.write(data, *args, **kwargs)

    def send(self, *args, **kw):
        self.storage_object.send(*args, **kw)

    def close(self):
        """
        Not sure we need anything here.
        """
        pass

    def __enter__(self):
        """
        Context Manager API implementation
        http://docs.python.org/library/stdtypes.html#context-manager-types
        """
        return self

    def __exit__(self, *exc_info):
        """
        Context Manager API implementation
        see self.__enter__()
        """
        self.close()

    def __iter__(self, **kwargs):
        """Make CloudFile an iterator, yielding 8192 bytes by default

        This returns a generator object that can be used to get the
        object's content in a memory-efficient way.

        Warning: The HTTP response is only complete after this generator
        has raised a StopIteration. No other methods can be called until
        this has occurred."""
        return self.storage_object.stream(**kwargs)
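
    # Sketch: the **kwargs pass straight through to
    # storage_object.stream(), so a caller can pick a smaller chunk size
    # (chunksize is python-cloudfiles' parameter name, 8192 by default):
    #
    #   for chunk in wrapper.__iter__(chunksize=4096):
    #       dest_file.write(chunk)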