# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Use an ordered dict if we can. If not, we'll just use a normal dict
# instead.
try:
    from collections import OrderedDict
except ImportError:
    OrderedDict = None

import logging
import os

import six

from mediagoblin import mg_globals as mgg
from mediagoblin.db.util import atomic_update
from mediagoblin.db.models import MediaEntry
from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _

_log = logging.getLogger(__name__)

class ProgressCallback(object):
    def __init__(self, entry):
        self.entry = entry

    def __call__(self, progress):
        self.entry.transcoding_progress = progress


def create_pub_filepath(entry, filename):
    return mgg.public_store.get_unique_filepath(
        ['media_entries',
         six.text_type(entry.id),
         filename])

class FilenameBuilder(object):
    """Easily slice and dice filenames.

    Initialize this class with an original file path, then use the fill()
    method to create new filenames based on the original.
    """
    MAX_FILENAME_LENGTH = 255  # VFAT's maximum filename length

    def __init__(self, path):
        """Initialize a builder from an original file path."""
        self.dirpath, self.basename = os.path.split(path)
        self.basename, self.ext = os.path.splitext(self.basename)
        self.ext = self.ext.lower()

    def fill(self, fmtstr):
        """Build a new filename based on the original.

        The fmtstr argument can include the following:
          {basename} -- the original basename, with the extension removed
          {ext} -- the original extension, always lowercase

        If necessary, {basename} is truncated so the resulting filename does
        not exceed this class' MAX_FILENAME_LENGTH in length.
        """
        basename_len = (self.MAX_FILENAME_LENGTH -
                        len(fmtstr.format(basename='', ext=self.ext)))
        return fmtstr.format(basename=self.basename[:basename_len],
                             ext=self.ext)
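
# A quick usage sketch for FilenameBuilder; the path and format string below
# are purely illustrative, not anything this module assumes:
#
#     builder = FilenameBuilder('/tmp/Foo Bar.JPEG')
#     builder.fill('{basename}.medium{ext}')  # -> 'Foo Bar.medium.jpeg'
#
# {basename} would be truncated first if the filled-in name exceeded
# MAX_FILENAME_LENGTH.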

class MediaProcessor(object):
    """A particular processor for this media type.

    While the ProcessingManager handles all types of MediaProcessing
    possible for a particular media type, a MediaProcessor can be
    thought of as a *particular* processing action for a media type.
    For example, you may have separate MediaProcessors for:

      - initial_processing: the initial processing of a media entry
      - gen_thumb: generate a thumbnail
      - resize: resize an image
      - transcode: transcode a video

    Some information on producing a new MediaProcessor for your media type:

     - You *must* supply a name attribute. This must be a class level
       attribute, and a string. This will be used to determine the
       subcommand of your processor.
     - It's recommended that you supply a class level description attribute.
     - Supply a media_is_eligible classmethod. This will be used to
       determine whether or not a media entry is eligible to use this
       processor type. See the method documentation for details.
     - To give "./bin/gmg reprocess run" abilities to this media type,
       supply both generate_parser and args_to_request classmethods.
     - The process method will be what actually processes your media.

    (See the sketch just after this class for what a child processor
    might look like.)
    """
    # You MUST override this in the child MediaProcessor!
    name = None

    # Optional, but will be used in various places to describe the
    # action this MediaProcessor provides
    description = None

    def __init__(self, manager, entry):
        self.manager = manager
        self.entry = entry
        self.entry_orig_state = entry.state

        # Should be initialized at time of processing, at least
        self.workbench = None

    def __enter__(self):
        self.workbench = mgg.workbench_manager.create()
        return self

    def __exit__(self, *args):
        self.workbench.destroy()
        self.workbench = None

    def process(self, **kwargs):
        """
        Actually process this media entry.
        """
        raise NotImplementedError

    @classmethod
    def media_is_eligible(cls, entry=None, state=None):
        raise NotImplementedError

    ###############################
    # Command line interface things
    ###############################

    @classmethod
    def generate_parser(cls):
        raise NotImplementedError

    @classmethod
    def args_to_request(cls, args):
        raise NotImplementedError

    ##########################################
    # THE FUTURE: web interface things here :)
    ##########################################

    #####################
    # Some common "steps"
    #####################

    def delete_queue_file(self):
        # Remove queued media file from storage and database.
        # queued_filepath is in the task_id directory which should
        # be removed too, but fail if the directory is not empty to be on
        # the super-safe side.
        queued_filepath = self.entry.queued_media_file
        if queued_filepath:
            mgg.queue_store.delete_file(queued_filepath)      # rm file
            mgg.queue_store.delete_dir(queued_filepath[:-1])  # rm dir
        self.entry.queued_media_file = []
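
# A minimal sketch of a child MediaProcessor, kept as a comment so it stays
# illustrative. ResizerProcessor, resize_image and the '--size' option are
# hypothetical, not part of this module (and the sketch assumes
# `import argparse` at the top of the defining module):
#
#     class ResizerProcessor(MediaProcessor):
#         """Resize an already-processed image."""
#         name = 'resize'
#         description = 'Resize image'
#
#         @classmethod
#         def media_is_eligible(cls, entry=None, state=None):
#             if not state:
#                 state = entry.state
#             return state == 'processed'
#
#         @classmethod
#         def generate_parser(cls):
#             parser = argparse.ArgumentParser(
#                 description=cls.description, prog=cls.name)
#             parser.add_argument('--size', nargs=2,
#                                 metavar=('max_width', 'max_height'))
#             return parser
#
#         @classmethod
#         def args_to_request(cls, args):
#             return request_from_args(args, ['size'])
#
#         def process(self, size=None):
#             resize_image(self.entry, self.workbench, size)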

class ProcessingKeyError(Exception): pass
class ProcessorDoesNotExist(ProcessingKeyError): pass
class ProcessorNotEligible(ProcessingKeyError): pass
class ProcessingManagerDoesNotExist(ProcessingKeyError): pass

class ProcessingManager(object):
    """Manages all the processing actions available for a media type

    Specific processing actions, MediaProcessor subclasses, are added
    to the ProcessingManager.
    """
    def __init__(self):
        # Dict of all MediaProcessors of this media type
        if OrderedDict is not None:
            self.processors = OrderedDict()
        else:
            self.processors = {}

    def add_processor(self, processor):
        """
        Add a processor class to this media type
        """
        name = processor.name
        if name is None:
            raise AttributeError("Processor class's .name attribute not set")

        self.processors[name] = processor

    def list_eligible_processors(self, entry):
        """
        List all processors that this media entry is eligible to be processed
        with.
        """
        return [
            processor
            for processor in self.processors.values()
            if processor.media_is_eligible(entry=entry)]

    def list_all_processors_by_state(self, state):
        """
        List all processors that this media state is eligible to be processed
        with.
        """
        return [
            processor
            for processor in self.processors.values()
            if processor.media_is_eligible(state=state)]

    def list_all_processors(self):
        return self.processors.values()

    def gen_process_request_via_cli(self, subparser):
        # Got to figure out what actually goes here before this can be
        # written properly.
        pass

    def get_processor(self, key, entry=None):
        """
        Get the processor with this key.

        If entry supplied, make sure this entry is actually compatible;
        otherwise raise an error.
        """
        try:
            processor = self.processors[key]
        except KeyError:
            raise ProcessorDoesNotExist(
                "'%s' processor does not exist for this media type" % key)

        if entry and not processor.media_is_eligible(entry):
            raise ProcessorNotEligible(
                "This entry is not eligible for processor with name '%s'" % key)

        return processor
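
# A hypothetical sketch of how a media plugin might wire its processors into
# a manager; ImageProcessingManager and the processors named here are
# illustrative, not definitions from this module:
#
#     class ImageProcessingManager(ProcessingManager):
#         def __init__(self):
#             super(ImageProcessingManager, self).__init__()
#             self.add_processor(InitialProcessor)
#             self.add_processor(ResizerProcessor)
#
#     manager = ImageProcessingManager()
#     processor_class = manager.get_processor('resize', entry)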

def request_from_args(args, which_args):
    """
    Generate a request dict from the values of some argparse parsed args
    """
    request = {}
    for arg in which_args:
        request[arg] = getattr(args, arg)

    return request

class MediaEntryNotFound(Exception): pass


def get_processing_manager_for_type(media_type):
    """
    Get the appropriate processing manager for this media type
    """
    manager_class = hook_handle(('reprocess_manager', media_type))
    if not manager_class:
        raise ProcessingManagerDoesNotExist(
            "A processing manager does not exist for {0}".format(media_type))
    manager = manager_class()

    return manager

def get_entry_and_processing_manager(media_id):
    """
    Get a MediaEntry and its processing manager all in one go.

    Returns a tuple of: `(entry, manager)`
    """
    entry = MediaEntry.query.filter_by(id=media_id).first()
    if entry is None:
        raise MediaEntryNotFound("Can't find media with id '%s'" % media_id)

    manager = get_processing_manager_for_type(entry.media_type)

    return entry, manager
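
# A brief usage sketch, assuming a reprocessing task has a media id in hand;
# the 'resize' key is just an example processor name:
#
#     entry, manager = get_entry_and_processing_manager(media_id)
#     processor_class = manager.get_processor('resize', entry)
#     with processor_class(manager, entry) as processor:
#         processor.process()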

def mark_entry_failed(entry_id, exc):
    """
    Mark a media entry as having failed in its conversion.

    Uses the exception that was raised to record more information. If
    the exception is a derivative of BaseProcessingFail then we can
    store extra information that can be useful for users, telling them
    why their media failed to process.

    :param entry_id: The id of the media entry
    :param exc: An instance of BaseProcessingFail
    """
    # Was this a BaseProcessingFail? In other words, was this a
    # type of error that we know how to handle?
    if isinstance(exc, BaseProcessingFail):
        # Looks like yes, so record information about that failure and any
        # metadata the user might have supplied.
        atomic_update(mgg.database.MediaEntry,
                      {'id': entry_id},
                      {u'state': u'failed',
                       u'fail_error': six.text_type(exc.exception_path),
                       u'fail_metadata': exc.metadata})
    else:
        _log.warn("No idea what happened here, but it failed: %r", exc)
        # Looks like no, so just mark it as failed and don't record a
        # fail_error (we'll assume it wasn't handled) and don't record
        # metadata (in fact overwrite it if somehow it had previous info
        # here)
        atomic_update(mgg.database.MediaEntry,
                      {'id': entry_id},
                      {u'state': u'failed',
                       u'fail_error': None,
                       u'fail_metadata': {}})
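
# A sketch of the intended call pattern, assuming a processing task that
# catches failures around the processor run (the surrounding task code is
# hypothetical):
#
#     try:
#         with processor_class(manager, entry) as processor:
#             processor.process()
#     except BaseProcessingFail as exc:
#         mark_entry_failed(entry.id, exc)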

def get_process_filename(entry, workbench, acceptable_files):
    """
    Try and get the queued file if available, otherwise return the first file
    in the acceptable_files that we have.

    If there is no acceptable file, raise ProcessFileNotFound.
    """
    if entry.queued_media_file:
        filepath = entry.queued_media_file
        storage = mgg.queue_store
    else:
        for keyname in acceptable_files:
            if entry.media_files.get(keyname):
                filepath = entry.media_files[keyname]
                storage = mgg.public_store
                break
        else:
            raise ProcessFileNotFound()

    filename = workbench.localized_file(
        storage, filepath, 'source')

    if not os.path.exists(filename):
        raise ProcessFileNotFound()

    return filename
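
# A short usage sketch: inside a processor re-running thumbnail generation,
# something like the following would fetch the best local copy to work from
# ('medium' and 'original' are example media_files keynames):
#
#     process_filename = get_process_filename(
#         self.entry, self.workbench, ['medium', 'original'])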

def store_public(entry, keyname, local_file, target_name=None,
                 delete_if_exists=True):
    if target_name is None:
        target_name = os.path.basename(local_file)
    target_filepath = create_pub_filepath(entry, target_name)

    if keyname in entry.media_files:
        _log.warn("store_public: keyname %r already used for file %r, "
                  "replacing with %r", keyname,
                  entry.media_files[keyname], target_filepath)
        if delete_if_exists:
            mgg.public_store.delete_file(entry.media_files[keyname])

    try:
        mgg.public_store.copy_local_to_storage(local_file, target_filepath)
    except Exception as e:
        _log.error(u'Exception happened: {0}'.format(e))
        raise PublicStoreFail(keyname=keyname)
    # raise an error if the file failed to copy
    if not mgg.public_store.file_exists(target_filepath):
        raise PublicStoreFail(keyname=keyname)

    entry.media_files[keyname] = target_filepath


def copy_original(entry, orig_filename, target_name, keyname=u"original"):
    store_public(entry, keyname, orig_filename, target_name)
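
# A quick sketch of how these helpers tend to be used at the end of a
# processing step; the 'thumb' keyname and the thumb_filename and
# queued_filename variables are illustrative only, and name_builder is
# assumed to be a FilenameBuilder instance:
#
#     store_public(entry, 'thumb', thumb_filename,
#                  name_builder.fill('{basename}.thumbnail{ext}'))
#     copy_original(entry, queued_filename,
#                   name_builder.fill('{basename}{ext}'))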

class BaseProcessingFail(Exception):
    """
    Base exception that all other processing failure messages should
    subclass from.

    You shouldn't raise this itself; instead you should subclass it
    and provide the exception_path and general_message applicable to
    this error.
    """
    general_message = u''

    @property
    def exception_path(self):
        return u"%s:%s" % (
            self.__class__.__module__, self.__class__.__name__)

    def __init__(self, **metadata):
        self.metadata = metadata or {}


class BadMediaFail(BaseProcessingFail):
    """
    Error that should be raised when an inappropriate file was given
    for the media type specified.
    """
    general_message = _(u'Invalid file given for media type.')


class PublicStoreFail(BaseProcessingFail):
    """
    Error that should be raised when copying to the public store fails.
    """
    general_message = _('Copying to public storage failed.')


class ProcessFileNotFound(BaseProcessingFail):
    """
    Error that should be raised when an acceptable file for processing
    is not found.
    """
    general_message = _(u'An acceptable processing file was not found')