mediagoblin/media_types/video/migrations.py
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from mediagoblin.db.migration_tools import RegisterMigration, inspect_table

from sqlalchemy import MetaData, Column, Unicode

import json

MIGRATIONS = {}
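
# Note on the registration mechanism (added comment): judging from its use
# below, RegisterMigration appears to act as a decorator that records each
# migration function in the MIGRATIONS dict under its version number, so the
# migration runner can apply them in order. A rough sketch of that idea (an
# assumption for illustration, not MediaGoblin's actual implementation):
#
#     class RegisterMigration(object):
#         def __init__(self, number, registry):
#             self.number, self.registry = number, registry
#
#         def __call__(self, func):
#             self.registry[self.number] = func
#             return func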


@RegisterMigration(1, MIGRATIONS)
def add_orig_metadata_column(db_conn):
    metadata = MetaData(bind=db_conn.bind)

    vid_data = inspect_table(metadata, "video__mediadata")

    col = Column('orig_metadata', Unicode,
                 default=None, nullable=True)
    # Note (added): Column.create() on an existing table is the
    # sqlalchemy-migrate changeset API for adding a column in place.
    col.create(vid_data)
    db_conn.commit()


@RegisterMigration(2, MIGRATIONS)
def webm_640_to_webm_video(db):
    """Rename the 'webm_640' file keyname to 'webm_video'"""
    metadata = MetaData(bind=db.bind)

    file_keynames = inspect_table(metadata, 'core__file_keynames')

    for row in db.execute(file_keynames.select()):
        if row.name == 'webm_640':
            db.execute(
                file_keynames.update()
                .where(file_keynames.c.id==row.id)
                .values(name='webm_video'))

    db.commit()


@RegisterMigration(3, MIGRATIONS)
def change_metadata_format(db):
    """Change orig_metadata format for multi-stream a-v"""
    db_metadata = MetaData(bind=db.bind)

    vid_data = inspect_table(db_metadata, "video__mediadata")

    for row in db.execute(vid_data.select()):
        if not row.orig_metadata:
            continue

        metadata = json.loads(row.orig_metadata)

        # before this migration there was info about only one video or audio
        # stream. So, we store existing info as the first item in the list
        new_metadata = {'audio': [], 'video': [], 'common': {}}
        video_key_map = {  # old: new
            'videoheight': 'height',
            'videowidth': 'width',
            'videorate': 'rate',
        }
        audio_key_map = {  # old: new
            'audiochannels': 'channels',
        }
        common_key_map = {
            'videolength': 'length',
        }
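
        # Illustrative example (added; the values below are made up, not taken
        # from any real row): an old flat metadata dict such as
        #
        #     {"videoheight": 480, "videowidth": 640, "videorate": [25, 1],
        #      "audiochannels": 2, "videolength": 54, "mimetype": "video/webm",
        #      "tags": {"video-codec": "VP8", "audio-codec": "Vorbis"}}
        #
        # is rewritten into the nested multi-stream form
        #
        #     {"video": [{"height": 480, "width": 640, "rate": [25, 1],
        #                 "tags": {"video-codec": "VP8"}}],
        #      "audio": [{"channels": 2, "tags": {"audio-codec": "Vorbis"}}],
        #      "common": {"length": 54, "tags": {"mimetype": "video/webm"}}}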

        new_metadata['video'] = [dict((v, metadata.get(k))
            for k, v in video_key_map.items() if metadata.get(k))]
        new_metadata['audio'] = [dict((v, metadata.get(k))
            for k, v in audio_key_map.items() if metadata.get(k))]
        new_metadata['common'] = dict((v, metadata.get(k))
            for k, v in common_key_map.items() if metadata.get(k))

        # 'mimetype' should be in tags
        new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}
        if 'tags' in metadata:
            new_metadata['video'][0]['tags'] = {}
            new_metadata['audio'][0]['tags'] = {}

            tags = metadata['tags']

            video_keys = ['encoder', 'encoder-version', 'video-codec']
            audio_keys = ['audio-codec']

            for t, v in tags.items():
                if t in video_keys:
                    new_metadata['video'][0]['tags'][t] = tags[t]
                elif t in audio_keys:
                    new_metadata['audio'][0]['tags'][t] = tags[t]
                else:
                    new_metadata['common']['tags'][t] = tags[t]

        db.execute(vid_data.update()
            .where(vid_data.c.media_entry==row.media_entry)
            .values(orig_metadata=json.dumps(new_metadata)))

    db.commit()
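
# Usage note (added): in GNU MediaGoblin these per-media-type migrations are
# normally applied with the database update command,
#
#     ./bin/gmg dbupdate
#
# which looks at each registered MIGRATIONS dict and runs any migrations newer
# than the version currently recorded for that media type.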