#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Beining --<ACICFG>
# Purpose: Yet another danmaku and video file downloader of Bilibili.
# Biligrab is licensed under MIT license (https://github.com/cnbeining/Biligrab/blob/master/LICENSE)
# Copyright (c) 2013-2015
# cnbeining[at]gmail.com
# http://www.cnbeining.com
# https://github.com/cnbeining/Biligrab
from ast import literal_eval
from StringIO import StringIO
from xml.dom.minidom import parseString
from danmaku2ass2 import *

# Standard-library modules used throughout the script.
import os
import sys
import math
import json
import gzip
import getopt
import hashlib
import logging
import urllib
import urllib2
import commands
import subprocess
import threading
import traceback
import Queue
global vid, cid, partname, title, videourl, part_now, is_first_run, APPKEY, SECRETKEY, LOG_LEVEL, VER, LOCATION_DIR, VIDEO_FORMAT, convert_ass, is_export, IS_SLIENT, pages, IS_M3U, FFPROBE_USABLE, QUALITY, IS_FAKE_IP, FAKE_IP

cookies, VIDEO_FORMAT = '', ''
LOG_LEVEL, pages, FFPROBE_USABLE = 0, 0, 0
APPKEY = '6f90a59ac58a4123'
SECRETKEY = 'b78be1fef78c3e7fdc7633e5fd5eee90'
SECRETKEY_MINILOADER = '1c15888dc316e05a15fdd0a02ed6584f'
VER = '0.98'  # version string used in BILIGRAB_UA below (exact value assumed)
FAKE_UA = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.52 Safari/537.36'
FAKE_HEADER = {
    'User-Agent': FAKE_UA,
    'Cache-Control': 'no-cache',
    'pianhao': '%7B%22qing%22%3A%22super%22%2C%22qtudou%22%3A%22real%22%2C%22qyouku%22%3A%22super%22%2C%22q56%22%3A%22super%22%2C%22qcntv%22%3A%22super%22%2C%22qletv%22%3A%22super2%22%2C%22qqiyi%22%3A%22real%22%2C%22qsohu%22%3A%22real%22%2C%22qqq%22%3A%22real%22%2C%22qhunantv%22%3A%22super%22%2C%22qku6%22%3A%22super%22%2C%22qyinyuetai%22%3A%22super%22%2C%22qtangdou%22%3A%22super%22%2C%22qxunlei%22%3A%22super%22%2C%22qsina%22%3A%22high%22%2C%22qpptv%22%3A%22super%22%2C%22qpps%22%3A%22high%22%2C%22qm1905%22%3A%22high%22%2C%22qbokecc%22%3A%22super%22%2C%22q17173%22%3A%22super%22%2C%22qcuctv%22%3A%22super%22%2C%22q163%22%3A%22super%22%2C%22q51cto%22%3A%22high%22%2C%22xia%22%3A%22auto%22%2C%22pop%22%3A%22no%22%2C%22open%22%3A%22no%22%7D'}
LOCATION_DIR = os.path.dirname(os.path.realpath(__file__))

#----------------------------------------------------------------------
def list_del_repeat(list):
    """delete repeated items in a list, and keep the order.
    http://www.cnblogs.com/infim/archive/2011/03/10/1979615.html"""
    l2 = []
    [l2.append(i) for i in list if not i in l2]
    return l2
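# Example (illustrative only, not executed): order is preserved while
# duplicates are dropped, e.g. list_del_repeat([1, 3, 1, 2, 3]) returns
# [1, 3, 2]; it is used below to deduplicate part/av number lists.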
#----------------------------------------------------------------------
def logging_level_reader(LOG_LEVEL):
    """str->int
    Map a log level name onto the logging module's constant."""
    return {'DEBUG': logging.DEBUG,
            'INFO': logging.INFO,
            'WARNING': logging.WARNING,
            'ERROR': logging.ERROR,
            'FATAL': logging.FATAL}[LOG_LEVEL]
#----------------------------------------------------------------------
def calc_sign(string):
    """str->str
    Calculate the API signature: the hex md5 digest of the given string."""
    return str(hashlib.md5(str(string).encode('utf-8')).hexdigest())
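# Illustrative only (not executed): the Bilibili API signature is the md5 of
# the query string with the secret key appended, e.g. for the miniloader API
# used in find_video_address_normal_api():
#   sign = calc_sign('cid=1234&from=miniplay&player=1' + SECRETKEY_MINILOADER)
# and the resulting hex digest is passed as the "sign" query parameter.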
#----------------------------------------------------------------------
def read_cookie(cookiepath):
    """str->list
    Original target: set the cookie
    Target now: Set the global header"""
    global BILIGRAB_HEADER
    try:
        cookies_file = open(cookiepath, 'r')
        cookies = cookies_file.readlines()
        return cookies
    except Exception:
        logging.warning('Cannot read cookie, may affect some videos...')
        return ['']
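# Note on the cookie file (inferred from how cookies[0] is used below): the
# first line of ./bilicookies should hold a raw Cookie header string, e.g.
#   DedeUserID=12345; SESSDATA=abcdef...
# It is passed verbatim as the 'Cookie' field of BILIGRAB_HEADER.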
#----------------------------------------------------------------------
def clean_name(name):
    """str->str
    delete all the dramas (characters that break paths or shell commands) in the filename."""
    return (str(name).strip().replace('\\', ' ').replace('/', ' ').replace('&', ' ')).replace('-', ' ')
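# Example (illustrative only): clean_name('A/B-C') returns 'A B C', so the
# result can be embedded safely in folder names and shell commands below.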
#----------------------------------------------------------------------
def send_request(url, header, is_fake_ip):
    """str,dict,int->str
    Send request, and return answer."""
    if is_fake_ip == 1:
        header['X-Forwarded-For'] = FAKE_IP
        header['Client-IP'] = FAKE_IP
        header['X-Real-IP'] = FAKE_IP
    #logging.debug(header)
    try:
        request = urllib2.Request(url, headers=header)
        response = urllib2.urlopen(request)
        data = response.read()
    except urllib2.HTTPError:
        logging.info('ERROR!')
    if response.info().get('Content-Encoding') == 'gzip':
        buf = StringIO(data)  # decompress the body that was already read above
        f = gzip.GzipFile(fileobj=buf)
        data = f.read()
    return data
    #raise URLOpenException('Cannot open URL! Raw output:\n\n{output}'.format(output = command_result[1]))
    #print(request.headers)
#----------------------------------------------------------------------
def mylist_to_aid_list(mylist):
    """int->list
    Turn a mylist number into a list of av numbers."""
    aid_list = []
    data = send_request('http://www.bilibili.com/mylist/mylist-{mylist}.js'.format(mylist = mylist), FAKE_HEADER, IS_FAKE_IP)
    #request = urllib2.Request('http://www.bilibili.com/mylist/mylist-{mylist}.js'.format(mylist = mylist), headers = FAKE_HEADER)
    #response = urllib2.urlopen(request)
    #data = response.read()
    for i in data.split('\n')[-3].split(','):
        if 'aid' in i:
            aid_list.append(i.split(':')[1])
    return aid_list
#----------------------------------------------------------------------
def find_cid_api(vid, p, cookies):
    """find cid and print video detail
    str,int?,str->str,str,str,str"""
    global cid, partname, title, videourl, pages
    title, partname, pages = '', '', ''
    if str(p) == '0' or str(p) == '1':
        #str2Hash = 'appkey={APPKEY}&id={vid}&type=xml{SECRETKEY}'.format(APPKEY = APPKEY, vid = vid, SECRETKEY = SECRETKEY)
        #biliurl = 'https://api.bilibili.com/view?appkey={APPKEY}&id={vid}&type=xml&sign={sign}'.format(APPKEY = APPKEY, vid = vid, SECRETKEY = SECRETKEY, sign = calc_sign(str2Hash))
        biliurl = 'https://api.bilibili.com/view?appkey={APPKEY}&id={vid}&type=xml'.format(APPKEY = '8e9fc618fbd41e28', vid = vid, SECRETKEY = SECRETKEY)
    else:
        #str2Hash = 'appkey={APPKEY}&id={vid}&page={p}&type=xml{SECRETKEY}'.format(APPKEY = APPKEY, vid = vid, p = p, SECRETKEY = SECRETKEY)
        #biliurl = 'https://api.bilibili.com/view?appkey={APPKEY}&id={vid}&page={p}&type=xml&sign={sign}'.format(APPKEY = APPKEY, vid = vid, SECRETKEY = SECRETKEY, p = p, sign = calc_sign(str2Hash))
        biliurl = 'https://api.bilibili.com/view?appkey={APPKEY}&id={vid}&page={p}&type=xml'.format(APPKEY = '8e9fc618fbd41e28', vid = vid, SECRETKEY = SECRETKEY, p = p)
    logging.debug('BiliURL: ' + biliurl)
    videourl = 'http://www.bilibili.com/video/av{vid}/index_{p}.html'.format(vid = vid, p = p)
    logging.info('Fetching api to read video info...')
    try:
        #request = urllib2.Request(biliurl, headers=BILIGRAB_HEADER)
        #response = urllib2.urlopen(request)
        #data = response.read()
        data = send_request(biliurl, BILIGRAB_HEADER, IS_FAKE_IP)
        logging.debug('Bilibili API: ' + data)
        dom = parseString(data)
        for node in dom.getElementsByTagName('cid'):
            if node.parentNode.tagName == "info":
                cid = node.toxml()[5:-6]
                logging.info('cid is ' + cid)
        for node in dom.getElementsByTagName('partname'):
            if node.parentNode.tagName == "info":
                partname = clean_name(str(node.toxml()[10:-11]))
                logging.info('partname is ' + partname)  # no more /\ drama
        for node in dom.getElementsByTagName('title'):
            if node.parentNode.tagName == "info":
                title = clean_name(str(node.toxml()[7:-8])).decode("utf-8")
                logging.info((u'Title is ' + title).encode(sys.stdout.encoding))
        for node in dom.getElementsByTagName('pages'):
            if node.parentNode.tagName == "info":
                pages = clean_name(str(node.toxml()[7:-8]))
                logging.info('Total pages is ' + str(pages))
        return [cid, partname, title, pages]
    except Exception:  # If API failed
        logging.warning('Cannot connect to API server! \nIf you think this is wrong, please open an issue at \nhttps://github.com/cnbeining/Biligrab/issues with *ALL* the screen output, \nas well as your IP address and basic system info.\nYou can get these data via "-l".')
        logging.debug('API Data: ' + data)
        return ['', '', '', '']
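# Illustrative only: the view API returns XML along the lines of
#   <info><cid>1234567</cid><partname>Part 1</partname><title>...</title><pages>2</pages></info>
# (shape inferred from the slicing above); node.toxml()[5:-6] and friends
# simply strip the surrounding <cid>...</cid>-style tags to get the text.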
#----------------------------------------------------------------------
def find_cid_flvcd(videourl):
    """str->None
    Find cid by scraping the raw video page, used as a fallback."""
    global vid, cid, partname, title
    logging.info('Fetching webpage with raw page...')
    #request = urllib2.Request(videourl, headers=FAKE_HEADER)
    data = send_request(videourl, FAKE_HEADER, IS_FAKE_IP)
    #request.add_header('Accept-encoding', 'gzip')
    #response = urllib2.urlopen(request)
    #except urllib2.HTTPError:
    #    logging.info('ERROR!')
    #if response.info().get('Content-Encoding') == 'gzip':
    #    buf = StringIO(response.read())
    #    f = gzip.GzipFile(fileobj=buf)
    data_list = data.split('\n')
    for lines in data_list:
        if 'cid=' in lines:
            cid = lines.split('&')
            cid = cid[0].split('=')
            cid = cid[-1]
            logging.info('cid is ' + str(cid))
            break
#----------------------------------------------------------------------
def check_dependencies(download_software, concat_software, probe_software):
    """str,str,str->str,str,str
    Will give softwares for concat, download and probe.
    The detection of Python3 is located at the end of Main function."""
    concat_software_list = ['ffmpeg', 'avconv']
    download_software_list = ['aria2c', 'axel', 'wget', 'curl']
    probe_software_list = ['ffprobe', 'mediainfo']
    name_list = [[concat_software, concat_software_list],
                 [download_software, download_software_list],
                 [probe_software, probe_software_list]]
    for name in name_list:
        if name[0].strip().lower() not in name[1]:  # Unsupported software
            # Set a Unsupported software, not blank
            if len(name[0].strip()) != 0:
                logging.warning('Requested Software not supported!\n Biligrab only support these following software(s):\n ' + str(name[1]) + '\n Trying to find available one...')
            for software in name[1]:
                output = commands.getstatusoutput(software + ' --help')
                if str(output[0]) != '32512':  # If exist
                    name[0] = software
                    break
            if name[0].strip().lower() not in name[1]:  # nothing usable was found
                logging.fatal('Cannot find software in ' + str(name[1]) + ' !')
                exit()
    return name_list[0][0], name_list[1][0], name_list[2][0]
#----------------------------------------------------------------------
def download_video_link(part_number, download_software, video_link, thread_single_download):
    """int,str,str,int->str
    Build the shell command that downloads one part."""
    logging.info('Downloading #{part_number}...'.format(part_number = part_number))
    if download_software == 'aria2c':
        cmd = 'aria2c -c -U "{FAKE_UA}" -s{thread_single_download} -x{thread_single_download} -k1M --out {part_number}.flv "{video_link}"'
    elif download_software == 'wget':
        cmd = 'wget -c -A "{FAKE_UA}" -O {part_number}.flv "{video_link}"'
    elif download_software == 'curl':
        cmd = 'curl -L -C - -A "{FAKE_UA}" -o {part_number}.flv "{video_link}"'
    elif download_software == 'axel':
        cmd = 'axel -U "{FAKE_UA}" -n {thread_single_download} -o {part_number}.flv "{video_link}"'
    cmd = cmd.format(part_number = part_number, video_link = video_link, thread_single_download = thread_single_download, FAKE_UA = FAKE_UA)
    return cmd
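# Illustrative only: for part 0 with aria2c and 16 threads the generated
# command looks like
#   aria2c -c -U "Mozilla/5.0 (...)" -s16 -x16 -k1M --out 0.flv "http://example.com/0.flv"
# (URL is a placeholder); execute_cmd()/execute_sysencode_cmd() then run it
# through the shell.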
#----------------------------------------------------------------------
def execute_cmd(cmd):
    """str->int
    Run a shell command quietly and return its exit code."""
    return_code = subprocess.call(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if return_code != 0:
        logging.warning('ERROR')
    return return_code
#----------------------------------------------------------------------
def execute_sysencode_cmd(command):
    """execute cmd with sysencoding"""
    return os.system(command.decode("utf-8").encode(sys.stdout.encoding))
#----------------------------------------------------------------------
def concat_videos(concat_software, vid_num, filename):
    """str,int,str->None"""
    global VIDEO_FORMAT, title
    if concat_software == 'ffmpeg':
        f = open('ff.txt', 'w')
        ff = ''
        cwd = os.getcwd()
        for i in range(vid_num):
            ff += 'file \'{cwd}/{i}.flv\'\n'.format(cwd = cwd, i = i)
        # ff = ff.encode("utf8")
        f.write(ff)
        f.close()
        logging.info('Concating videos...')
        VIDEO_FORMAT = 'mp4'
        execute_sysencode_cmd('ffmpeg -f concat -i ff.txt -c copy "' + filename + '".mp4')
        if os.path.isfile((filename + '.mp4').decode("utf-8")):
            try:
                # os.remove('ff.txt')
                os.remove(('ff.txt').decode("utf-8"))
                for i in range(vid_num):
                    print((str(i) + '.flv').decode("utf-8"))
                    os.remove((str(i) + '.flv').decode("utf-8"))
                    #execute_sysencode_cmd('rm -r ' + str(i) + '.flv')
                logging.info('Done, enjoy yourself!')
            except Exception:
                logging.warning('Cannot delete temporary files!')
        else:
            print('ERROR: Cannot concatenate files, trying to make flv...')
            execute_sysencode_cmd('ffmpeg -f concat -i ff.txt -c copy "' + filename + '".flv')
            if os.path.isfile((filename + '.flv').decode("utf-8")):
                VIDEO_FORMAT = 'flv'
                logging.warning('FLV file made. Not possible to mux to MP4, highly likely due to audio format.')
                #execute_sysencode_cmd('rm -r ff.txt')
                # os.remove('ff.txt')
                print(('ff.txt').decode("utf-8"))
                os.remove(('ff.txt').decode("utf-8"))
                for i in range(vid_num):
                    #execute_sysencode_cmd('rm -r ' + str(i) + '.flv')
                    os.remove((str(i) + '.flv').decode("utf-8"))
            else:
                logging.error('Cannot concatenate files!')
    elif concat_software == 'avconv':
        pass
#----------------------------------------------------------------------
def process_m3u8(url):
    """str->list
    Read an m3u8 playlist and return the real video URL in a list."""
    try:
        data = send_request(url, FAKE_HEADER, IS_FAKE_IP)
    except Exception:
        logging.error('Cannot download required m3u8!')
    #request = urllib2.Request(url, headers=BILIGRAB_HEADER)
    #response = urllib2.urlopen(request)
    #logging.error('Cannot download required m3u8!')
    #data = response.read()
    data = data.split('\n')
    return [data[4].split('?')[0]]
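# Note (playlist layout assumed from the index used above): the m3u8 handed
# back by the HTML5 API is expected to start with four header lines
# (#EXTM3U / #EXT-X-TARGETDURATION / #EXT-X-VERSION / #EXTINF), so data[4]
# is the first segment URL; the query string is dropped to get a plain link.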
#----------------------------------------------------------------------
def make_m3u8(video_list):
    """list->str
    video_list: [(VIDEO_URL, TIME_IN_SEC), ...]"""
    TARGETDURATION = int(max([i[1] for i in video_list])) + 1
    line = '#EXTM3U\n#EXT-X-TARGETDURATION:{TARGETDURATION}\n#EXT-X-VERSION:2\n'.format(TARGETDURATION = TARGETDURATION)
    for i in video_list:
        line += '#EXTINF:{time}\n{url}\n'.format(time = str(i[1]), url = i[0])
    line += '#EXT-X-ENDLIST'
    logging.debug('m3u8: ' + line)
    return line
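# Illustrative only: make_m3u8([('http://example.com/0.flv', 253)]) produces
#   #EXTM3U
#   #EXT-X-TARGETDURATION:254
#   #EXT-X-VERSION:2
#   #EXTINF:253
#   http://example.com/0.flv
#   #EXT-X-ENDLIST
# which players such as MPlayer or VLC can open directly (see the "-u" option).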
#----------------------------------------------------------------------
def find_video_address_html5(vid, p, header):
    """str,str,dict->list
    Get the video address via the mobile HTML5 API."""
    api_url = 'http://www.bilibili.com/m/html5?aid={vid}&page={p}'.format(vid = vid, p = p)
    try:
        data = send_request(api_url, header, IS_FAKE_IP)
    except Exception:
        logging.error('Cannot connect to HTML5 API!')
        return []
    #request = urllib2.Request(api_url, headers=header)
    #response = urllib2.urlopen(request)
    #logging.error('Cannot connect to HTML5 API!')
    #data = response.read()
    #if response.info().get('Content-Encoding') == 'gzip':
    #    data = gzip.GzipFile(fileobj=StringIO(data), mode="r").read()
    info = json.loads(data.decode('utf-8'))
    raw_url = info['src']
    if 'error.mp4' in raw_url:
        logging.error('HTML5 API returned ERROR or not available!')
        return []
    if 'm3u8' in raw_url:
        logging.info('Found m3u8, processing...')
        return process_m3u8(raw_url)
    return [raw_url]
#----------------------------------------------------------------------
def find_video_address_force_original(cid, header):
    """str,dict->str
    Give the original URL, if possible."""
    #sign_this = calc_sign('appkey={APPKEY}&cid={cid}{SECRETKEY}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY = SECRETKEY))
    api_url = 'http://interface.bilibili.com/player?'
    #data = send_request(api_url + 'appkey={APPKEY}&cid={cid}&sign={sign_this}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY = SECRETKEY, sign_this = sign_this), header, IS_FAKE_IP)
    data = send_request(api_url + 'appkey={APPKEY}&cid={cid}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY = SECRETKEY), header, IS_FAKE_IP)
    #request = urllib2.Request(api_url + 'appkey={APPKEY}&cid={cid}&sign={sign_this}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY = SECRETKEY, sign_this = sign_this), headers=header)
    #response = urllib2.urlopen(request)
    #data = response.read()
    #logging.debug('interface responce: ' + data)
    data = data.split('\n')
    for l in data:
        if 'oriurl' in l:  # <oriurl>...</oriurl> holds the original source URL
            originalurl = str(l[8:-9])
            logging.info('Original URL is ' + originalurl)
            return originalurl
    logging.warning('Cannot get original URL! Chances are it does not exist.')
    return ''
#----------------------------------------------------------------------
def find_link_flvcd(videourl):
    """str->list
    Used in method 2 and 5."""
    logging.info('Finding link via Flvcd...')
    data = send_request('http://www.flvcd.com/parse.php?' + urllib.urlencode([('kw', videourl)]) + '&format=super', FAKE_HEADER, IS_FAKE_IP)
    #request = urllib2.Request('http://www.flvcd.com/parse.php?' +
    #urllib.urlencode([('kw', videourl)]) + '&format=super', headers=FAKE_HEADER)
    #request.add_header('Accept-encoding', 'gzip')
    #response = urllib2.urlopen(request)
    #data = response.read()
    #if response.info().get('Content-Encoding') == 'gzip':
    #    buf = StringIO(data)
    #    f = gzip.GzipFile(fileobj=buf)
    data_list = data.split('\n')
    for items in data_list:
        if 'name' in items and 'inf' in items and 'input' in items:
            c = items
            rawurlflvcd = c[59:-5]
            rawurlflvcd = rawurlflvcd.split('|')
            return rawurlflvcd
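# Note on the magic slice above (assumed from Flvcd's page markup): the
# matching line is an <input ... value="URL1|URL2|..."> form field, so
# c[59:-5] cuts the fixed-length markup around the value and the remaining
# "|"-separated string is split into individual segment URLs.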
#----------------------------------------------------------------------
def find_video_address_pr(cid, quality, header):
    """str,int,dict->list
    The API provided by BilibiliPr."""
    logging.info('Finding link via BilibiliPr...')
    api_url = 'http://pr.lolly.cc/P{quality}?cid={cid}'.format(quality = quality, cid = cid)
    data = send_request(api_url, header, IS_FAKE_IP)
    #request = urllib2.Request(api_url, headers=header)
    #response = urllib2.urlopen(request, timeout=3)
    #data = response.read()
    #logging.warning('No response!')
    #logging.debug('BilibiliPr API: ' + data)
    if '404' in data:
        logging.warning('API returned 404!')
        return ['404']
    rawurl = []
    dom = parseString(data)
    for node in dom.getElementsByTagName('durl'):
        url = node.getElementsByTagName('url')[0]
        rawurl.append(url.childNodes[0].data)
    return rawurl
#----------------------------------------------------------------------
def find_video_address_normal_api(cid, header, method, convert_m3u = False):
    """str,dict,str,bool->list
    Change in 0.98: Return the file list directly.
    method:
    0: The original API
    1: The CDN API
    2: Original URL API - Divided in another function
    3: Mobile API - Divided in another function
    4: Flvcd - Divided in another function
    convert_m3u: if True, return
    [(VIDEO_URL, TIME_IN_SEC), ...]"""
    if str(method) == '1':
        api_url = 'http://interface.bilibili.com/v_cdn_play?'
    else:  #Method 0 or other
        api_url = 'http://interface.bilibili.com/playurl?'
    if QUALITY == -1:  # no explicit quality requested
        sign_this = calc_sign('cid={cid}&from=miniplay&player=1{SECRETKEY_MINILOADER}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY_MINILOADER = SECRETKEY_MINILOADER))
        interface_url = api_url + 'cid={cid}&from=miniplay&player=1&sign={sign_this}'.format(cid = cid, sign_this = sign_this)
        #interface_url = api_url + 'appkey={APPKEY}&cid={cid}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY = SECRETKEY)
    else:
        sign_this = calc_sign('cid={cid}&from=miniplay&player=1&quality={QUALITY}{SECRETKEY_MINILOADER}'.format(APPKEY = APPKEY, cid = cid, SECRETKEY_MINILOADER = SECRETKEY_MINILOADER, QUALITY = QUALITY))
        interface_url = api_url + 'cid={cid}&from=miniplay&player=1&quality={QUALITY}&sign={sign_this}'.format(cid = cid, sign_this = sign_this, QUALITY = QUALITY)
    logging.info(interface_url)
    data = send_request(interface_url, header, IS_FAKE_IP)
    #request = urllib2.Request(interface_url, headers=header)
    #logging.debug('Interface: ' + interface_url)
    #response = urllib2.urlopen(request)
    #data = response.read()
    #logging.debug('interface API: ' + data)
    for l in data.split('\n'):  # In case shit happens
        if 'error.mp4' in l or 'copyright.mp4' in l:
            logging.warning('API header may be blocked!')
            return ['API_BLOCKED']
    rawurl = []
    dom = parseString(data)
    if convert_m3u:
        for node in dom.getElementsByTagName('durl'):
            length = node.getElementsByTagName('length')[0]
            url = node.getElementsByTagName('url')[0]
            rawurl.append((url.childNodes[0].data, int(int(length.childNodes[0].data) / 1000) + 1))
    else:
        for node in dom.getElementsByTagName('durl'):
            url = node.getElementsByTagName('url')[0]
            rawurl.append(url.childNodes[0].data)
    return rawurl
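# Illustrative only: the playurl/v_cdn_play response is XML with one <durl>
# block per video segment, roughly (structure inferred from the parsing above)
#   <durl><length>253000</length><url>http://...0.flv</url></durl>
# <length> is in milliseconds, which is why it is divided by 1000 (and rounded
# up) when convert_m3u needs per-segment durations in seconds.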
#----------------------------------------------------------------------
def find_link_you_get(videourl):
    """str->list
    Extract urls with you-get."""
    command_result = commands.getstatusoutput('you-get -u {videourl}'.format(videourl = videourl))
    logging.debug(command_result)
    if command_result[0] != 0:
        raise YougetURLException('You-get failed somehow! Raw output:\n\n{output}'.format(output = command_result[1]))
    url_list = command_result[1].split('\n')
    for k, v in enumerate(url_list):
        if v.startswith('http'):
            url_list = url_list[k:]
            break
    #url_list = literal_eval(url_list_str)
    logging.debug('URL_LIST:{url_list}'.format(url_list = url_list))
    return list(url_list)
#----------------------------------------------------------------------
def get_video(oversea, convert_m3u = False):
    """str,bool->list
    A full parser for getting video.
    convert_m3u: [(URL, time_in_sec)]"""
    if oversea == '2':
        raw_link = find_video_address_force_original(cid, BILIGRAB_HEADER)
        rawurl = find_link_flvcd(raw_link)
    elif oversea == '3':
        rawurl = find_video_address_html5(vid, p, BILIGRAB_HEADER)
        if rawurl == []:  #As in #11
            rawurl = find_video_address_html5(vid, p, FAKE_HEADER)
    elif oversea == '4':
        rawurl = find_link_flvcd(videourl)
    elif oversea == '5':
        rawurl = find_video_address_pr(cid, 1080, BILIGRAB_HEADER)
        if '404' in rawurl[0]:
            logging.info('Using lower quality...')
            rawurl = find_video_address_pr(cid, 720, BILIGRAB_HEADER)
            if '404' in rawurl[0]:
                logging.error('Failed!')
                rawurl = []
        elif 'ERROR' in rawurl[0]:
            logging.info('Wait a little bit...')
            rawurl = find_video_address_pr(cid, 1080, BILIGRAB_HEADER)
    elif oversea == '6':
        raw_link = find_video_address_force_original(cid, BILIGRAB_HEADER)
        rawurl = find_link_you_get(raw_link)
    else:  #Method 0 or 1
        rawurl = find_video_address_normal_api(cid, BILIGRAB_HEADER, oversea, convert_m3u)
        if 'API_BLOCKED' in rawurl[0]:
            logging.warning('API header may be blocked! Using fake one instead...')
            rawurl = find_video_address_normal_api(cid, FAKE_HEADER, oversea, convert_m3u)
    return rawurl
#----------------------------------------------------------------------
def get_resolution(filename, probe_software):
    """str,str->list"""
    global VIDEO_FORMAT
    filename = filename + '.' + VIDEO_FORMAT
    try:
        if probe_software == 'mediainfo':
            resolution = get_resolution_mediainfo(filename)
        if probe_software == 'ffprobe':
            resolution = get_resolution_ffprobe(filename)
        logging.debug('Software: {probe_software}, resolution {resolution}'.format(probe_software = probe_software, resolution = resolution))
        return resolution
    except Exception:  # magic number
        return [1280, 720]  # fallback resolution (exact magic value assumed)
#----------------------------------------------------------------------
def get_resolution_mediainfo(filename):
    """str->list
    [width, height]: probe the resolution with mediainfo."""
    resolution = str(os.popen('mediainfo \'--Inform=Video;%Width%x%Height%\' "' + filename + '"').read()).strip().split('x')
    return [int(resolution[0]), int(resolution[1])]
#----------------------------------------------------------------------
def get_resolution_ffprobe(filename):
    """str->list
    [width, height]: probe the resolution with ffprobe."""
    width = 0
    height = 0
    cmnd = ['ffprobe', '-show_format', '-show_streams', '-pretty', '-loglevel', 'quiet', filename]
    p = subprocess.Popen(cmnd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.split():
        if 'width=' in line:
            width = line.split('=')[1]
        if 'height=' in line:
            height = line.split('=')[1]
    # return width + 'x' + height
    return [int(width), int(height)]
#----------------------------------------------------------------------
def get_url_size(url):
    """str->int
    Get remote URL size by reading Content-Length."""
    site = urllib.urlopen(url)
    meta = site.info()
    return int(meta.getheaders("Content-Length")[0])
#----------------------------------------------------------------------
def getvideosize(url, verbose=False):
    """str,bool->list
    [[width, height], duration_in_sec] of a local file or remote URL, via ffprobe."""
    try:
        if url.startswith('http:') or url.startswith('https:'):
            ffprobe_command = ['ffprobe', '-icy', '0', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_format', '-show_streams', '-timeout', '60000000', '-user-agent', BILIGRAB_UA, url]
        else:
            ffprobe_command = ['ffprobe', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_streams', url]
        logcommand(ffprobe_command)
        ffprobe_process = subprocess.Popen(ffprobe_command, stdout=subprocess.PIPE)
        try:
            ffprobe_output = json.loads(ffprobe_process.communicate()[0].decode('utf-8', 'replace'))
        except KeyboardInterrupt:
            logging.warning('Cancelling getting video size, press Ctrl-C again to terminate.')
            ffprobe_process.terminate()
            return [[0, 0], 0]
        width, height, widthxheight, duration, total_bitrate = 0, 0, 0, 0, 0
        if dict.get(ffprobe_output, 'format')['duration'] > duration:
            duration = dict.get(ffprobe_output, 'format')['duration']
        for stream in dict.get(ffprobe_output, 'streams', []):
            if duration == 0 and (dict.get(stream, 'duration') > duration):
                duration = dict.get(stream, 'duration')
            if dict.get(stream, 'width') * dict.get(stream, 'height') > widthxheight:
                width, height = dict.get(stream, 'width'), dict.get(stream, 'height')
            if dict.get(stream, 'bit_rate') > total_bitrate:
                total_bitrate += int(dict.get(stream, 'bit_rate'))
        if duration == 0:
            duration = int(get_url_size(url) * 8 / total_bitrate)
        return [[int(width), int(height)], int(float(duration)) + 1]
    except Exception as e:
        logging.warning(str(e))
        return [[0, 0], 0]
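# Illustrative only: the parsed ffprobe JSON looks roughly like
#   {"format": {"duration": "253.16", ...},
#    "streams": [{"width": 1280, "height": 720, "bit_rate": "1205000", ...}]}
# getvideosize() keeps the largest width x height it sees and, if no duration
# is reported, estimates one from Content-Length * 8 / total_bitrate.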
#----------------------------------------------------------------------
def convert_ass_py3(filename, probe_software, resolution = [0, 0]):
    """str,str,list->None
    With danmaku2ass, branch master.
    https://github.com/m13253/danmaku2ass/
    A simple way to do that.
    resolution_str: 1920x1080"""
    xml_name = os.path.abspath(filename + '.xml')
    ass_name = filename + '.ass'
    logging.info('Converting danmaku to ASS file with danmaku2ass(main)...')
    logging.info('Resolution is %dx%d' % (resolution[0], resolution[1]))
    if resolution == [0, 0]:
        logging.info('Trying to get resolution...')
        resolution = get_resolution(filename, probe_software)
    logging.info('Resolution is %dx%d' % (resolution[0], resolution[1]))
    if execute_sysencode_cmd('python3 %s/danmaku2ass3.py -o %s -s %dx%d -fs %d -a 0.8 -dm 8 %s' % (LOCATION_DIR, ass_name, resolution[0], resolution[1], int(math.ceil(resolution[1] / 21.6)), xml_name)) == 0:
        logging.info('The ASS file should be ready!')
    else:
        logging.error('''Danmaku2ASS failed.
Head to https://github.com/m13253/danmaku2ass/issues to complain about this.''')
#----------------------------------------------------------------------
def convert_ass_py2(filename, probe_software, resolution = [0, 0]):
    """str,str,list->None
    With danmaku2ass, branch py2.
    https://github.com/m13253/danmaku2ass/tree/py2"""
    logging.info('Converting danmaku to ASS file with danmaku2ass(py2)...')
    xml_name = filename + '.xml'
    if resolution == [0, 0]:
        logging.info('Trying to get resolution...')
        resolution = get_resolution(filename, probe_software)
    logging.info('Resolution is {width}x{height}'.format(width = resolution[0], height = resolution[1]))
    #convert_ass(xml_name, filename + '.ass', resolution)
    try:
        Danmaku2ASS(xml_name, filename + '.ass', resolution[0], resolution[1],
                    font_size = int(math.ceil(resolution[1] / 21.6)), text_opacity = 0.8, duration_marquee = 8.0)
        logging.info('INFO: The ASS file should be ready!')
    except Exception as e:
        logging.error('''Danmaku2ASS failed: %s
Head to https://github.com/m13253/danmaku2ass/issues to complain about this.''' % e)
        logging.debug(traceback.print_exc())
        pass  #Or it may stop leaving lots of lines unprocessed
#----------------------------------------------------------------------
def download_danmaku(cid, filename):
    """Download XML file, and convert to ASS(if required)
    Used to be in main(), but replaced due to the merge of -m (BiligrabLite).
    If danmaku only, will see whether need to export ASS."""
    logging.info('Fetching XML...')
    execute_sysencode_cmd('curl -o "{filename}.xml" --compressed http://comment.bilibili.com/{cid}.xml'.format(filename = filename, cid = cid))
    #execute_sysencode_cmd('gzip -d '+cid+'.xml.gz')
    logging.info('The XML file, {filename}.xml should be ready...enjoy!'.format(filename = filename.decode("utf-8").encode(sys.stdout.encoding)))
#----------------------------------------------------------------------
def logcommand(command_line):
    logging.debug('Executing: ' + ' '.join('\'' + i + '\'' if ' ' in i or '&' in i or '"' in i else i for i in command_line))
#----------------------------------------------------------------------
def logorraise(message, debug=False):
    """Log the message, or raise it as an exception when debugging."""
    if debug:
        raise Exception(str(message))
    else:
        logging.error(str(message))
########################################################################
class DanmakuOnlyException(Exception):

    '''Deal with DanmakuOnly to stop the main() function.'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class Danmaku2Ass2Exception(Exception):

    '''Deal with Danmaku2ASS2 to stop the main() function.'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class NoCidException(Exception):

    '''Deal with no cid to stop the main() function.'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class NoVideoURLException(Exception):

    '''Deal with no video URL to stop the main() function.'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class ExportM3UException(Exception):

    '''Deal with export to m3u to stop the main() function.'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class YougetURLException(Exception):

    '''you-get cannot get URL somehow'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)

########################################################################
class URLOpenException(Exception):

    '''cannot get URL somehow'''
    #----------------------------------------------------------------------
    def __init__(self, value):
        self.value = value
    #----------------------------------------------------------------------
    def __str__(self):
        return repr(self.value)
########################################################################
class DownloadVideo(threading.Thread):
    """Threaded Download Video"""
    #----------------------------------------------------------------------
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue
    #----------------------------------------------------------------------
    def run(self):
        while True:
            #grabs start time from queue
            down_set = self.queue.get()
            #return_value = download_video(down_set)
            cmd = download_video_link(*down_set)
            return_value = execute_cmd(cmd)
            self.queue.task_done()
#----------------------------------------------------------------------
def main_threading(download_thread = 3, video_list = [], thread_single_download = 16):
    """int,list,int->None
    Download every URL in video_list with a pool of worker threads."""
    command_pool = [(video_list.index(url_this), download_software, url_this, thread_single_download) for url_this in video_list]
    #spawn a pool of threads, and pass them queue instance
    for i in range(int(download_thread)):
        t = DownloadVideo(queue)
        t.setDaemon(True)
        t.start()
    #populate queue with data
    for command_single in command_pool:
        queue.put(command_single)
    #wait on the queue until everything has been processed
    queue.join()
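# Illustrative only: each queue item is the argument tuple for
# download_video_link(), i.e. (part_number, download_software, video_link,
# thread_single_download); DownloadVideo.run() unpacks it with *down_set,
# builds the shell command and runs it, so several parts download in parallel.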
#----------------------------------------------------------------------
def main(vid, p, oversea, cookies, download_software, concat_software, is_export, probe_software, danmaku_only, time_fetch=5, download_thread = 16, thread_single_download = 16):
    global cid, partname, title, videourl, is_first_run
    videourl = 'http://www.bilibili.com/video/av{vid}/index_{p}.html'.format(vid = vid, p = p)
    # Check both software
    logging.debug(concat_software + ', ' + download_software)
    # Start to find cid, api
    cid, partname, title, pages = find_cid_api(vid, p, cookies)
    if cid == '':
        #logging.warning('Cannot find cid, trying to do it brutely...')
        #find_cid_flvcd(videourl)
        find_cid_flvcd(videourl)
    if cid == '':
        logging.warning('Strange, still cannot find cid... ')
        is_black3 = str(raw_input('Type y for trying the unpredictable way, or input the cid by yourself; Press ENTER to quit.'))
        if 'y' in str(is_black3):
            vid = str(int(vid) - 1)
            find_cid_api(int(vid) - 1, p)
        elif str(is_black3) == '':
            raise NoCidException('FATAL: Cannot get cid anyway!')
        else:
            cid = str(is_black3)
    # start to make folders...
    if len(partname) != 0:
        folder = title
        filename = partname
    elif title != '':
        folder = title
        filename = title
    else:
        folder = str(cid)
        filename = str(cid)
    #In case cannot find which s which
    filename = str(p) + ' - ' + filename
    # In case make too much folders
    folder_to_make = os.getcwd() + '/' + folder
    if is_first_run == 0:
        if not os.path.exists(folder_to_make):
            os.makedirs(folder_to_make)
        is_first_run = 1
        os.chdir(folder_to_make)
    download_danmaku(cid, filename)
    if is_export >= 1 and IS_M3U != 1 and danmaku_only == 1:
        rawurl = get_video(oversea, convert_m3u = True)
        check_dependencies_remote_resolution('ffprobe')
        resolution = getvideosize(rawurl[0])[0]
        convert_ass(filename, probe_software, resolution = resolution)
    if IS_M3U == 1:
        #M3U export, then stop
        if oversea in {'0', '1'}:
            rawurl = get_video(oversea, convert_m3u = True)
        else:
            duration_list = []
            rawurl = get_video(oversea, convert_m3u = False)
            for url in rawurl:
                duration_list.append(getvideosize(url)[1])
            rawurl = map(lambda x, y: (x, y), rawurl, duration_list)
        resolution = getvideosize(rawurl[0][0])[0]
        m3u_file = make_m3u8(rawurl)
        f = open(filename + '.m3u', 'w')
        m3u_file = m3u_file.encode("utf8")
        f.write(m3u_file)
        f.close()
        if is_export >= 1:
            convert_ass(filename, probe_software, resolution = resolution)
        logging.debug(m3u_file)
        raise ExportM3UException('INFO: Export to M3U')
    if danmaku_only == 1:
        raise DanmakuOnlyException('INFO: Danmaku only')
    # Find video location
    logging.info('Finding video location...')
    logging.info('Trying to get download URL...')
    rawurl = get_video(oversea, convert_m3u = False)
    if len(rawurl) == 0 and oversea != '4':  # hope this never happen
        logging.warning('API failed, using falloff plan...')
        rawurl = find_link_flvcd(videourl)
    vid_num = len(rawurl)
    if IS_SLIENT == 0 and vid_num == 0:
        logging.warning('Cannot get download URL!')
        rawurl = str(raw_input('If you know the url, please enter it now: URL1|URL2...')).split('|')
        vid_num = len(rawurl)
    if vid_num == 0:  # shit really hit the fan
        raise NoVideoURLException('FATAL: Cannot get video URL anyway!')
    logging.info('{vid_num} videos in part {part_now} to download, fetch yourself a cup of coffee...'.format(vid_num = vid_num, part_now = part_now))
    if vid_num == 1 or int(download_thread) <= 1:  # single download, no thread pool
        cmd = download_video_link(0, download_software, rawurl[0], thread_single_download)
        execute_sysencode_cmd(cmd)
    else:
        global queue
        queue = Queue.Queue()
        main_threading(download_thread, rawurl, thread_single_download)
    concat_videos(concat_software, vid_num, filename)
    if is_export >= 1:
        try:
            convert_ass(filename, probe_software)
        except Exception:
            logging.warning('Problem with ASS conversion!')
    logging.info('Part Done!')
#----------------------------------------------------------------------
def get_full_p(p_raw):
    """str->list
    Turn a part string like "1~3,5" into a list of part numbers."""
    p_list = []
    p_raw = p_raw.split(',')
    for item in p_raw:
        if '~' in item:
            # A "lower~higher" range
            item = item.split('~')
            lower = 0
            higher = 0
            try:
                lower = int(item[0])
            except Exception:
                logging.warning('Cannot read lower!')
            try:
                higher = int(item[1])
            except Exception:
                logging.warning('Cannot read higher!')
            if lower == 0 or higher == 0:
                if lower == 0 and higher != 0:
                    lower = higher
                elif lower != 0 and higher == 0:
                    higher = lower
                else:
                    logging.warning('Cannot find any higher or lower, ignoring...')
                    continue
            if lower > higher:
                lower, higher = higher, lower
            p_list.append(lower)
            while lower < higher:
                lower = lower + 1
                p_list.append(lower)
        else:
            try:
                p_list.append(int(item))
            except Exception:
                logging.warning('Cannot read "{item}", abandon it.'.format(item = item))
    p_list = list_del_repeat(p_list)
    return p_list
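# Example (matches the reconstruction above): get_full_p('1~3,6') returns
# [1, 2, 3, 6]; "~" expands a range, "," separates single parts, and
# duplicates are removed while keeping the order (see the "-a"/"-p" options).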
#----------------------------------------------------------------------
def check_dependencies_remote_resolution(software):
    """str->None
    Make sure ffprobe is callable before probing a remote URL."""
    if 'ffprobe' in software:
        output = commands.getstatusoutput('ffprobe --help')
        if str(output[0]) == '32512':
            logging.error('FFprobe DNE or not callable, cannot get remote resolution!')
            exit()
#----------------------------------------------------------------------
def check_dependencies_exportm3u(IS_M3U):
    """int->int"""
    if IS_M3U == 1:
        output = commands.getstatusoutput('ffprobe --help')
        if str(output[0]) == '32512':
            logging.error('ffprobe does not exist or is not callable!')
            err_input = str(raw_input('Do you want to exit, ignore or stop the conversion?(e/i/s)'))
            if err_input == 'e':
                exit()
            elif err_input == 'i':
                pass
            elif err_input == 's':
                IS_M3U = 0
            else:
                logging.warning('Cannot read input, stop the conversion!')
                IS_M3U = 0
    return IS_M3U
1067 def check_dependencies_danmaku2ass(is_export
):
1068 """int,str->int,str"""
1070 convert_ass
= convert_ass_py3
1071 output
= commands
.getstatusoutput('python3 --help')
1072 if str(output
[0]) == '32512' or not os
.path
.exists(os
.path
.join(LOCATION_DIR
, 'danmaku2ass3.py')):
1073 logging
.warning('danmaku2ass3.py DNE, python3 does not exist or not callable!')
1074 err_input
= str(raw_input('Do you want to exit, use Python 2.x or stop the conversion?(e/2/s)'))
1075 if err_input
== 'e':
1077 elif err_input
== '2':
1078 convert_ass
= convert_ass_py2
1080 elif err_input
== 's':
1083 logging
.warning('Cannot read input, stop the conversion!')
1085 elif is_export
== 2 or is_export
== 1:
1086 convert_ass
= convert_ass_py2
1087 if not os
.path
.exists(os
.path
.join(LOCATION_DIR
, 'danmaku2ass2.py')):
1088 logging
.warning('danmaku2ass2.py DNE!')
1089 err_input
= str(raw_input('Do you want to exit, use Python 3.x or stop the conversion?(e/3/s)'))
1090 if err_input
== 'e':
1092 elif err_input
== '3':
1093 convert_ass
= convert_ass_py3
1095 elif err_input
== 's':
1098 logging
.warning('Cannot read input, stop the conversion!')
1101 convert_ass
= convert_ass_py2
1102 return is_export
, convert_ass
#----------------------------------------------------------------------
# Help text printed by "-h" (stored in USAGE; the variable name is assumed).
USAGE = '''Biligrab

https://github.com/cnbeining/Biligrab
http://www.cnbeining.com/

Usage:

python biligrab.py (-h) (-a) (-p) (-s) (-c) (-d) (-v) (-l) (-e) (-b) (-m) (-n) (-u) (-t) (-q) (-r) (-g)

-h: Print this usage file.

-a: The av number of the video.
    If not set, Biligrab will use the fallback interactive mode.
    Support "~", "," and mix use.

-p: The part number(s) to download.
    Able to use the same syntax as "-a".
    If set to 0, Biligrab will download all the available parts in the video.

-s: The source to download from.
    0: The original API source, can be Letv backup,
       and can fail if the original video is not available(e.g., deleted)
    1: The CDN API source, "oversea accelerate".
       Can be MINICDN backup in Mainland China or oversea.
       Good to bypass some bangumi's restrictions.
    2: Force to use the original source.
       Use Flvcd to parse the video, but would fail if
       1) The original source DNE, e.g., some old videos
       2) The original source is Letvcloud itself.
       3) Other unknown reason(s) that stops Flvcd from parsing the video.
       For any video that failed to parse, Biligrab will try to use Flvcd.
       (Mainly for oversea users regarding to copyright-restricted bangumies.)
       If the API is blocked, Biligrab would fake the UA.
    3: (Not stable) Use the HTML5 API.
       This works for downloading some cached Letvcloud videos, but is slow, and would fail for no reason sometimes.
       Will retry if unavailable.
    4: Use Flvcd to parse the original page.
       Good to fight with oversea and copyright restriction, but not working with iQiyi.
       May retrieve better quality video, especially for Youku.
    5: Use the API provided by BilibiliPr.
       Good to fight with some copyright restriction that BilibiliPr can fix.
       Not always working though.
    6: Use You-get (https://github.com/soimort/you-get).
       You need a you-get callable directly like "you-get -u blahblah".

-c: Default: ./bilicookies
    The path of cookies.
    Use cookies to visit member-only videos.

-d: Set the desired download software.
    Biligrab supports aria2c(16 threads), axel(20 threads), wget and curl by far.
    If not set, Biligrab will detect an available one;
    If none of those is available, Biligrab will quit.
    For more software support, please open an issue at https://github.com/cnbeining/Biligrab/issues/

-v: Set the desired concatenate software.
    Biligrab supports ffmpeg by far.
    If not set, Biligrab will detect an available one;
    If none of those is available, Biligrab will quit.
    For more software support, please open an issue at https://github.com/cnbeining/Biligrab/issues/
    Make sure you include a *working* command line example of this software!

-l: Log level, default: INFO.
    Dump the log of the output for better debugging.
    Can be set to debug.

-e: Export Danmaku to ASS file.
    Fulfilled with danmaku2ass(https://github.com/m13253/danmaku2ass/tree/py2),
    Author: @m13253, GPLv3 License.
    *For issue with this function, if you think the problem lies on the danmaku2ass side,
    please open the issue at both projects.*
    If set to 1 or 2, Biligrab will use Danmaku2ass's py2 branch.
    If set to 3, Biligrab will use Danmaku2ass's master branch, which would require
    a python3 callable via 'python3'.
    If python3 not callable or danmaku2ass2/3 DNE, Biligrab will ask for action.

-b: Set the probe software.
    Biligrab supports Mediainfo and FFprobe.
    If not set, Biligrab will detect an available one;
    If none of those is available, Biligrab will quit.
    For more software support, please open an issue at https://github.com/cnbeining/Biligrab/issues/
    Make sure you include a *working* command line example of this software!

-m: Only download the danmaku.

-n: Silent mode.
    Biligrab will not ask any question.

-u: Export video link to .m3u file, which can be used with MPlayer, mpc, VLC, etc.
    Biligrab will export a m3u8 instead of downloading any video(s).
    Can be broken with sources other than 0 or 1.

-t: The number of Mylist.
    Biligrab will process all the videos in this list.

-q: The thread number for downloading.
    Good to fix overhead problem.

-r: Select video quality.
    Only works with Source 0 or 1.
    Range: 0~4, higher for better quality.

-g: Threads for downloading every part.
    Works with aria2 and axel.
'''
#----------------------------------------------------------------------
if __name__ == '__main__':
    is_first_run, is_export, danmaku_only, IS_SLIENT, IS_M3U, mylist, time_fetch, download_thread, QUALITY, thread_single_download = 0, 1, 0, 0, 0, 0, 5, 16, -1, 16
    argv_list, av_list = [], []
    argv_list = sys.argv[1:]
    p_raw, vid, oversea, cookiepath, download_software, concat_software, probe_software, vid_raw, LOG_LEVEL, FAKE_IP, IS_FAKE_IP = '', '', '', '', '', '', '', '', 'INFO', '', 0
    convert_ass = convert_ass_py2
    try:
        opts, args = getopt.getopt(argv_list, "ha:p:s:c:d:v:l:e:b:m:n:u:t:q:r:g:i:",
            ['help', "av=", 'part=', 'source=', 'cookie=', 'download=', 'concat=', 'log=', 'export=', 'probe=', 'danmaku=', 'slient=', 'm3u=', 'mylist=', 'thread=', 'quality=', 'thread_single=', 'fake-ip='])
    except getopt.GetoptError:
        print(USAGE)
        exit()
    for o, a in opts:
        if o in ('-h', '--help'):
            print(USAGE)
            exit()
        if o in ('-a', '--av'):
            vid_raw = a
        if o in ('-p', '--part'):
            p_raw = a
        if o in ('-s', '--source'):
            oversea = a
        if o in ('-c', '--cookie'):
            cookiepath = a
            if cookiepath == '':
                logging.warning('No cookie path set, use default: ./bilicookies')
                cookiepath = './bilicookies'
        if o in ('-d', '--download'):
            download_software = a
        if o in ('-v', '--concat'):
            concat_software = a
        if o in ('-l', '--log'):
            LOG_LEVEL = str(a).upper()
        if o in ('-e', '--export'):
            is_export = int(a)
        if o in ('-b', '--probe'):
            probe_software = a
        if o in ('-m', '--danmaku'):
            danmaku_only = int(a)
        if o in ('-n', '--slient'):
            IS_SLIENT = int(a)
        if o in ('-u', '--m3u'):
            IS_M3U = int(a)
        if o in ('-t', '--mylist'):
            mylist = int(a)
        if o in ('-q', '--thread'):
            download_thread = int(a)
        if o in ('-r', '--quality'):
            QUALITY = int(a)
        if o in ('-g', '--thread_single'):
            thread_single_download = int(a)
        if o in ('-i', '--fake-ip'):
            FAKE_IP = str(a)
            IS_FAKE_IP = 1
    if len(vid_raw) == 0:
        vid_raw = str(raw_input('av'))
        p_raw = str(raw_input('P'))
        oversea = str(raw_input('Source?'))
        cookiepath = './bilicookies'
    logging.basicConfig(level = logging_level_reader(LOG_LEVEL))
    logging.debug('FAKE IP: ' + str(IS_FAKE_IP) + ' ' + FAKE_IP)
    av_list = get_full_p(vid_raw)
    if mylist != 0:
        av_list += mylist_to_aid_list(mylist)
    logging.debug('av_list: ' + str(av_list))
    if len(cookiepath) == 0:
        cookiepath = './bilicookies'
    if len(p_raw) == 0:
        logging.info('No part number set, download all the parts.')
        p_raw = '0'
    if len(oversea) == 0:
        oversea = '0'
        logging.info('Oversea not set, use original API(method 0).')
    IS_M3U = check_dependencies_exportm3u(IS_M3U)
    if IS_M3U == 1 and oversea not in {'0', '1'}:
        if IS_SLIENT == 0:
            logging.info('M3U exporting with source other than 0 or 1 can be broken, and lead to wrong duration!')
            input_raw = str(raw_input('Enter "q" to quit, or enter the source you want.'))
            if input_raw == 'q':
                exit()
            else:
                oversea = str(input_raw)
    concat_software, download_software, probe_software = check_dependencies(download_software, concat_software, probe_software)
    p_list = get_full_p(p_raw)
    if len(av_list) > 1 and len(p_list) > 1:
        logging.warning('You are downloading multi parts from multiple videos! This may result in unpredictable outputs!')
        if IS_SLIENT == 0:
            input_raw = str(raw_input('Enter "y" to continue, "n" to only download the first part, "q" to quit, or enter the part number you want.'))
            if input_raw == 'y':
                pass
            elif input_raw == 'n':
                p_list = [p_list[0]]
            elif input_raw == 'q':
                exit()
            else:
                p_list = get_full_p(input_raw)
    cookies = read_cookie(cookiepath)
    global BILIGRAB_HEADER, BILIGRAB_UA
    # deal with danmaku2ass's drama / Twice in case someone failed to check dependencies
    is_export, convert_ass = check_dependencies_danmaku2ass(is_export)
    is_export, convert_ass = check_dependencies_danmaku2ass(is_export)
    python_ver_str = '.'.join([str(i) for i in sys.version_info[:2]])
    BILIGRAB_UA = 'Biligrab/{VER} (cnbeining@gmail.com) (Python-urllib/{python_ver_str}, like libcurl/1.0 NSS-Mozilla/2.0)'.format(VER = VER, python_ver_str = python_ver_str)
    #BILIGRAB_UA = 'Biligrab / ' + str(VER) + ' (cnbeining@gmail.com) (like )'
    BILIGRAB_HEADER = {'User-Agent': BILIGRAB_UA, 'Cache-Control': 'no-cache', 'Pragma': 'no-cache', 'Cookie': cookies[0]}
    if LOG_LEVEL == 'DEBUG':
        logging.debug('!!!!!!!!!!!!!!!!!!!!!!!\nWARNING: This log contains some sensitive data. You may want to delete some part of the data before you post it publicly!\n!!!!!!!!!!!!!!!!!!!!!!!')
        logging.debug(str(BILIGRAB_HEADER))
        try:
            request = urllib2.Request('http://ipinfo.io/json', headers=FAKE_HEADER)
            response = urllib2.urlopen(request)
            data = response.read()
            print('!!!!!!!!!!!!!!!!!!!!!!!\nWARNING: This log contains some sensitive data. You may want to delete some part of the data before you post it publicly!\n!!!!!!!!!!!!!!!!!!!!!!!')
            print('=======================DUMP DATA==================')
            print(data)
            print('========================DATA END==================')
            print('DEBUG: ' + str(av_list))
        except Exception:
            print('WARNING: Cannot connect to IP-geo database server!')
    for vid in av_list:
        if str(p_raw) == '0':
            logging.info('You are downloading all the parts in this video...')
            try:
                p_raw = str('1~' + find_cid_api(vid, p_raw, cookies)[3])
                p_list = get_full_p(p_raw)
            except Exception:
                logging.info('Error when reading all the parts!')
                if IS_SLIENT == 0:
                    input_raw = str(raw_input('Enter the part number you want, or "q" to quit.'))
                    if input_raw == '0':
                        print('ERROR: Cannot use all the parts!')
                        exit()
                    elif input_raw == 'q':
                        exit()
                    else:
                        p_list = get_full_p(input_raw)
                else:
                    logging.info('Download the first part of the video...')
                    p_list = [1]
        logging.info('Your target download is av{vid}, part {p_raw}, from source {oversea}'.format(vid = vid, p_raw = p_raw, oversea = oversea))
        reload(sys)
        sys.setdefaultencoding('utf-8')
        for p in p_list:
            part_now = str(p)
            try:
                logging.info('Downloading part {p} ...'.format(p = p))
                main(vid, p, oversea, cookies, download_software, concat_software, is_export, probe_software, danmaku_only, time_fetch, download_thread, thread_single_download)
            except DanmakuOnlyException:
                pass
            except ExportM3UException:
                pass
            except Exception as e:
                print('ERROR: Biligrab failed: %s' % e)
                print('       If you think this should not happen, please dump your log using "-l", and open a issue at https://github.com/cnbeining/Biligrab/issues .')
                print('       Make sure you delete all the sensitive data before you post it publicly.')
                traceback.print_exc()