Deal with big images
commit a1207d1c11
parent b66da5612b

redditbot.py | 27
--- a/redditbot.py
+++ b/redditbot.py
@@ -126,17 +126,21 @@ async def main():
                     break
                 file.write(chunk)
 
+    async def _get_file_mimetype(filename):
+        mimetype = mimetypes.guess_type(filename)[0]
+        if not mimetype:
+            proc = await asyncio.create_subprocess_exec('file', '--brief', '--mime-type', filename, stdout=asyncio.subprocess.PIPE)
+            stdout, _ = await proc.communicate()
+            mimetype = stdout.decode().strip()
+        return mimetype or ''
+
     async def _get_file_ext(filename):
         proc = await asyncio.create_subprocess_exec('file', '--brief', '--extension', filename, stdout=asyncio.subprocess.PIPE)
         stdout, _ = await proc.communicate()
         ext = stdout.decode().strip().split('/', maxsplit=1)[0]
         if not ext or ext == '???':
-            mimetype = mimetypes.guess_type(filename)[0]
-            if not mimetype:
-                proc = await asyncio.create_subprocess_exec('file', '--brief', '--mime-type', filename, stdout=asyncio.subprocess.PIPE)
-                stdout, _ = await proc.communicate()
-                mimetype = stdout.decode().strip()
-            ext = mimetypes.guess_extension(mimetype or '') or '.bin'
+            mimetype = await _get_file_mimetype(filename)
+            ext = mimetypes.guess_extension(mimetype) or '.bin'
         if not ext.startswith('.'):
             ext = '.' + ext
         return ext
@@ -225,10 +229,17 @@ async def main():
                 if i in preview:
                     url = preview[i]['source']['url']
                     break
-        elif random_post.is_reddit_media_domain and preview and preview['enabled'] and not random_post.is_video:
-            url = preview['images'][0]['source']['url']
         if url:
             await _download_file(filename, url)
+            mimetype = await _get_file_mimetype(filename)
+            if mimetype.startswith('image') and preview and preview['enabled']:
+                preview = preview['images'][0]
+                urls = [preview['source']['url']]
+                urls.extend(i['url'] for i in reversed(preview['resolutions']))
+                for url in urls:
+                    if os.path.getsize(filename) < 10000000:
+                        break
+                    await _download_file(filename, url)
         ext = await _get_file_ext(filename)
         if ext.startswith('.htm'):
             files = []