More fixes

parent eeb8ed0cc2
commit e2bae2efd5
@@ -190,3 +190,14 @@ async def get_file_mimetype(filename):
         stdout, _ = await proc.communicate()
         mimetype = stdout.decode().strip()
     return mimetype or ''
+
+async def get_file_ext(filename):
+    proc = await asyncio.create_subprocess_exec('file', '--brief', '--extension', filename, stdout=asyncio.subprocess.PIPE)
+    stdout, _ = await proc.communicate()
+    ext = stdout.decode().strip().split('/', maxsplit=1)[0]
+    if not ext or ext == '???':
+        mimetype = await get_file_mimetype(filename)
+        ext = mimetypes.guess_extension(mimetype) or '.bin'
+    if not ext.startswith('.'):
+        ext = '.' + ext
+    return ext
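Note: `file --brief --extension` prints a slash-separated list of candidate extensions (e.g. "jpeg/jpg/jpe/jfif") or "???" when it has no suggestion, which is why the new helper takes the first candidate and then falls back to the MIME type. A minimal standalone sketch of the same idea follows; the bot's helper reuses the existing get_file_mimetype, whereas this sketch shells out to file(1) a second time, and the sample path is purely illustrative:

    import asyncio
    import mimetypes

    async def _file_output(*args):
        # run file(1) with --brief and return its stripped stdout
        proc = await asyncio.create_subprocess_exec('file', '--brief', *args,
                                                    stdout=asyncio.subprocess.PIPE)
        stdout, _ = await proc.communicate()
        return stdout.decode().strip()

    async def guess_ext(filename):
        # first candidate from e.g. "jpeg/jpg/jpe/jfif"; "???" means file(1) has no idea
        ext = (await _file_output('--extension', filename)).split('/', maxsplit=1)[0]
        if not ext or ext == '???':
            mimetype = await _file_output('--mime-type', filename)
            ext = mimetypes.guess_extension(mimetype) or '.bin'
        return ext if ext.startswith('.') else '.' + ext

    print(asyncio.run(guess_ext('/etc/hostname')))  # e.g. '.txt' on a typical Linux system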
@@ -6,7 +6,7 @@ from decimal import Decimal
 from urllib.parse import urlparse, urlunparse, quote as urlencode
 from bs4 import BeautifulSoup
 from pyrogram import Client, filters
-from .. import config, help_dict, log_errors, public_log_errors, session, get_file_mimetype, progress_callback
+from .. import config, help_dict, log_errors, public_log_errors, session, get_file_mimetype, progress_callback, get_file_ext
 
 async def download_file(url, filename):
     async with session.get(url) as resp:
@@ -44,7 +44,7 @@ async def saucenao(client, message):
             await proc.communicate()
             filename = new_path
         with open(filename, 'rb') as file:
-            async with session.post(f'https://saucenao.com/search.php?db=999&output_type=2&numres=5&api_key={urlencode(config["config"]["saucenao_api"])}', data={'file': file}) as resp:
+            async with session.post(f'https://saucenao.com/search.php?db=999&output_type=2&api_key={urlencode(config["config"]["saucenao_api"])}', data={'file': file}) as resp:
                 json = await resp.json()
         if json['header']['status']:
             await reply.edit_text(f'<b>{json["header"]["status"]}:</b> {html.escape(json["header"].get("message", "No message"))}')
@@ -54,44 +54,50 @@ async def saucenao(client, message):
         to_image = False
         filename = os.path.join(tempdir, '0')
         for result in json['results']:
+            if not result['data'].get('ext_urls'):
+                continue
             atext = f'<b>{html.escape(result["header"]["index_name"])}'
             if Decimal(result['header']['similarity']) < minimum_similarity:
                 atext += ' (low similarity result)'
             atext += '</b>'
-            if result['data'].get('ext_urls'):
-                atext += '\n<b>URL'
-                if len(result['data']['ext_urls']) > 1:
-                    atext += 's:</b>\n'
-                    atext += '\n'.join(map(html.escape, result['data']['ext_urls']))
-                else:
-                    atext += f':</b> {html.escape(result["data"]["ext_urls"][0])}'
-                if not to_image:
-                    for url in result['data']['ext_urls']:
-                        if await download_file(url, filename):
-                            with open(filename) as file:
-                                soup = BeautifulSoup(file.read())
-                            pimg = soup.find(lambda tag: tag.name == 'meta' and tag.attrs.get('property') == 'og:image' and tag.attrs.get('content'))
-                            if pimg:
-                                pimg = pimg.attrs.get('content', '').strip()
-                                if pimg:
-                                    parsed = list(urlparse(pimg))
-                                    if not parsed[0]:
-                                        parsed[0] = 'https'
-                                        pimg = urlunparse(parsed)
-                                    if parsed[0] not in ('http', 'https'):
-                                        continue
-                                    if await download_file(pimg, filename):
-                                        to_image = True
-                                        break
-                    else:
-                        await download_file(result['header']['thumbnail'], filename)
-                        to_image = True
+            atext += '\n<b>URL'
+            if len(result['data']['ext_urls']) > 1:
+                atext += 's:</b>\n'
+                atext += '\n'.join(map(html.escape, result['data']['ext_urls']))
+            else:
+                atext += f':</b> {html.escape(result["data"]["ext_urls"][0])}'
+            if not to_image:
+                for url in result['data']['ext_urls']:
+                    if await download_file(url, filename):
+                        with open(filename) as file:
+                            soup = BeautifulSoup(file.read())
+                        pimg = soup.find(lambda tag: tag.name == 'meta' and tag.attrs.get('property') == 'og:image' and tag.attrs.get('content'))
+                        if pimg:
+                            pimg = pimg.attrs.get('content', '').strip()
+                            if pimg:
+                                parsed = list(urlparse(pimg))
+                                if not parsed[0]:
+                                    parsed[0] = 'https'
+                                    pimg = urlunparse(parsed)
+                                if parsed[0] not in ('http', 'https'):
+                                    continue
+                                if await download_file(pimg, filename):
+                                    to_image = True
+                                    break
+                else:
+                    await download_file(result['header']['thumbnail'], filename)
+                    to_image = True
             atext += '\n\n'
-            if len((await client.parser.parse(caption + atext, 'html'))['message']) <= 1024:
+            length = len((await client.parser.parse(caption + atext, 'html'))['message'])
+            if length <= 1024:
                 caption += atext
+            if length > 4096:
+                break
             text += atext
         try:
-            await message.reply_photo(filename, caption=caption)
+            ext = await get_file_ext(filename)
+            os.rename(filename, filename + ext)
+            await message.reply_photo(filename + ext, caption=caption)
         except Exception:
             await reply.edit_text(text)
         else:
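Note: the length checks line up with Telegram's 1024-character caption and 4096-character message limits, and the download is renamed via get_file_ext so reply_photo receives a file with a real extension. The og:image lookup often yields a scheme-less URL such as "//example.com/pic.jpg", which is why the handler fills in "https" when urlparse reports an empty scheme. A small illustrative sketch of that step (the sample HTML and URL are made up):

    from urllib.parse import urlparse, urlunparse
    from bs4 import BeautifulSoup

    html_doc = '<meta property="og:image" content="//example.com/pic.jpg">'
    soup = BeautifulSoup(html_doc, 'html.parser')
    tag = soup.find(lambda t: t.name == 'meta' and t.attrs.get('property') == 'og:image' and t.attrs.get('content'))
    pimg = tag.attrs['content'].strip()
    parsed = list(urlparse(pimg))
    if not parsed[0]:          # no scheme -> assume https, as the handler does
        parsed[0] = 'https'
        pimg = urlunparse(parsed)
    print(pimg)                # https://example.com/pic.jpg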