More fixes

commit e2bae2efd5
parent eeb8ed0cc2
@@ -190,3 +190,14 @@ async def get_file_mimetype(filename):
     stdout, _ = await proc.communicate()
     mimetype = stdout.decode().strip()
     return mimetype or ''
+
+async def get_file_ext(filename):
+    proc = await asyncio.create_subprocess_exec('file', '--brief', '--extension', filename, stdout=asyncio.subprocess.PIPE)
+    stdout, _ = await proc.communicate()
+    ext = stdout.decode().strip().split('/', maxsplit=1)[0]
+    if not ext or ext == '???':
+        mimetype = await get_file_mimetype(filename)
+        ext = mimetypes.guess_extension(mimetype) or '.bin'
+    if not ext.startswith('.'):
+        ext = '.' + ext
+    return ext
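
The new helper leans on file(1): `file --brief --extension` prints a slash-separated list of extension candidates (e.g. `jpeg/jpg/jpe`) or `???` when it has no suggestion, so the code keeps only the first candidate and otherwise falls back to guessing from the MIME type. Below is a standalone sketch of the same logic, assuming `file(1)` is on PATH and that the existing get_file_mimetype (only its tail is visible in this hunk) shells out to `file --brief --mime-type`; the usage path at the bottom is hypothetical.

import asyncio
import mimetypes

async def get_file_mimetype(filename):
    # Assumed body of the existing helper: ask file(1) for the MIME type.
    proc = await asyncio.create_subprocess_exec(
        'file', '--brief', '--mime-type', filename,
        stdout=asyncio.subprocess.PIPE)
    stdout, _ = await proc.communicate()
    return stdout.decode().strip() or ''

async def get_file_ext(filename):
    # Keep only the first of file(1)'s slash-separated extension candidates.
    proc = await asyncio.create_subprocess_exec(
        'file', '--brief', '--extension', filename,
        stdout=asyncio.subprocess.PIPE)
    stdout, _ = await proc.communicate()
    ext = stdout.decode().strip().split('/', maxsplit=1)[0]
    if not ext or ext == '???':
        # No usable suggestion: guess from the MIME type, default to '.bin'.
        ext = mimetypes.guess_extension(await get_file_mimetype(filename)) or '.bin'
    if not ext.startswith('.'):
        ext = '.' + ext
    return ext

# Hypothetical usage; prints a detected extension, or '.bin' as the fallback.
print(asyncio.run(get_file_ext('/bin/sh')))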

@@ -6,7 +6,7 @@ from decimal import Decimal
 from urllib.parse import urlparse, urlunparse, quote as urlencode
 from bs4 import BeautifulSoup
 from pyrogram import Client, filters
-from .. import config, help_dict, log_errors, public_log_errors, session, get_file_mimetype, progress_callback
+from .. import config, help_dict, log_errors, public_log_errors, session, get_file_mimetype, progress_callback, get_file_ext
 
 async def download_file(url, filename):
     async with session.get(url) as resp:

@@ -44,7 +44,7 @@ async def saucenao(client, message):
             await proc.communicate()
             filename = new_path
         with open(filename, 'rb') as file:
-            async with session.post(f'https://saucenao.com/search.php?db=999&output_type=2&numres=5&api_key={urlencode(config["config"]["saucenao_api"])}', data={'file': file}) as resp:
+            async with session.post(f'https://saucenao.com/search.php?db=999&output_type=2&api_key={urlencode(config["config"]["saucenao_api"])}', data={'file': file}) as resp:
                 json = await resp.json()
         if json['header']['status']:
             await reply.edit_text(f'<b>{json["header"]["status"]}:</b> {html.escape(json["header"].get("message", "No message"))}')
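
This hunk drops `numres=5` from the query string; `numres` caps how many results SauceNAO returns, so without it the API sends its default result count and the length checks in the loop below decide how many results actually fit in the reply. A minimal sketch of the same call with plain aiohttp (the codebase uses a shared `session` instead), with a placeholder API key and image path:

import asyncio
from urllib.parse import quote as urlencode
import aiohttp

async def sauce(path, api_key):
    # output_type=2 asks SauceNAO for JSON; db=999 searches all indexes.
    url = ('https://saucenao.com/search.php'
           f'?db=999&output_type=2&api_key={urlencode(api_key)}')
    async with aiohttp.ClientSession() as session:
        with open(path, 'rb') as file:
            async with session.post(url, data={'file': file}) as resp:
                return await resp.json()

# Hypothetical usage:
# print(asyncio.run(sauce('image.jpg', 'YOUR_API_KEY'))['header'])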

@@ -54,44 +54,50 @@ async def saucenao(client, message):
         to_image = False
         filename = os.path.join(tempdir, '0')
         for result in json['results']:
+            if not result['data'].get('ext_urls'):
+                continue
             atext = f'<b>{html.escape(result["header"]["index_name"])}'
             if Decimal(result['header']['similarity']) < minimum_similarity:
                 atext += ' (low similarity result)'
             atext += '</b>'
-            if result['data'].get('ext_urls'):
-                atext += '\n<b>URL'
-                if len(result['data']['ext_urls']) > 1:
-                    atext += 's:</b>\n'
-                    atext += '\n'.join(map(html.escape, result['data']['ext_urls']))
-                else:
-                    atext += f':</b> {html.escape(result["data"]["ext_urls"][0])}'
-                if not to_image:
-                    for url in result['data']['ext_urls']:
-                        if await download_file(url, filename):
-                            with open(filename) as file:
-                                soup = BeautifulSoup(file.read())
-                            pimg = soup.find(lambda tag: tag.name == 'meta' and tag.attrs.get('property') == 'og:image' and tag.attrs.get('content'))
-                            if pimg:
-                                pimg = pimg.attrs.get('content', '').strip()
-                                if pimg:
-                                    parsed = list(urlparse(pimg))
-                                    if not parsed[0]:
-                                        parsed[0] = 'https'
-                                        pimg = urlunparse(parsed)
-                                    if parsed[0] not in ('http', 'https'):
-                                        continue
-                                    if await download_file(pimg, filename):
-                                        to_image = True
-                                        break
-                    else:
-                        await download_file(result['header']['thumbnail'], filename)
-                        to_image = True
+            atext += '\n<b>URL'
+            if len(result['data']['ext_urls']) > 1:
+                atext += 's:</b>\n'
+                atext += '\n'.join(map(html.escape, result['data']['ext_urls']))
+            else:
+                atext += f':</b> {html.escape(result["data"]["ext_urls"][0])}'
+            if not to_image:
+                for url in result['data']['ext_urls']:
+                    if await download_file(url, filename):
+                        with open(filename) as file:
+                            soup = BeautifulSoup(file.read())
+                        pimg = soup.find(lambda tag: tag.name == 'meta' and tag.attrs.get('property') == 'og:image' and tag.attrs.get('content'))
+                        if pimg:
+                            pimg = pimg.attrs.get('content', '').strip()
+                            if pimg:
+                                parsed = list(urlparse(pimg))
+                                if not parsed[0]:
+                                    parsed[0] = 'https'
+                                    pimg = urlunparse(parsed)
+                                if parsed[0] not in ('http', 'https'):
+                                    continue
+                                if await download_file(pimg, filename):
+                                    to_image = True
+                                    break
+                else:
+                    await download_file(result['header']['thumbnail'], filename)
+                    to_image = True
             atext += '\n\n'
-            if len((await client.parser.parse(caption + atext, 'html'))['message']) <= 1024:
+            length = len((await client.parser.parse(caption + atext, 'html'))['message'])
+            if length <= 1024:
                 caption += atext
+            if length > 4096:
+                break
             text += atext
         try:
-            await message.reply_photo(filename, caption=caption)
+            ext = await get_file_ext(filename)
+            os.rename(filename, filename + ext)
+            await message.reply_photo(filename + ext, caption=caption)
         except Exception:
             await reply.edit_text(text)
         else:
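
Two behavior changes in this hunk beyond the dedent under the new `continue` guard: the parsed length is computed once and reused, so the loop can also stop entirely once the plain-text fallback would exceed Telegram's 4096-character message limit (the 1024-character check matches Telegram's media caption limit and was already there); and the downloaded temp file, previously named just '0', is renamed with a detected extension before upload, presumably so the file type can be inferred from the name. A minimal sketch of the length gating in isolation, where `parse` stands in for `client.parser.parse` and returns the rendered text:

CAPTION_LIMIT = 1024   # Telegram: max characters in a media caption
MESSAGE_LIMIT = 4096   # Telegram: max characters in a text message

def accumulate(results, parse):
    caption = text = ''
    for atext in results:
        length = len(parse(caption + atext))
        if length <= CAPTION_LIMIT:
            caption += atext   # still fits under the photo
        if length > MESSAGE_LIMIT:
            break              # even the text fallback is full; stop early
        text += atext          # text fallback keeps growing toward 4096
    return caption, text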