I'm currently working on a program that pastes two images together with PIL. PIL can't open images from URLs directly, so I had to do a bunch of extra work to fetch them first. Anyway, now I can't use what PIL outputs because of this error:
AttributeError: 'Image' object has no attribute 'getvalue'
Here's the important chunk of my code:
async with aiohttp.ClientSession() as session:
    async with session.get(avurl) as second_image:
        image_bytes = await second_image.read()

with Image.open(BytesIO(image_bytes)).convert("RGB") as first_image:
    output_buffer = BytesIO()
    first_image.save(output_buffer, "png")
    output_buffer.seek(0)

    async with aiohttp.ClientSession() as session:
        async with session.get("https://i.imgur.com/dNS0WJO.png") as second_image:
            image_bytes = await second_image.read()

    with Image.open(BytesIO(image_bytes)) as second_image:
        output_buffer = BytesIO()
        second_image.save(output_buffer, "png")
        output_buffer.seek(0)

        first_image.paste(second_image, (0, 0))
        buf = io.BytesIO()
        first_image.save(buf, "png")
        first_image = first_image.getvalue()
Could anyone tell me what line of code I'm missing to fix this, or what I did incorrectly?
Image objects indeed do not have a getvalue method; it's BytesIO instances that do.
Here you should be calling buf.getvalue() instead of first_image.getvalue():
buf = io.BytesIO()
first_image.save(buf, "png")
first_image = buf.getvalue()
Your code looks a bit like this: https://stackoverflow.com/a/33117447/7051394, but if you look at the last three lines of that snippet, imgByteArr is still a BytesIO, so imgByteArr.getvalue() is valid.
Here's a simpler and possibly faster reformulation of your code:
async with aiohttp.ClientSession() as session:
    async with session.get(avurl) as resp1:
        image1data = await resp1.read()
    async with session.get("https://i.imgur.com/dNS0WJO.png") as resp2:
        image2data = await resp2.read()

image1 = Image.open(BytesIO(image1data)).convert("RGB")
image2 = Image.open(BytesIO(image2data))
image1.paste(image2, (0, 0))

buf = BytesIO()
image1.save(buf, "png")
composite_image_data = buf.getvalue()
You'll end up with composite_image_data containing PNG data for the composited image.
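If you just want to sanity-check the result, a minimal illustrative step (the filename is my own, not from the original) is to dump those bytes to disk:

# purely illustrative: write the composited PNG bytes to a file to inspect the result
with open("composite.png", "wb") as f:
    f.write(composite_image_data)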
Related
I want to get the size of the image from a buffer.
import asyncio
import io

from PIL import Image
from playwright.async_api import async_playwright

class Scraper:
    async def _save_image(self, res):
        buffer = await res.body()
        img = Image.open(io.BytesIO(buffer))
        img_w, img_h = img.size

    async def scrape(self):
        playwright = await async_playwright().start()
        browser = await playwright.chromium.launch(headless=True, devtools=False)
        page = await browser.new_page()
        page.on('response', self._save_image)
        await page.goto('https://www.example.com')

scraper = Scraper(key, id)
asyncio.run(scraper.scrape())
img = Image.open(io.BytesIO(buffer))
The call above is not asynchronous. I want to know the size of the images from the buffer asynchronously. How do I do this?
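One possible direction (a sketch I'm adding, not from the original post): Image.open on a BytesIO is an ordinary synchronous call, so inside the async response handler you can either call it directly for small images or offload it to a worker thread with asyncio.to_thread (Python 3.9+) so it doesn't block the event loop:

import asyncio
import io

from PIL import Image

def _image_size(buffer: bytes) -> tuple:
    # synchronous helper: PIL reads the header lazily, so .size is cheap
    with Image.open(io.BytesIO(buffer)) as img:
        return img.size

async def _save_image(res):
    # res.body() is awaited as in the question; the decode is pushed to a thread
    buffer = await res.body()
    img_w, img_h = await asyncio.to_thread(_image_size, buffer)
    print(img_w, img_h)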
So, I want the bot to send a welcome card with the user's profile picture and text saying "Welcome {user.name}", but the text part isn't working. I get no error in the console.
Here's my code:
from PIL import Image, ImageFilter, ImageFont, ImageDraw
from io import BytesIO

@client.event
async def on_member_join(member):
    wc = Image.open("wc.jpg")
    asset = member.avatar_url_as(size=128)
    data = BytesIO(await asset.read())
    pfp = Image.open(data)
    draw = ImageDraw.Draw(wc)
    font = ImageFont.truetype("Littlecandy.ttf", 24)
    pfp = pfp.resize((474, 382))
    draw.text((549, 284), f"{member.display_name}", (171, 5, 171), font=font)
    wc.paste(pfp, (727, 209))
    wc.save("wcm.jpg")
    await client.get_channel(850634788633182218).send(file=discord.File('wcm.jpg'))
So, I haven't found the actual cause, but when I removed the RGB tuple (171, 5, 171) and tested it again, it worked.
Here's my changed code:
from PIL import Image, ImageFilter, ImageFont, ImageDraw
from io import BytesIO

@client.event
async def on_member_join(member):
    wc2 = Image.open("wc2.jpg")
    asset = member.avatar_url_as(size=64)
    data = BytesIO(await asset.read())
    pfp = Image.open(data)
    draw = ImageDraw.Draw(wc2)
    font = ImageFont.truetype("BalsamiqSans-BoldItalic.ttf", 45)
    text = f"{member}"
    pfp = pfp.resize((211, 181))
    wc2.paste(pfp, (30, 28))
    draw.text((26, 235), text, font=font, fill='orange')
    wc2.save("wcm.jpg")
    await client.get_channel(850634788633182218).send(file=discord.File('wcm.jpg'))
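A side note I'm adding (not part of the original answers): in Pillow, ImageDraw.text accepts the fill color as its third positional argument, so the first version's call was syntactically valid; spelling out fill= just makes the intent clearer:

# equivalent to the positional form used in the first snippet, with fill made explicit
draw.text((549, 284), member.display_name, fill=(171, 5, 171), font=font)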
My project uses socket.io to send/receive data.
I added aiohttp to help display the results in the browser.
import asyncio

import socketio
from aiohttp import web

sio = socketio.AsyncServer(async_mode='aiohttp')
app = web.Application()
sio.attach(app)
I followed https://us-pycon-2019-tutorial.readthedocs.io/aiohttp_file_uploading.html to upload an image, but I cannot upload a video.
import io

import cv2
import PIL.Image

def gen1():
    # while True:
    #     if len(pm.list_image_display) > 1:
    image = cv2.imread("/home/duong/Pictures/Chess_Board.svg")
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # img = PIL.Image.new("RGB", (64, 64), color=(255, 255, 0))
    image_pil = PIL.Image.fromarray(image)
    fp = io.BytesIO()
    image_pil.save(fp, format="JPEG")
    content = fp.getvalue()
    return content

async def send1():
    print("11")
    return web.Response(body=gen1(), content_type='image/jpeg')
How do I display video via aiohttp in the browser?
To stream a video in aiohttp you may open a StreamResponse in response to the fetching of an <img> HTML node:
@routes.get('/video')
async def video_feed(request):
    response = web.StreamResponse()
    response.content_type = 'multipart/x-mixed-replace; boundary=frame'
    await response.prepare(request)
    for frame in frames('/dev/video0'):
        await response.write(frame)
    return response
and send your frames in the form of bytes:
def frames(path):
    camera = cv2.VideoCapture(path)
    if not camera.isOpened():
        raise RuntimeError('Cannot open camera')
    while True:
        _, img = camera.read()
        img = cv2.resize(img, (480, 320))
        frame = cv2.imencode('.jpg', img)[1].tobytes()
        yield b'--frame\r\nContent-Type: image/jpeg\r\n\r\n' + frame + b'\r\n'
This may however be demanding on the network, as the bitrate required to send each frame individually is high. For real-time streaming with further compression you may want to use a WebRTC implementation like aiortc.
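For context, here is a minimal sketch of how such a route table can be served by aiohttp and consumed by an <img> tag; the index handler and the inline HTML are illustrative additions, not part of the original answer:

from aiohttp import web

routes = web.RouteTableDef()

@routes.get('/')
async def index(request):
    # the <img> tag below makes the browser request /video, which triggers video_feed
    return web.Response(
        text='<html><body><img src="/video"></body></html>',
        content_type='text/html',
    )

# video_feed from the snippet above would be decorated with @routes.get('/video')
app = web.Application()
app.add_routes(routes)
web.run_app(app, port=8080)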
So I made a webservice (based on starlette), with an endpoint that accepts a binary body. I want to feed this binary body to fastavro.
The Starlette docs say I can access the raw data as an async stream with request.stream().
async for chunk in request.stream():
    # do something with chunk...
Now, I want to feed the stream to fastavro. The thing is, fastavro's reader needs a file-like input stream:
with open('some-file.avro', 'rb') as fo:
    avro_reader = reader(fo)
My question is, is there a clean way to transform this async stream into a file-like one?
I guess I could implement an object with a read() method that awaits and returns the data returned by request.stream(). But if the caller passes a size, I need to keep a memory buffer, don't I? Could something based on BufferedRWPair work?
Or is the only way to store the whole stream first to the disk or memory, before feeding it to fastavro?
Thanks in advance!
I ended up using a SpooledTemporaryFile:
from tempfile import SpooledTemporaryFile

data_file = SpooledTemporaryFile(mode='w+b',
                                 max_size=MAX_RECEIVED_DATA_MEMORY_SIZE)

async for chunk in request.stream():
    data_file.write(chunk)

data_file.seek(0)
avro_reader = reader(data_file)
It's not the ideal solution I envisioned (somehow streaming the data directly from the input to the output), but it's still good enough...
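For completeness, a small sketch (assuming reader is fastavro's reader, as in the question's snippet) of consuming the records once the spooled file has been rewound:

from fastavro import reader  # as used in the question

# avro_reader was created from data_file above; each record decodes to a plain dict
for record in avro_reader:
    print(record)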
I encountered the same problem and wrote a compact StreamingBody class. It does exactly what I need.
from typing import AsyncIterator
import asyncio


class AsyncGen:
    def __init__(self, block_count, block_size) -> None:
        self.bc = block_count
        self.bs = block_size

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.bc == 0:
            raise StopAsyncIteration()
        self.bc -= 1
        return b"A" * self.bs


class StreamingBody:
    _chunks: AsyncIterator[bytes]
    _backlog: bytes

    def __init__(self, chunks: AsyncIterator[bytes]):
        self._chunks = chunks
        self._backlog = b""

    async def _read_until_end(self):
        content = self._backlog
        self._backlog = b""
        while True:
            try:
                content += await self._chunks.__anext__()
            except StopAsyncIteration:
                break
        return content

    async def _read_chunk(self, size: int):
        content = self._backlog
        bytes_read = len(self._backlog)
        while bytes_read < size:
            try:
                chunk = await self._chunks.__anext__()
            except StopAsyncIteration:
                break
            content += chunk
            bytes_read += len(chunk)
        self._backlog = content[size:]
        content = content[:size]
        return content

    async def read(self, size: int = -1):
        if size > 0:
            return await self._read_chunk(size)
        elif size == -1:
            return await self._read_until_end()
        else:
            return b""


async def main():
    async_gen = AsyncGen(11, 3)
    body = StreamingBody(async_gen)

    res = await body.read(11)
    print(f"[{len(res)}]: {res}")

    res = await body.read()
    print(f"[{len(res)}]: {res}")

    res = await body.read()
    print(f"[{len(res)}]: {res}")


loop = asyncio.get_event_loop()
loop.run_until_complete(main())
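As an illustrative sketch I'm adding (the endpoint name and response are hypothetical), inside a Starlette endpoint the class can wrap request.stream() directly, since that returns an async iterator of bytes:

from starlette.responses import PlainTextResponse

async def upload(request):
    body = StreamingBody(request.stream())
    magic = await body.read(4)   # Avro object container files begin with b'Obj\x01'
    rest = await body.read()     # remainder of the upload
    return PlainTextResponse(f"read {len(magic) + len(rest)} bytes")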
I'm making a GET request to a DICOM server, which returns a multipart/related response with type=image/jpeg. I tried using aiohttp's multipart feature to parse it, but it didn't work; the saved file is corrupted.
Here is my code.
import asyncio
import aiohttp

'''
async def fetch(url, session, header):
    async with session.get(url, headers=header) as response:
        await response

async def multiHit(urls, header):
    tasks = []
    async with aiohttp.ClientSession() as session:
        for i, url in enumerate(urls):
            tasks.append(fetch(url, session, header))
        result = await asyncio.gather(*tasks)
        return result

loop = asyncio.get_event_loop()
res = loop.run_until_complete(multiHit(["FRAME URL"], {"Accept": "multipart/related;type=image/jpeg"}))
print(res)
'''

async def xyz(loop):
    async with aiohttp.ClientSession(loop=loop).get(url="FRAME URL", headers={"Accept": "multipart/related;type=image/jpeg"}) as response:
        reader = aiohttp.MultipartReader.from_response(response)
        while True:
            part = await reader.next()
            if part is None:
                break
            filedata = await part.read(decode=False)
            import base64
            with open('m.jpeg', 'wb') as outFile:
                outFile.write(part.decode(filedata))
    return 1

loop = asyncio.get_event_loop()
res = loop.run_until_complete(xyz(loop))
How do I parse the multipart/related response and save the images?
I figured out that I was parsing the multipart response properly, but I had to use another library (imagecodecs, method jpegsof3_decode) to decompress each individual part into an image. This gives a numpy array of the image. Here is the updated code:
reader = aiohttp.MultipartReader.from_response(response)
while True:
    part = await reader.next()
    if part is None:
        break
    data = await part.read()
    imageDecompressed = jpegsof3_decode(data)
Further, the numpy array can be converted into an image using the cv2 library:
success, encoded_image = cv2.imencode('.png', imageDecompressed)
A byte version of the converted image can be obtained this way:
imageInBytes = encoded_image.tobytes()
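As a small follow-up sketch (the filenames are illustrative, not from the original answer), the decoded frame can also be written straight to disk, either from the encoded bytes or directly with cv2.imwrite:

# write the PNG-encoded bytes produced above to a file
with open('frame.png', 'wb') as out_file:
    out_file.write(imageInBytes)

# or let OpenCV encode and write in one step
cv2.imwrite('frame_direct.png', imageDecompressed)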