2023-11-26 15:27:32 +00:00
|
|
|
import io
|
2023-12-02 06:03:50 +00:00
|
|
|
import time
|
2023-11-26 15:27:32 +00:00
|
|
|
import PIL
|
|
|
|
import base64
|
|
|
|
import re
|
|
|
|
import random
|
|
|
|
import aiohttp
|
|
|
|
import asyncio
|
|
|
|
from aiogoogletrans import Translator
|
|
|
|
from typing import Any, Dict
|
|
|
|
from discord import File
|
2023-11-25 17:28:48 +00:00
|
|
|
|
2023-11-26 15:27:32 +00:00
|
|
|
# Shared translator instance used to convert Korean prompt text to English.
translator = Translator()
|
|
|
|
# Candidate LoRA weights randomly sampled when appending the canistermix LoRA tag.
weight = [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.7, 0.8, 0.9]
|
2023-12-02 06:03:50 +00:00
|
|
|
# Candidate CFG-scale values randomly sampled for each generation payload.
cfg = [6, 7, 8, 9, 10, 11]
|
2023-11-26 15:27:32 +00:00
|
|
|
#def check_curse(text: str):
|
|
|
|
# return korcen_checker.check(text)
|
|
|
|
def is_korean(string):
    """Return True if *string* contains any Hangul character (jamo or full syllable)."""
    hangul = re.compile(r"[ㄱ-ㅎㅏ-ㅣ가-힣]")
    return bool(hangul.search(string))
|
|
|
|
|
|
|
|
async def process_prompt(prompt: str, remove: str, res: list, isnsfw: bool, style1: float, style2: float, afterprocess: float):
    """Build the txt2img JSON payload for the Stable Diffusion GPU server.

    Translates Korean prompt/remove text to English, assembles the negative
    prompt, optionally appends LoRA tags, and returns the payload dict.

    Args:
        prompt: positive prompt text (Korean or English).
        remove: extra negative-prompt terms, or None for none.
        res: [width, height] of the requested image.
        isnsfw: currently unused by this function.
        style1: weight for the addDetail LoRA (0 disables the tag).
        style2: weight for the 光影 LoRA (0 disables the tag).
        afterprocess: refiner switch point, passed through to the payload.

    Returns:
        dict ready to POST to the /sdapi/v1/txt2img endpoint.
    """
    # If the prompt asks for a man ("남자") or explicitly removes woman
    # ("여자"), force male/female tags below. BUGFIX: the original did
    # `"여자" in remove` without a None guard and crashed when remove is None.
    man = "남자" in prompt or (remove is not None and "여자" in remove)

    # Translate Korean input to English before building the payload.
    if is_korean(prompt):
        translated = await translator.translate(prompt, dest="en")
        prompt = translated.text
    if remove is not None and is_korean(remove):
        translated = await translator.translate(remove, dest="en")
        remove = translated.text

    default_negative = """(KHFB, AuroraNegative, easynegative, negative_hand-neg, verybadimagenegative_v1.3:0.8), (Worst Quality, Low Quality:1.4), border, skimpy, grayscale, multiple_girls, 3d, realistic, string, multiple hands, chinese, thick abs, chubby abs, lowres, bad anatomy, asymmetric wings, elf, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, large areolae, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, Multiple legs, multiple feet, tie, (necktie:1.5), several hands, three feet, four legs, three legs,animal ears"""

    # Optional style LoRA tags (a weight of 0 means "off").
    if style1 != 0:
        prompt = prompt + f"<lora:addDetail:{style1}>"
    if style2 != 0:
        prompt = prompt + f"<lora:光影:{style2}>"

    # BUGFIX: negative_prompt was previously only assigned when remove was
    # not None, raising a NameError when building the payload otherwise.
    if remove is not None:
        negative_prompt = default_negative + "," + remove
    else:
        negative_prompt = default_negative

    if man:
        prompt = prompt + "," + "(1boy)"
        negative_prompt = negative_prompt + "," + "(1girl)"

    # Randomly mix in the canistermix LoRA at a random weight.
    # BUGFIX: the original assigned this to an unused local (qprompt),
    # so the tag never actually reached the payload.
    if random.choice([True, False]):
        prompt = prompt + f"<lora:canistermix1.1:{random.choice(weight)}>"

    payloads = {
        "prompt": prompt,
        "negative_prompt": negative_prompt,
        "seed": random.randint(0, 1000000000),
        "steps": 25,
        "cfg_scale": random.choice(cfg),
        "width": res[0],
        "height": res[1],
        "sampler_index": "DPM++ 2M Karras",
        "refiner_checkpoint": "smooREFINERV2R10_half",
        "refiner_switch_at": afterprocess,
        "alwayson_scripts": {
            "ADetailer": {
                'args': [
                    {
                        'ad_model': 'face_yolov8n.pt',
                        'ad_inpaint_only_masked': True
                    }]
            }}}
    return payloads
|
2023-11-25 17:28:48 +00:00
|
|
|
|
|
|
|
|
2023-11-26 15:27:32 +00:00
|
|
|
async def post_gpu_server(url: str, payload: Dict[str, Any]):
    """POST *payload* as JSON to the GPU server at *url*.

    Args:
        url: full endpoint URL to POST to.
        payload: JSON-serializable request body.

    Returns:
        {"status": True, "data": <parsed JSON>} on HTTP 200, otherwise
        {"status": False, "data": None} (including timeouts and errors).
    """
    async with aiohttp.ClientSession() as session:
        try:
            # Generation can be slow, hence the generous 300 s timeout.
            async with session.post(url, json=payload, timeout=300) as response:
                if response.status == 200:
                    return {"status": True, "data": await response.json()}
                return {"status": False, "data": None}
        except Exception:
            # BUGFIX: the original only returned a dict for
            # asyncio.TimeoutError; any other exception (connection refused,
            # DNS failure, ...) implicitly returned None, breaking the
            # {"status", "data"} contract for callers.
            return {"status": False, "data": None}
|
|
|
|
|
|
|
|
|
|
|
|
async def image_to_base64(image) -> str:
    """Load an image, flatten it to RGB, and return it PNG-encoded as base64 text.

    Args:
        image: anything PIL.Image.open accepts (path or binary file object).

    Returns:
        base64-encoded PNG bytes, decoded to a UTF-8 str.
    """
    # BUGFIX: the module only does a bare `import PIL`, which does not load
    # the PIL.Image submodule, so `PIL.Image.open` can raise AttributeError.
    from PIL import Image

    img = Image.open(image)
    # Force RGB so PNG encoding succeeds regardless of the source mode
    # (e.g. palette or RGBA inputs).
    img = img.convert("RGB")
    img_byte_arr = io.BytesIO()
    img.save(img_byte_arr, format='PNG')
    img_str = base64.b64encode(img_byte_arr.getvalue()).decode("utf-8")
    return img_str
|
|
|
|
|
|
|
|
|
|
|
|
async def base64_to_image(base642) -> File:
    """Decode a base64 string into a Discord attachment named image.png."""
    raw = base64.b64decode(base642)
    buffer = io.BytesIO(raw)
    return File(buffer, filename="image.png")
|
|
|
|
|
|
|
|
async def get_gpuserver_status(url) -> Dict:
    """Query the Stable Diffusion backend's memory endpoint and summarize it.

    Args:
        url: backend URL. NOTE(review): this parameter is currently ignored
            and the backend address is hard-coded below (matching the
            original behavior) — confirm whether callers expect it honored.

    Returns:
        {"status": "online", "system_memory_usage": <GB>,
         "cuda_memory_usage": <GB>, "oom_count": <int>} on success,
        otherwise {"status": "offline"}.
    """
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get("http://172.30.1.49:7860/sdapi/v1/memory", timeout=10) as response:
                if response.status != 200:
                    # BUGFIX: a non-200 response previously fell through and
                    # the coroutine implicitly returned None instead of a
                    # status dict.
                    return {"status": "offline"}
                result = await response.json()
                memstatus = result["ram"]["used"]
                cudamemstatus = result["cuda"]["system"]["used"]
                oomcount = result["cuda"]["events"]["oom"]
                return {
                    "status": "online",
                    "system_memory_usage": bytes_to_gb(memstatus),
                    "cuda_memory_usage": bytes_to_gb(cudamemstatus),
                    "oom_count": oomcount,
                }
        except Exception:
            # Any network/timeout/parse failure is reported as offline.
            return {"status": "offline"}
|
|
|
|
|
|
|
|
def bytes_to_gb(bytes: int) -> float:
    """Convert a byte count to gigabytes (GiB), rounded to two decimal places."""
    gigabytes = bytes / (1024 ** 3)
    return round(gigabytes, 2)
|
|
|
|
|
|
|
|
|
|
|
|
async def Get_Backend_latency(url: str = "http://172.30.1.49:7860/sdapi/v1/memory"):
    """Measure the round-trip latency to the backend's memory endpoint.

    Args:
        url: endpoint to probe; defaults to the previously hard-coded
            backend address, so existing callers are unaffected.

    Returns:
        Elapsed seconds rounded to 2 decimal places if the server answered
        with 200 or 404 (either proves the server is reachable), else None.
    """
    start_time = time.time()
    async with aiohttp.ClientSession() as client:
        try:
            async with client.get(url, timeout=10) as response:
                # BUGFIX: aiohttp responses expose `.status`, not
                # `.status_code` (a `requests` attribute) — the original
                # always raised AttributeError here and returned None via
                # the except branch, so latency was never reported.
                if response.status == 200 or response.status == 404:
                    return round(time.time() - start_time, 2)
                return None
        except Exception:
            return None
|