1.0.0 Initial Release

Co-authored-by: tmddn3070 <tmddn3070@gmail.com>
tmddn3070 2024-05-19 22:17:09 +09:00
parent e06b2f795e
commit 74964b3195
23 changed files with 647 additions and 379 deletions

22
.env Normal file

@ -0,0 +1,22 @@
##DATABASE
#MongoDB
MONGODB_URI=mongodb://tmddn3070:ss080826@localhost:27017,
MONGODB_NAME=RUNABOT,
#Redis
REDIS_URI=redis://localhost:6379,
##SERVER
MASTER_API_KEY=masterkey,
LISTEN_PORT=3000,
LISTEN_HOST=localhost,
API_KEYS=key1|key2|key3,
##IMAGE GENERATE
#1.5
SD15_SERVERS="http://100.64.0.45:7000|http://100.64.0.24:7860",
SDXL_SERVERS=http://100.64.0.45:7001,
#PATH
MODEL_PATH="BoraML/Modules/Analyze/Model/TaggingModel.onnx"
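A minimal sketch (not part of this commit) of how these pipe-separated values can be loaded and split in Python with python-dotenv; the variable names come from this .env, while the helper name and the stripping of stray quotes/commas are assumptions:

import os
import random

import dotenv

dotenv.load_dotenv(dotenv.find_dotenv())


def _split(name: str) -> list[str]:
    # Split a pipe-separated value and strip stray quotes/commas left in the .env values.
    raw = os.getenv(name, "")
    return [part.strip().strip('",') for part in raw.split("|") if part.strip()]


api_keys = _split("API_KEYS")          # e.g. ['key1', 'key2', 'key3']
sd15_servers = _split("SD15_SERVERS")  # one or more txt2img backends

print(api_keys)
print(random.choice(sd15_servers))     # pick one backend per request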


@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DeveloperToolsToolWindowSettingsV1" lastSelectedContentNodeId="ulid-generator">
<developerToolsConfigurations />
</component>
</project>

15
.idea/git_toolbox_prj.xml Normal file

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GitToolBoxProjectSettings">
<option name="commitMessageIssueKeyValidationOverride">
<BoolValueOverride>
<option name="enabled" value="true" />
</BoolValueOverride>
</option>
<option name="commitMessageValidationEnabledOverride">
<BoolValueOverride>
<option name="enabled" value="true" />
</BoolValueOverride>
</option>
</component>
</project>


@ -0,0 +1,32 @@
import ujson
import sentry_sdk
from BoraML.Modules.Logging import Logging
from BoraML.Models.Analyze import Analyze_OUTPUT, Analyze_INPUT
from BoraML.Modules.Analyze import Analyze


class Analyzer:
    def __init__(self):
        self.analyzer = Analyze()

    async def analyze(self, data: Analyze_INPUT) -> Analyze_OUTPUT:
        try:
            result = await self.analyzer.predict(data.image)
            file = open("BoraML/Modules/Analyze/Model/Translated.json", "r")
            json_file = ujson.load(file)
            for key in list(result.tag.keys()):
                if key in json_file:
                    result.tag[json_file[key]] = result.tag.pop(key)
            for key in list(result.character.keys()):
                if key in json_file:
                    result.character[json_file[key]] = result.character.pop(key)
            if result.rating in ["explicit", "questionable"]:
                result.nsfw = True
            return Analyze_OUTPUT(status=1, message="Success", tags=result.tag, rating=result.rating, character=result.character, nsfw=result.nsfw)
        except Exception as e:
            Logging().error(str(e))
            sentry_sdk.capture_exception(e)
            return Analyze_OUTPUT(status=-1, message=str(e), tags=None, rating=None, character=None, nsfw=None)


@ -0,0 +1,52 @@
from fastapi import APIRouter, HTTPException, status, Security, File, UploadFile
from BoraML.Models.ImageGenerate import ImageGenerate_XL_INPUT, ImageGenerate_SD15_INPUT, ImageGenerate_OUTPUT
from BoraML.Modules.ImageGenerate.SDXL import SDXL_Generation
from BoraML.Modules.ImageGenerate.SD15 import SD15_Generation


class ImageGenerate:
    def __init__(self):
        self.sdxl = SDXL_Generation()
        self.sd15 = SD15_Generation()

    async def generate_xl(self, data: ImageGenerate_XL_INPUT) -> ImageGenerate_OUTPUT:
        try:
            result = await self.sdxl.generate(
                data.positive,
                data.negative,
                data.sampler,
                data.resolution,
                data.afterprocess,
                data.style,
                data.seed,
                data.steps,
                data.cfg_scale,
                data.nsfw,
            )
            return result
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )

    async def generate_sd15(self, data: ImageGenerate_SD15_INPUT) -> ImageGenerate_OUTPUT:
        try:
            result = await self.sd15.generate(
                data.positive,
                data.negative,
                data.res,
                data.afterprocess,
                data.style,
                data.seed,
                data.steps,
                data.cfg_scale,
                data.nsfw,
            )
            return result
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )


@ -0,0 +1,20 @@
import os
import dotenv
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection

dotenv.load_dotenv(dotenv.find_dotenv())


class DBConnection:
    def __init__(self):
        self.client = AsyncIOMotorClient(os.getenv("MONGODB_URI"))
        self.db = self.client[os.getenv("MONGODB_NAME")]

    async def get(self, collection: str) -> AsyncIOMotorCollection:
        """
        Takes a collection name and returns a connection to that collection.
        :param collection:
        :return: AsyncIOMotorCollection
        """
        return self.db[collection]
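A short usage sketch for this class (not part of the commit); the import path BoraML.Database is an assumption, since the file header for this diff is not shown:

import asyncio

# Assumed module path; the diff above does not show where DBConnection lives.
from BoraML.Database import DBConnection


async def main():
    db = DBConnection()
    jobs = await db.get("jobs")                 # AsyncIOMotorCollection
    await jobs.insert_one({"status": "queued"})
    print(await jobs.count_documents({}))


asyncio.run(main())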


@ -3,22 +3,17 @@ import fastapi
from fastapi import FastAPI
from fastapi.middleware import Middleware
from typing import Any, Dict, Callable, Coroutine
from BoraML.Modules.Logging import Logging
class LoggingMiddleware:
def __init__(self, app: FastAPI):
self.app = app
self.logger = Logging()
async def __call__(self, request: fastapi.Request, call_next):
self.logger.info(f"Request({request.client.host}): {request.method} {request.url} Args: {request.query_params} Body: {await request.body()}")
def LoggingMiddleware(app: FastAPI):
@app.middleware("http")
async def logging_middleware(request: fastapi.Request, call_next: Callable[..., Coroutine[Any, Any, Any]], *args, **kwargs):
try:
response = await call_next(request)
self.logger.info(f"Response: {response.status_code}")
logger = Logging()
logger.info(f"Request: {request.url.path} {request.method} Request Body: {await request.body()}")
logger.info(f"Response: {response}")
return response
def attach(self):
self.app.add_middleware(LoggingMiddleware)
except Exception as e:
pass
return app


@ -1,24 +1,19 @@
import fastapi
import sentry_sdk
from typing import Any, Dict, Callable, Coroutine
from fastapi import FastAPI
from BoraML.Modules.Logging import Logging
class SentryMiddleware:
def __init__(self, app: FastAPI):
self.app = app
self.logger = Logging()
async def __call__(self, request: fastapi.Request, call_next):
def SentryMiddleware(app: FastAPI):
@app.middleware("http")
async def sentry_middleware(request, call_next):
try:
response = await call_next(request)
return response
except Exception as e:
sentry_sdk.capture_exception(e)
self.logger.error(f"Exception: {e}")
raise
return response
def attach(self):
self.app.add_middleware(SentryMiddleware)
raise e
return app
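Both middlewares are now plain functions that register an @app.middleware("http") handler and return the app, so they are applied by wrapping the app object. A minimal sketch (not part of the commit) mirroring the commented-out lines in the application module further below:

from fastapi import FastAPI

from BoraML.Middlewares.LoggingMiddleware import LoggingMiddleware
from BoraML.Middlewares.SentryMiddleware import SentryMiddleware

app = FastAPI()
app = SentryMiddleware(app)   # registers the http middleware and returns the same app
app = LoggingMiddleware(app)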

16
BoraML/Models/Analyze.py Normal file

@ -0,0 +1,16 @@
from typing import List, Tuple, Union, Optional
from pydantic import BaseModel, Field
from fastapi import UploadFile


class Analyze_INPUT(BaseModel):
    image: UploadFile = Field(..., title="Image", description="Image to analyze.")
    tag_threshold: Optional[float] = Field(0.35, title="Tag Threshold", description="Tag Threshold for the AI to analyze the image.")
    character_threshold: Optional[float] = Field(0.85, title="Rating Threshold", description="Rating Threshold for the AI to analyze the image.")


class Analyze_OUTPUT(BaseModel):
    status: int = Field(..., title="Status", description="Status of the image analysis.")
    message: str = Field(..., title="Message", description="Message of the image analysis.")
    tags: Union[List[str], None] = Field(None, title="Tags", description="Tags of the image analysis.")
    rating: Union[str, None] = Field(None, title="Rating", description="Rating of the image analysis.")
    character: Union[str, None] = Field(None, title="Character", description="Character of the image analysis.")
    nsfw: Union[bool, None] = Field(None, title="NSFW", description="NSFW of the image analysis.")


@ -0,0 +1,37 @@
from typing import List, Tuple, Union, Optional, Dict
from pydantic import BaseModel, Field


class ImageGenerate_XL_INPUT(BaseModel):
    positive: str = Field(..., title="Positive Prompt", description="Positive Prompt for the AI to generate the image.")
    negative: str = Field(..., title="Negative Prompt", description="Negative Prompt for the AI to generate the image.")
    sampler: str = Field(..., title="Sampler Index", description="Sampler Index for the AI to generate the image.")
    resolution: List[int] = Field(..., title="Resolution", description="Resolution for the AI to generate the image.")
    afterprocess: Optional[bool] = Field(False, title="Afterprocess",
                                         description="Afterprocess for the AI to generate the image.")
    style: Optional[dict] = Field(None, title="Style", description="Style for the AI to generate the image.")
    seed: Optional[int] = Field(None, title="Seed", description="Seed for the AI to generate the image.")
    steps: Optional[int] = Field(28, title="Steps", description="Steps for the AI to generate the image.")
    cfg_scale: Optional[float] = Field(7, title="CFG Scale", description="CFG Scale for the AI to generate the image.")
    nsfw: Optional[bool] = Field(False, title="NSFW", description="NSFW for the AI to generate the image.")


class ImageGenerate_SD15_INPUT(BaseModel):
    positive: str = Field(..., title="Positive Prompt", description="Positive Prompt for the AI to generate the image.")
    negative: str = Field(..., title="Negative Prompt", description="Negative Prompt for the AI to generate the image.")
    res: List[int] = Field(..., title="Resolution", description="Resolution for the AI to generate the image.")
    afterprocess: Optional[bool] = Field(False, title="Afterprocess",
                                         description="Afterprocess for the AI to generate the image.")
    style: Optional[dict] = Field(None, title="Style", description="Style for the AI to generate the image.")
    seed: Optional[int] = Field(None, title="Seed", description="Seed for the AI to generate the image.")
    steps: Optional[int] = Field(28, title="Steps", description="Steps for the AI to generate the image.")
    cfg_scale: Optional[float] = Field(7, title="CFG Scale", description="CFG Scale for the AI to generate the image.")
    nsfw: Optional[bool] = Field(False, title="NSFW", description="NSFW for the AI to generate the image.")


class ImageGenerate_OUTPUT(BaseModel):
    status: int = Field(..., title="Status", description="Status of the image generation.")
    message: str = Field(..., title="Message", description="Message of the image generation.")
    image: Union[str, None] = Field(None, title="Image", description="Image of the image generation.")
    rating: Union[str, None] = Field(None, title="Rating", description="Rating of the image generation.")
    tags: Union[List[str], None, Dict[str, float]] = Field(None, title="Tags", description="Tags of the image generation.")
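A hedged example (not part of the commit) of building a /generate/xl request body against ImageGenerate_XL_INPUT; the prompt, resolution, and LoRA values are illustrative, and model_dump() assumes pydantic v2 as pulled in by FastAPI 0.111:

from BoraML.Models.ImageGenerate import ImageGenerate_XL_INPUT

req = ImageGenerate_XL_INPUT(
    positive="1girl, silver hair, smile",   # illustrative prompt
    negative="lowres",
    sampler="Euler a",
    resolution=[832, 1216],
    style={"example_lora": 0.8},            # hypothetical LoRA name -> weight
    steps=28,
    cfg_scale=7.0,
    nsfw=False,
)
print(req.model_dump())  # pydantic v2; use .dict() on pydantic v1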

23
BoraML/Modules/APIKey.py Normal file

@ -0,0 +1,23 @@
import os
import ujson
import aiohttp
import asyncio
import dotenv
import random
from fastapi import HTTPException, status, Security, FastAPI
from fastapi.security import APIKeyHeader, APIKeyQuery

dotenv.load_dotenv(dotenv.find_dotenv())

api_key_header = APIKeyHeader(name="x-api-key", auto_error=False)

API_KEYS = os.getenv("API_KEYS")  # pipe-separated list, e.g. "key1|key2|key3"
API_KEY_LIST = [str(key) for key in API_KEYS.split("|")]


def get_api_key(
        api_key_header: str = Security(api_key_header)):
    if api_key_header in API_KEY_LIST:
        return api_key_header
    else:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Could not validate credentials")


@ -8,9 +8,9 @@ import dotenv
import numpy as np
import onnxruntime as rt
import pandas as pd
from PIL import Image
from types import SimpleNamespace
from concurrent.futures import ThreadPoolExecutor
executor = ThreadPoolExecutor()
@ -43,9 +43,7 @@ class Analyze:
left, right = delta_w // 2, delta_w - (delta_w // 2)
color = [255, 255, 255]
new_im = cv2.copyMakeBorder(
img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color
)
new_im = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color)
return new_im
@staticmethod
@ -94,7 +92,6 @@ class Analyze:
labels = list(zip(tag_names, probs[0].astype(float)))
# Extract ratings, general, and character labels
ratings_names = [labels[i] for i in rating_indexes]
general_names = [labels[i] for i in general_indexes]
character_names = [labels[i] for i in character_indexes]
@ -106,18 +103,18 @@ class Analyze:
general_res_sorted = dict(sorted(general_res.items(), key=lambda item: item[1], reverse=True))
a = ", ".join(general_res_sorted.keys()).replace("_", " ").replace("(", "\(").replace(")", "\)")
c = ", ".join(general_res_sorted.keys())
#return a, c, rating, character_res, general_res_sorted
# return a, c, rating, character_res, general_res_sorted
return SimpleNamespace(
rating=rating,
tag=general_res_sorted,
character=character_res,
)
async def predict(self, image, general_threshold: typing.Optional[float] = None,
character_threshold: typing.Optional[float] = None) -> SimpleNamespace:
async def predict(
self, image, general_threshold: typing.Optional[float] = None, character_threshold: typing.Optional[float] = None
) -> SimpleNamespace:
if general_threshold is None:
general_threshold = self.general_threshold
if character_threshold is None:
character_threshold = self.character_threshold
return await asyncio.get_event_loop().run_in_executor(executor, self._predict_image, image, general_threshold,
character_threshold)
return await asyncio.get_event_loop().run_in_executor(executor, self._predict_image, image, general_threshold, character_threshold)


@ -2,158 +2,101 @@ import base64
import os
import random
import re
from typing import List
from typing import Dict, Any
import aiohttp
import dotenv
import orjson
import sentry_sdk
from aiogoogletrans import Translator
from better_profanity import profanity
from IMAPI.Module.Logging import Logger
from IMAPI.Module.Tagging import Tagging
from BoraML.Modules.Logging import Logging
from BoraML.Modules.ImageGenerate.utils import analyze_process
from BoraML.Models.ImageGenerate import ImageGenerate_OUTPUT
dotenv.load_dotenv(dotenv.find_dotenv())
translator = Translator()
async def _image_evulate(image: bytes) -> List[str]:
tag = Tagging()
predict = await tag.predict(image)
print(predict)
rating = max(predict[2], key=predict[2].get)
json_file = open("IMAPI/Module/Tagging/Model/Translated.json", "r")
orjson_file = orjson.loads(json_file.read())
json_file.close()
for key, value in list(predict[4].items()):
if key in orjson_file:
predict[4][orjson_file[key]] = predict[4].pop(key)
if rating == "explicit" or rating == "questionable":
return [True, rating, predict[4]]
return [False, rating, predict[4]]
BLOCKTAG = [
"nsfw",
"nude",
"nipples",
"nipple",
"pussy",
"public hair",
"gay",
"lesbian",
"corpse",
"no panties",
"no panty",
"no bra",
"bra",
"panty",
"panties",
"underwear",
"undergarment",
"underpants",
"underpant",
"blowjob",
"sex",
"sexy",
"pennis",
"realistic",
"open breasts",
"breasts",
"bikini",
"swimsuit",
"give birth",
"slave"
]
class Image_Generation_Legacy:
class SD15_Generation:
def __init__(self):
self.url = os.getenv("IMAGE_GENERATION_HOST")
self.block_tag = BLOCKTAG
self.url = os.getenv("SD15_SERVERS")
self.aiohttp_session = aiohttp.ClientSession()
async def _before_process(self, positive: str, negative: str, res: list, afterprocess: bool = False, style: dict = None, seed : int = random.randint(0, 1000000), steps : int = 25, cfg_scale: float = 7) -> dict:
try:
if Image_Generation_Legacy._is_korean(positive):
positive = await translator.translate(positive, src="ko", dest="en")
positive = positive.text
if Image_Generation_Legacy._is_korean(negative):
negative = await translator.translate(negative, src="ko", dest="en")
negative = negative.text
except Exception as e:
sentry_sdk.capture_exception(e)
Logger().log(f"Error occurred while translating: {e}", "ERROR", exception=e)
negative = "(KHFB, AuroraNegative, easynegative, negative_hand-neg, verybadimagenegative_v1.3:0.8), (Worst Quality, Low Quality:1.4), border, skimpy, grayscale, multiple_girls, 3d, realistic, string, multiple hands, chinese, thick abs, chubby abs, lowres, bad anatomy, asymmetric wings, elf, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, large areolae, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, Multiple legs, multiple feet, tie, (necktie:1.5), several hands, three feet, four legs, three legs,animal ears, nsfw, exposure, aissistxlneg" + negative
width = res[0]
height = res[1]
cfg_scale = round(cfg_scale, 1)
if style is not None:
for key, value in style.items():
positive = f"""{positive} <lora:{key}:{value}>"""
@staticmethod
def _is_korean(string):
return bool(re.search(r"[ㄱ-ㅎㅏ-ㅣ가-힣]", string))
async def _translate(self, text):
return (await translator.translate(text, src="ko", dest="en")).text if self._is_korean(text) else text
async def _before_process(
self,
positive: str,
negative: str,
res: list,
afterprocess: bool = False,
style: dict = None,
seed: int = random.randint(0, 1000000),
steps: int = 28,
cfg_scale: float = 7,
) -> dict:
positive = await self._translate(positive)
negative = await self._translate(negative)
negative = (
"(KHFB, AuroraNegative, easynegative, negative_hand-neg, verybadimagenegative_v1.3:0.8), (Worst Quality, Low Quality:1.4), border, skimpy, grayscale, multiple_girls, 3d, realistic, string, multiple hands, chinese, thick abs, chubby abs, lowres, bad anatomy, asymmetric wings, elf, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, large areolae, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, Multiple legs, multiple feet, tie, (necktie:1.5), several hands, three feet, four legs, three legs,animal ears, nsfw, exposure,"
+ negative
)
if style is not None:
positive += "".join([f" <lora:{key}:{value}>" for key, value in style.items()])
payload = {
"prompt" : f"""{positive}""",
"negative_prompt" : f"""{negative}""",
"seed" : seed,
"width" : width,
"height" : height,
"cfg_scale" : cfg_scale,
"steps" : steps,
"prompt": positive,
"negative_prompt": negative,
"seed": seed,
"width": res[0],
"height": res[1],
"cfg_scale": round(cfg_scale, 1),
"steps": steps,
"sampler_index": "DPM++ 2M Karras",
"refiner_checkpoint": "smooREFINERV2R10_half",
"refiner_switch_at": 0.45,
"alwayson_scripts": {
"ADetailer": {
'args': [
{
'ad_model': 'face_yolov8n.pt',
'ad_inpaint_only_masked': True
}]
"ADetailer": {"args": [{"ad_model": "face_yolov8n.pt", "ad_inpaint_only_masked": True}]},
"sonar": {"args": [{"sampler": "Euler a", "momentum": 0.95, "momentum_hist": 0.75}]},
},
"sonar" : {
'args' : [
{
'sampler' : 'Euler a',
'momentum' : 0.95,
'momentum_hist' : 0.75
}
]
}
}
}
if afterprocess:
additional = {
"enable_hr" : True,
"hr_scale" : 1.3,
"hr_upscaler": "R-ESRGAN 4x+ Anime6B",
"denoising_strength": 0.6
}
payload.update(additional)
payload.update({"enable_hr": True, "hr_scale": 1.3, "hr_upscaler": "R-ESRGAN 4x+ Anime6B", "denoising_strength": 0.6})
return payload
@staticmethod
def _is_korean(string):
pattern = re.compile(r"[ㄱ-ㅎㅏ-ㅣ가-힣]")
match = pattern.search(string)
return bool(match)
async def generate(self, positive: str, negative: str, res: list, is_afterprocess: bool = False, style: dict = None, seed : int = random.randint(0, 1000000), steps : int = 25, cfg_scale: float = 7, nsfw : bool = False) -> dict[str, str | int]:
async def generate(
self,
positive: str,
negative: str,
res: list,
is_afterprocess: bool = False,
style: dict = None,
seed: int = random.randint(0, 1000000),
steps: int = 28,
cfg_scale: float = 7,
nsfw: bool = False,
) -> ImageGenerate_OUTPUT | dict[str, None | str | int | Any] | dict[str, str | int | Any]:
if not nsfw:
for tag in self.block_tag:
if tag in positive:
positive = positive.replace(tag, "")
positive = profanity.censor(positive, " ")
payload = await self._before_process(positive, negative, res, is_afterprocess, style, seed, steps, cfg_scale)
response = await self.aiohttp_session.post(self.url + "/sdapi/v1/txt2img", json=payload, timeout=600)
url = random.choice(map(str, self.url.split("|")))
response = await self.aiohttp_session.post(url + "/sdapi/v1/txt2img", json=payload, timeout=300)
if response.status != 200:
sentry_sdk.capture_message(f"Image generation API ERROR returned {response.status} text: {await response.text()}")
Logger().log(f"Image generation API ERROR returned {response.status} text: {await response.text()}", "ERROR")
return {"status": -1, "message": await response.text(), "image": None, "rating": None, "tags": None, "nsfw": None}
Logging.error(f"Image generation API ERROR returned {response.status} text: {await response.text()}")
#return {"status": -1, "message": await response.text(), "image": None, "rating": None, "tags": None, "nsfw": None}
return ImageGenerate_OUTPUT(status=-1, message=await response.text(), image=None, rating=None, tags=None, nsfw=None)
else:
data = await response.json()
evulate = await _image_evulate(base64.b64decode(data["images"][0]))
if not nsfw:
if evulate[0] is True:
return {"status": 2, "message": "NSFW", "rating": evulate[1], "tags": evulate[2], "image": None}
return {"status": 1, "image": data["images"][0], "rating": evulate[1], "tags": evulate[2], "message" : "success"}
evulate = await analyze_process(base64.b64decode(data["images"][0]), "BoraML/Modules/Analyze/Model/Translated.json")
if not nsfw and evulate.nsfw is True:
#return {"status": 2, "message": "NSFW", "rating": evulate[1], "tags": evulate[2], "image": None}
return ImageGenerate_OUTPUT(status=2, message="NSFW", rating=evulate.rating, tags=evulate.tag, image=None)
#return {"status": 1, "image": data["images"][0], "rating": evulate[1], "tags": evulate[2], "message": "success"}
return ImageGenerate_OUTPUT(status=1, image=data["images"][0], rating=evulate.rating, tags=evulate.tag, message="success")
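A usage sketch (not part of the commit) that drives SD15_Generation directly; it assumes SD15_SERVERS in .env points at a reachable AUTOMATIC1111-style txt2img backend, and the prompt values are illustrative:

import asyncio

from BoraML.Modules.ImageGenerate.SD15 import SD15_Generation


async def main():
    gen = SD15_Generation()            # reads SD15_SERVERS and opens an aiohttp session
    result = await gen.generate(
        positive="1girl, smile",       # illustrative prompt
        negative="lowres",
        res=[512, 768],
        is_afterprocess=True,
        steps=28,
        cfg_scale=7.0,
        nsfw=False,
    )
    print(result.status, result.message)


asyncio.run(main())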


@ -2,164 +2,116 @@ import base64
import os
import random
import re
from typing import List
import aiohttp
import dotenv
import orjson
import sentry_sdk
from aiogoogletrans import Translator
from better_profanity import profanity
from BoraML.Modules.Logging import Logging
from BoraML.Modules.Analyze import Analyze
from BoraML.Modules.ImageGenerate.utils import analyze_process
from BoraML.Models.ImageGenerate import ImageGenerate_OUTPUT
dotenv.load_dotenv(dotenv.find_dotenv())
translator = Translator()
async def _image_evulate(image: bytes) -> List[str]:
tag = Tagging()
predict = await tag.predict(image)
print(predict)
rating = max(predict[2], key=predict[2].get)
json_file = open("IMAPI/Module/Tagging/Model/Translated.json", "r")
orjson_file = orjson.loads(json_file.read())
json_file.close()
for key, value in list(predict[4].items()):
if key in orjson_file:
predict[4][orjson_file[key]] = predict[4].pop(key)
if rating == "explicit" or rating == "questionable":
return [True, rating, predict[4]]
return [False, rating, predict[4]]
BLOCKTAG = [
"nsfw",
"nude",
"nipples",
"nipple",
"pussy",
"public hair",
"gay",
"lesbian",
"corpse",
"no panties",
"no panty",
"no bra",
"bra",
"panty",
"panties",
"underwear",
"undergarment",
"underpants",
"underpant",
"blowjob",
"sex",
"sexy",
"pennis",
"realistic",
"open breasts",
"breasts",
"bikini",
"swimsuit",
"give birth",
"slave"
]
class Image_Generation:
class SDXL_Generation:
def __init__(self):
self.url = "http://100.64.0.45:7001"
self.block_tag = BLOCKTAG
self.url = os.getenv("SDXL_SERVERS")
print(self.url)
self.aiohttp_session = aiohttp.ClientSession()
async def _before_process(self, positive: str, negative: str, res: list, afterprocess: bool = False, style: dict = None, seed : int = random.randint(0, 1000000), steps : int = 28, cfg_scale: float = 7) -> dict:
try:
if Image_Generation._is_korean(positive):
positive = await translator.translate(positive, src="ko", dest="en")
positive = positive.text
if Image_Generation._is_korean(negative):
negative = await translator.translate(negative, src="ko", dest="en")
negative = negative.text
except Exception as e:
sentry_sdk.capture_exception(e)
Logger().log(f"Error occurred while translating: {e}", "ERROR", exception=e)
negative = "(worst quality:1.3), low quality, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], nsfw, explicit, nude" + "," + negative
width = res[0]
height = res[1]
cfg_scale = round(cfg_scale, 1)
@staticmethod
def _is_korean(string):
return bool(re.search(r"[ㄱ-ㅎㅏ-ㅣ가-힣]", string))
async def _translate(self, text):
return (await translator.translate(text, src="ko", dest="en")).text if self._is_korean(text) else text
async def _before_process(
self,
positive: str,
negative: str,
sampler: str,
res: list,
afterprocess: bool = False,
style: dict = None,
seed: int = random.randint(0, 1000000),
steps: int = 28,
cfg_scale: float = 7,
) -> dict:
positive = await self._translate(positive)
negative = await self._translate(negative)
negative = (
"(worst quality:1.3), low quality, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], nsfw, explicit, nude, aissistxlneg"
+ negative
)
if style is not None:
for key, value in style.items():
positive = f"""{positive} <lora:{key}:{value}>"""
positive = f"""{positive}""" + " , " + "masterpiece, face, best quality, safe"
positive += "".join([f" <lora:{key}:{value}>" for key, value in style.items()])
positive += ",masterpiece, face, best quality, safe"
payload = {
"prompt" : f"""{positive}""",
"negative_prompt" : f"""{negative}""",
"seed" : seed,
"width" : width,
"height" : height,
"cfg_scale" : cfg_scale,
"steps" : steps,
"sampler_index": "Euler a",
"prompt": positive,
"negative_prompt": negative,
"seed": seed,
"width": res[0],
"height": res[1],
"cfg_scale": round(cfg_scale, 1),
"steps": steps,
"sampler_index": sampler,
"refiner_checkpoint": "animagine-xl-3.1",
"refiner_switch_at": 0.4,
"alwayson_scripts": {
"ADetailer": {
'args': [
"args": [
{
'ad_model': 'face_yolov8n.pt',
'ad_inpaint_only_masked': True,
'ad_use_cfg_scale': True,
'ad_cfg_scale': 7,
'ad_use_checkpoint': True,
'ad_checkpoint': "waifuart"
}]
},
"sonar" : {
'args' : [
{
'sampler' : 'Euler a',
'momentum' : 0.95,
'momentum_hist' : 0.75
"ad_model": "face_yolov8n.pt",
"ad_inpaint_only_masked": True,
"ad_use_cfg_scale": True,
"ad_cfg_scale": 7,
"ad_use_checkpoint": True,
"ad_checkpoint": "waifuart",
}
]
}
}
},
"sonar": {"args": [{"sampler": "Euler a", "momentum": 0.95, "momentum_hist": 0.75}]},
},
}
if afterprocess:
additional = {
"enable_hr" : True,
"hr_scale" : 1.5,
"hr_upscaler": "R-ESRGAN 4x+ Anime6B",
"denoising_strength": 0.6
}
payload.update(additional)
payload.update({"enable_hr": True, "hr_scale": 1.9, "hr_upscaler": "R-ESRGAN 4x+ Anime6B", "denoising_strength": 0.6})
return payload
@staticmethod
def _is_korean(string):
pattern = re.compile(r"[ㄱ-ㅎㅏ-ㅣ가-힣]")
match = pattern.search(string)
return bool(match)
async def generate(self, positive: str, negative: str, res: list, is_afterprocess: bool = False, style: dict = None, seed : int = random.randint(0, 1000000), steps : int = 28, cfg_scale: float = 7, nsfw : bool = False) -> dict[str, str | int]:
async def generate(
self,
positive: str,
negative: str,
sampler: str,
res: list,
is_afterprocess: bool = False,
style: dict = None,
seed: int = random.randint(0, 1000000),
steps: int = 28,
cfg_scale: float = 7,
nsfw: bool = False,
) -> ImageGenerate_OUTPUT:
if not nsfw:
for tag in self.block_tag:
if tag in positive:
positive = positive.replace(tag, "")
payload = await self._before_process(positive, negative, res, is_afterprocess, style, seed, steps, cfg_scale)
print(payload)
response = await self.aiohttp_session.post(self.url + "/sdapi/v1/txt2img", json=payload, timeout=300)
positive = profanity.censor(positive, " ")
payload = await self._before_process(positive, negative, sampler, res, is_afterprocess, style, seed, steps, cfg_scale)
if self.url.find("|"):
url = self.url.replace(",", "")
else:
url = random.choice(self.url.split("|")).replace(",", "")
response = await self.aiohttp_session.post(url + "/sdapi/v1/txt2img", json=payload, timeout=300)
if response.status != 200:
sentry_sdk.capture_message(f"Image generation API ERROR returned {response.status} text: {await response.text()}")
Logger().log(f"Image generation API ERROR returned {response.status} text: {await response.text()}", "ERROR")
return {"status": -1, "message": await response.text(), "image": None, "rating": None, "tags": None, "nsfw": None}
Logging.error(f"Image generation API ERROR returned {response.status} text: {await response.text()}")
#return {"status": -1, "message": await response.text(), "image": None, "rating": None, "tags": None}
return ImageGenerate_OUTPUT(status=-1, message=await response.text(), image=None, rating=None, tags=None)
else:
data = await response.json()
evulate = await _image_evulate(base64.b64decode(data["images"][0]))
if not nsfw:
if evulate[0] is True:
return {"status": 2, "message": "NSFW", "rating": evulate[1], "tags": evulate[2], "image": None}
return {"status": 1, "image": data["images"][0], "rating": evulate[1], "tags": evulate[2], "message" : "success"}
evulate = await analyze_process(base64.b64decode(data["images"][0]), "BoraML/Modules/Analyze/Model/Translated.json")
if not nsfw and evulate.nsfw is True:
#return {"status": 2, "message": "NSFW", "rating": evulate[1], "tags": evulate[2], "image": None}
return ImageGenerate_OUTPUT(status=2, message="NSFW", rating=evulate.rating, tags=evulate.tag, image=None)
#return {"status": 1, "image": data["images"][0], "rating": evulate[1], "tags": evulate[2], "message": "success"}
return ImageGenerate_OUTPUT(status=1, image=data["images"][0], rating=evulate.rating, tags=evulate.tag, message="success")


@ -1,7 +1,6 @@
import sentry_sdk
import ujson
from types import SimpleNamespace
from BoraML.Modules.Analyze import Analyze
from BoraML.Modules.Logging import Logging
@ -11,3 +10,13 @@ async def analyze_process(images: bytes, translate_json_path: str):
try:
result = await analyzer.predict(images)
rating = max(result.rating, key=result.rating.get)
with open(translate_json_path, "r") as file:
json_file = ujson.load(file)
for key in list(result.tag.keys()):
if key in json_file:
result.tag[json_file[key]] = result.tag.pop(key)
nsfw = rating in ["explicit", "questionable"]
return SimpleNamespace(nsfw=nsfw, rating=rating, tag=result.tag)
except Exception as e:
Logging().error(str(e))
sentry_sdk.capture_exception(e)
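A small sketch (not part of the commit) showing how analyze_process can be called on a local image; sample.png is a placeholder path:

import asyncio

from BoraML.Modules.ImageGenerate.utils import analyze_process


async def main():
    with open("sample.png", "rb") as f:        # placeholder path to any local image
        image_bytes = f.read()
    result = await analyze_process(image_bytes, "BoraML/Modules/Analyze/Model/Translated.json")
    print(result.nsfw, result.rating)
    print(dict(list(result.tag.items())[:5]))  # five highest-confidence tags


asyncio.run(main())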


@ -1,6 +1,5 @@
import sentry_sdk
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.loguru import LoguruIntegration
@ -11,13 +10,7 @@ class Sentry:
def __init__(self, dsn: str):
self.sentry = sentry_sdk.init(
dsn=dsn,
integrations=[
StarletteIntegration(),
FastApiIntegration(),
RedisIntegration(),
LoguruIntegration(),
AsyncioIntegration()
],
integrations=[FastApiIntegration(), RedisIntegration(), LoguruIntegration(), AsyncioIntegration()],
traces_sample_rate=1.0,
enable_tracing=True
enable_tracing=True,
)

0
BoraML/Modules/json.py Normal file

25
BoraML/Routes/Analyze.py Normal file

@ -0,0 +1,25 @@
import sentry_sdk
from fastapi import APIRouter, HTTPException, status, Security, File, UploadFile
from BoraML.Modules.Logging import Logging
from BoraML.Modules.APIKey import get_api_key
from BoraML.Controllers.Analyze import Analyzer
from BoraML.Models.Analyze import Analyze_INPUT, Analyze_OUTPUT

router = APIRouter()
analyzer = Analyzer()


@router.post("/", response_model=Analyze_OUTPUT)
async def analyze(data: Analyze_INPUT, api_key: str = Security(get_api_key)):
    try:
        result = await analyzer.analyze(data)
        return result
    except Exception as e:
        Logging().error(str(e))
        sentry_sdk.capture_exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        )


@ -0,0 +1,39 @@
import sentry_sdk
from fastapi import APIRouter, HTTPException, status, Security, File, UploadFile
from BoraML.Modules.Logging import Logging
from BoraML.Modules.APIKey import get_api_key
from BoraML.Controllers.ImageGenerate import ImageGenerate
from BoraML.Models.ImageGenerate import ImageGenerate_XL_INPUT, ImageGenerate_SD15_INPUT, ImageGenerate_OUTPUT

router = APIRouter()
image_generate = ImageGenerate()


@router.post("/xl", response_model=ImageGenerate_OUTPUT)
async def generate_xl(data: ImageGenerate_XL_INPUT, api_key: str = Security(get_api_key)):
    try:
        result = await image_generate.generate_xl(data)
        return result
    except Exception as e:
        Logging().error(str(e))
        sentry_sdk.capture_exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        )


@router.post("/sd15", response_model=ImageGenerate_OUTPUT)
async def generate_sd15(data: ImageGenerate_SD15_INPUT, api_key: str = Security(get_api_key)):
    try:
        result = await image_generate.generate_sd15(data)
        return result
    except Exception as e:
        Logging().error(str(e))
        sentry_sdk.capture_exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        )
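An end-to-end client sketch (not part of the commit) for the /generate/xl route; it assumes the API is served on the LISTEN_HOST/LISTEN_PORT values from .env and that "key1" is one of the configured API_KEYS, with illustrative prompt values:

import asyncio

import aiohttp


async def main():
    payload = {
        "positive": "1girl, silver hair, smile",   # illustrative prompt
        "negative": "lowres",
        "sampler": "Euler a",
        "resolution": [832, 1216],
        "steps": 28,
        "cfg_scale": 7.0,
        "nsfw": False,
    }
    headers = {"x-api-key": "key1"}                # must match one of API_KEYS in .env
    async with aiohttp.ClientSession() as session:
        async with session.post("http://localhost:3000/generate/xl", json=payload, headers=headers) as resp:
            print(resp.status)
            print(await resp.json())


asyncio.run(main())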


@ -0,0 +1,9 @@
from fastapi import APIRouter
from BoraML.Routes.ImageGenerate import router as image_generate_router
from BoraML.Routes.Analyze import router as analyze_router
router = APIRouter()
router.include_router(image_generate_router, tags=["Image Generation"], prefix="/generate")
router.include_router(analyze_router, tags=["Image Analysis"], prefix="/analyze")


@ -1,2 +1,38 @@
from fastapi import FastAPI, Request, Response, HTTPException, status, Depends, APIRouter
from fastapi.responses import JSONResponse
from fastapi.openapi.docs import get_swagger_ui_html
from functools import partial
from BoraML.Modules.Logging import Logging
from BoraML.Middlewares.LoggingMiddleware import LoggingMiddleware
from BoraML.Middlewares.SentryMiddleware import SentryMiddleware
from BoraML.Routes import router as routes

app = FastAPI(
    title="BoraML",
    description="Machine Learning API",
    version="0.1.0",
)

#app = SentryMiddleware(app)
#app = LoggingMiddleware(app)


@app.get("/")
async def root():
    return {"message": "Hello World"}


@app.get("/docs")
async def get_documentation():
    return get_swagger_ui_html(openapi_url="/openapi.json", title="docs")


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    return JSONResponse(
        status_code=exc.status_code,
        content={"message": exc.detail},
    )


app.include_router(routes)
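The commit does not show how the app is served; a hedged launch sketch, assuming the FastAPI instance above is importable as "BoraML:app" and that LISTEN_HOST/LISTEN_PORT from .env are meant for uvicorn:

import os

import dotenv
import uvicorn

dotenv.load_dotenv(dotenv.find_dotenv())

if __name__ == "__main__":
    # "BoraML:app" is an assumed import path for the FastAPI instance defined above.
    uvicorn.run(
        "BoraML:app",
        host=os.getenv("LISTEN_HOST", "localhost"),
        port=int(os.getenv("LISTEN_PORT", "3000")),
    )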

190
poetry.lock generated

@ -1016,6 +1016,17 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "joblib"
version = "1.4.2"
description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.8"
files = [
{file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
]
[[package]]
name = "korcen"
version = "0.3.8"
@ -1211,6 +1222,30 @@ files = [
intel-openmp = "==2021.*"
tbb = "==2021.*"
[[package]]
name = "motor"
version = "3.4.0"
description = "Non-blocking MongoDB driver for Tornado or asyncio"
optional = false
python-versions = ">=3.7"
files = [
{file = "motor-3.4.0-py3-none-any.whl", hash = "sha256:4b1e1a0cc5116ff73be2c080a72da078f2bb719b53bc7a6bb9e9a2f7dcd421ed"},
{file = "motor-3.4.0.tar.gz", hash = "sha256:c89b4e4eb2e711345e91c7c9b122cb68cce0e5e869ed0387dd0acb10775e3131"},
]
[package.dependencies]
pymongo = ">=4.5,<5"
[package.extras]
aws = ["pymongo[aws] (>=4.5,<5)"]
encryption = ["pymongo[encryption] (>=4.5,<5)"]
gssapi = ["pymongo[gssapi] (>=4.5,<5)"]
ocsp = ["pymongo[ocsp] (>=4.5,<5)"]
snappy = ["pymongo[snappy] (>=4.5,<5)"]
srv = ["pymongo[srv] (>=4.5,<5)"]
test = ["aiohttp (!=3.8.6)", "mockupdb", "motor[encryption]", "pytest (>=7)", "tornado (>=5)"]
zstd = ["pymongo[zstd] (>=4.5,<5)"]
[[package]]
name = "mpmath"
version = "1.3.0"
@ -2161,6 +2196,87 @@ files = [
[package.extras]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pymongo"
version = "4.7.2"
description = "Python driver for MongoDB <http://www.mongodb.org>"
optional = false
python-versions = ">=3.7"
files = [
{file = "pymongo-4.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:268d8578c0500012140c5460755ea405cbfe541ef47c81efa9d6744f0f99aeca"},
{file = "pymongo-4.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:827611beb6c483260d520cfa6a49662d980dfa5368a04296f65fa39e78fccea7"},
{file = "pymongo-4.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a754e366c404d19ff3f077ddeed64be31e0bb515e04f502bf11987f1baa55a16"},
{file = "pymongo-4.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c44efab10d9a3db920530f7bcb26af8f408b7273d2f0214081d3891979726328"},
{file = "pymongo-4.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35b3f0c7d49724859d4df5f0445818d525824a6cd55074c42573d9b50764df67"},
{file = "pymongo-4.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e37faf298a37ffb3e0809e77fbbb0a32b6a2d18a83c59cfc2a7b794ea1136b0"},
{file = "pymongo-4.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1bcd58669e56c08f1e72c5758868b5df169fe267501c949ee83c418e9df9155"},
{file = "pymongo-4.7.2-cp310-cp310-win32.whl", hash = "sha256:c72d16fede22efe7cdd1f422e8da15760e9498024040429362886f946c10fe95"},
{file = "pymongo-4.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:12d1fef77d25640cb78893d07ff7d2fac4c4461d8eec45bd3b9ad491a1115d6e"},
{file = "pymongo-4.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc5af24fcf5fc6f7f40d65446400d45dd12bea933d0299dc9e90c5b22197f1e9"},
{file = "pymongo-4.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:730778b6f0964b164c187289f906bbc84cb0524df285b7a85aa355bbec43eb21"},
{file = "pymongo-4.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47a1a4832ef2f4346dcd1a10a36ade7367ad6905929ddb476459abb4fd1b98cb"},
{file = "pymongo-4.7.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6eab12c6385526d386543d6823b07187fefba028f0da216506e00f0e1855119"},
{file = "pymongo-4.7.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37e9ea81fa59ee9274457ed7d59b6c27f6f2a5fe8e26f184ecf58ea52a019cb8"},
{file = "pymongo-4.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e9d9d2c0aae73aa4369bd373ac2ac59f02c46d4e56c4b6d6e250cfe85f76802"},
{file = "pymongo-4.7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6e00a79dff22c9a72212ad82021b54bdb3b85f38a85f4fc466bde581d7d17a"},
{file = "pymongo-4.7.2-cp311-cp311-win32.whl", hash = "sha256:02efd1bb3397e24ef2af45923888b41a378ce00cb3a4259c5f4fc3c70497a22f"},
{file = "pymongo-4.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:87bb453ac3eb44db95cb6d5a616fbc906c1c00661eec7f55696253a6245beb8a"},
{file = "pymongo-4.7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:12c466e02133b7f8f4ff1045c6b5916215c5f7923bc83fd6e28e290cba18f9f6"},
{file = "pymongo-4.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f91073049c43d14e66696970dd708d319b86ee57ef9af359294eee072abaac79"},
{file = "pymongo-4.7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87032f818bf5052ab742812c715eff896621385c43f8f97cdd37d15b5d394e95"},
{file = "pymongo-4.7.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a87eef394039765679f75c6a47455a4030870341cb76eafc349c5944408c882"},
{file = "pymongo-4.7.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d275596f840018858757561840767b39272ac96436fcb54f5cac6d245393fd97"},
{file = "pymongo-4.7.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82102e353be13f1a6769660dd88115b1da382447672ba1c2662a0fbe3df1d861"},
{file = "pymongo-4.7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194065c9d445017b3c82fb85f89aa2055464a080bde604010dc8eb932a6b3c95"},
{file = "pymongo-4.7.2-cp312-cp312-win32.whl", hash = "sha256:db4380d1e69fdad1044a4b8f3bb105200542c49a0dde93452d938ff9db1d6d29"},
{file = "pymongo-4.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:fadc6e8db7707c861ebe25b13ad6aca19ea4d2c56bf04a26691f46c23dadf6e4"},
{file = "pymongo-4.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2cb77d09bd012cb4b30636e7e38d00b5f9be5eb521c364bde66490c45ee6c4b4"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bf8b706946952acdea0fe478f8e44f1ed101c4b87f046859e6c3abe6c0a9f4"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcf337d1b252405779d9c79978d6ca15eab3cdaa2f44c100a79221bddad97c8a"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ffd1519edbe311df73c74ec338de7d294af535b2748191c866ea3a7c484cd15"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d59776f435564159196d971aa89422ead878174aff8fe18e06d9a0bc6d648c"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:347c49cf7f0ba49ea87c1a5a1984187ecc5516b7c753f31938bf7b37462824fd"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:84bc00200c3cbb6c98a2bb964c9e8284b641e4a33cf10c802390552575ee21de"},
{file = "pymongo-4.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fcaf8c911cb29316a02356f89dbc0e0dfcc6a712ace217b6b543805690d2aefd"},
{file = "pymongo-4.7.2-cp37-cp37m-win32.whl", hash = "sha256:b48a5650ee5320d59f6d570bd99a8d5c58ac6f297a4e9090535f6561469ac32e"},
{file = "pymongo-4.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5239ef7e749f1326ea7564428bf861d5250aa39d7f26d612741b1b1273227062"},
{file = "pymongo-4.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2dcf608d35644e8d276d61bf40a93339d8d66a0e5f3e3f75b2c155a421a1b71"},
{file = "pymongo-4.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:25eeb2c18ede63891cbd617943dd9e6b9cbccc54f276e0b2e693a0cc40f243c5"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9349f0bb17a31371d4cacb64b306e4ca90413a3ad1fffe73ac7cd495570d94b5"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffd4d7cb2e6c6e100e2b39606d38a9ffc934e18593dc9bb326196afc7d93ce3d"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a8bd37f5dabc86efceb8d8cbff5969256523d42d08088f098753dba15f3b37a"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c78f156edc59b905c80c9003e022e1a764c54fd40ac4fea05b0764f829790e2"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d892fb91e81cccb83f507cdb2ea0aa026ec3ced7f12a1d60f6a5bf0f20f9c1f"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87832d6076c2c82f42870157414fd876facbb6554d2faf271ffe7f8f30ce7bed"},
{file = "pymongo-4.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ce1a374ea0e49808e0380ffc64284c0ce0f12bd21042b4bef1af3eb7bdf49054"},
{file = "pymongo-4.7.2-cp38-cp38-win32.whl", hash = "sha256:eb0642e5f0dd7e86bb358749cc278e70b911e617f519989d346f742dc9520dfb"},
{file = "pymongo-4.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:4bdb5ffe1cd3728c9479671a067ef44dacafc3743741d4dc700c377c4231356f"},
{file = "pymongo-4.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:743552033c63f0afdb56b9189ab04b5c1dbffd7310cf7156ab98eebcecf24621"},
{file = "pymongo-4.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5239776633f7578b81207e5646245415a5a95f6ae5ef5dff8e7c2357e6264bfc"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727ad07952c155cd20045f2ce91143c7dc4fb01a5b4e8012905a89a7da554b0c"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9385654f01a90f73827af4db90c290a1519f7d9102ba43286e187b373e9a78e9"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d833651f1ba938bb7501f13e326b96cfbb7d98867b2d545ca6d69c7664903e0"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf17ea9cea14d59b0527403dd7106362917ced7c4ec936c4ba22bd36c912c8e0"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cecd2df037249d1c74f0af86fb5b766104a5012becac6ff63d85d1de53ba8b98"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65b4c00dedbd333698b83cd2095a639a6f0d7c4e2a617988f6c65fb46711f028"},
{file = "pymongo-4.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d9b6cbc037108ff1a0a867e7670d8513c37f9bcd9ee3d2464411bfabf70ca002"},
{file = "pymongo-4.7.2-cp39-cp39-win32.whl", hash = "sha256:cf28430ec1924af1bffed37b69a812339084697fd3f3e781074a0148e6475803"},
{file = "pymongo-4.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:e004527ea42a6b99a8b8d5b42b42762c3bdf80f88fbdb5c3a9d47f3808495b86"},
{file = "pymongo-4.7.2.tar.gz", hash = "sha256:9024e1661c6e40acf468177bf90ce924d1bc681d2b244adda3ed7b2f4c4d17d7"},
]
[package.dependencies]
dnspython = ">=1.16.0,<3.0.0"
[package.extras]
aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"]
encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"]
gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
snappy = ["python-snappy"]
test = ["pytest (>=7)"]
zstd = ["zstandard"]
[[package]]
name = "pyreadline3"
version = "3.4.1"
@ -2623,8 +2739,11 @@ files = [
]
[package.dependencies]
aiohttp = {version = ">=3.5", optional = true, markers = "extra == \"aiohttp\""}
certifi = "*"
fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""}
loguru = {version = ">=0.5", optional = true, markers = "extra == \"loguru\""}
pymongo = {version = ">=3.1", optional = true, markers = "extra == \"pymongo\""}
urllib3 = ">=1.26.11"
[package.extras]
@ -2927,75 +3046,6 @@ typing-extensions = ">=4.8.0"
opt-einsum = ["opt-einsum (>=3.3)"]
optree = ["optree (>=0.9.1)"]
[[package]]
name = "torchaudio"
version = "2.3.0"
description = "An audio package for PyTorch"
optional = false
python-versions = "*"
files = [
{file = "torchaudio-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:342108da83aa19a457c9a128b1206fadb603753b51cca022b9f585aac2f4754c"},
{file = "torchaudio-2.3.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:73fedb2c631e01fa10feaac308540b836aefe758e55ca3ee026335e5d01e8e30"},
{file = "torchaudio-2.3.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e5bb50b7a4874ed97086c9e516dd90b103d954edcb5ed4b36f4fc22c4000a5a7"},
{file = "torchaudio-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4cc9cef5c98ed37e9405c4e0b0e6413bc101f3f49d45dc4f1d4e927757fe41e"},
{file = "torchaudio-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:341ca3048ce6edcc731519b30187f0b13acb245c4efe16f925f69f9d533546e1"},
{file = "torchaudio-2.3.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:8f2e0a28740bb0ee66369f92c811f33c0a47e6fcfc2de9cee89746472d713906"},
{file = "torchaudio-2.3.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:61edb02ae9c0efea4399f9c1f899601136b24f35d430548284ea8eaf6ccbe3be"},
{file = "torchaudio-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:04bc960cf1aef3b469b095a432a25496bc28197850fc2d90b7b52d6b5255487b"},
{file = "torchaudio-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:535144a2fbba95fbb3b883224ffcf44788e4cecbabbe49c4a1ae3e7a74f71485"},
{file = "torchaudio-2.3.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fb3f52ed1d63b272c240d9bf051705312cb172212051b8a6a2f64d42e3cc1633"},
{file = "torchaudio-2.3.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:668a8b694e5522cff28cd5e02d01aa1b75ce940aa9fb40480892bdc623b1735d"},
{file = "torchaudio-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:6c1f538018b85d7766835d042e555de2f096f7a69bba6b16031bf42a914dd9e1"},
{file = "torchaudio-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ba93265455dc363385e98c0cfcaeb586b7401af8a2c824811ee1466134a4f30"},
{file = "torchaudio-2.3.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:21bb6d1b384fc8895133f01489133d575d4a715cd81734b89651fb0264bd8b80"},
{file = "torchaudio-2.3.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:ed1866f508dc689c4f682d330b2ed4c83108d35865e4fb89431819364d8ad9ed"},
{file = "torchaudio-2.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:a3cbb230e2bb38ad1a1dd74aea242a154a9f76ab819d9c058b2c5074a9f5d7d2"},
{file = "torchaudio-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f4b933776f20a36af5ddc57968fcb3da34dd03881db8d6760f3e1176803b9cf8"},
{file = "torchaudio-2.3.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c5e63cc2dbf179088b6cdfd21ecdbb943aa003c780075aa440162f231ee72db2"},
{file = "torchaudio-2.3.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d243bb8a1ee263c2cdafb9feed1569c3742d8135731e8f7818de12f4e0c83e28"},
{file = "torchaudio-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6cd6d45cf8a45c89953e35434d9a461feb418e51e760adafc606a903dcbb9bd5"},
]
[package.dependencies]
torch = "2.3.0"
[[package]]
name = "torchvision"
version = "0.18.0"
description = "image and video datasets and models for torch deep learning"
optional = false
python-versions = ">=3.8"
files = [
{file = "torchvision-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dd61628a3d189c6852a12dc5ed4cd2eece66d2d67f35a866cb16f1dcb06c8c62"},
{file = "torchvision-0.18.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:493c45f9937dad37aa1b64b14da17c7a589c72b91adc4837d431009cfe29bd53"},
{file = "torchvision-0.18.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:5337f6acfa1fe959d5cb340d01a00614d6b31ce7a4824ccb95435a85c5273b95"},
{file = "torchvision-0.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd8e6f3b5beb49965f15c461302488edfa3d8c2d01d3bb79b150d6fb62711e3a"},
{file = "torchvision-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6896a52168befe1105fb3c9335287390ed227e71d1e4ec4d68b62e8a3099fc09"},
{file = "torchvision-0.18.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:3d7955398d4ceaad77c487c2c44f6f7813112402c9bab8cd906d346005891048"},
{file = "torchvision-0.18.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e5a24d620cea14a4bb89f24aa2b506230c0a16a3ada57fc53ad80cfd256a2128"},
{file = "torchvision-0.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:6ad70ddfa879bda5ed886b2518fe562640e0059787cbd65cb2bffa7674541410"},
{file = "torchvision-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:eb9d83c0e1dbb54ecb0fb04c87f786333e3a6fb8b9c400aca7c31081f9aa5707"},
{file = "torchvision-0.18.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:b657d052d146f24cb3b2a78219bfc82ae70a9706671c50f632528907d10cccec"},
{file = "torchvision-0.18.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a964afbc7ddf50a46b941477f6c35729b416deedd139756befd488245e2e226d"},
{file = "torchvision-0.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:7c770f0f748e0b17f57c0297508d7254f686cdf03fc2e2949f422b20574f4c0f"},
{file = "torchvision-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2115a1906c015f5da9ceedc40a983313b0fd6e2c8a17108a92991706f51f6987"},
{file = "torchvision-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6323f7e5423ff2594d5891863b919deb9d0de95f01c36bf26fbd879036b6ed08"},
{file = "torchvision-0.18.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:925d0a82cccf6f986c18b29b4392a942db65cbdb73c13a129c8493822eb9e36f"},
{file = "torchvision-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:95b42d0dc599b47a01530c7439a5751e67e45b85e3a67113989cf7c7c70f2039"},
{file = "torchvision-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75e22ecf44a13b8f95b8ad421c0261282d859c61816badaca1959e073ccdd691"},
{file = "torchvision-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4c334b3e719ba0a9ba6e15d4aff1178f5e6d029174f346163fed525f0ccfffd3"},
{file = "torchvision-0.18.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:36efd87001c6bee2383e043e46a025affb03179747c8f4777b9918527ffce756"},
{file = "torchvision-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:ccc292e093771d5baacf5535ac4416306b6b5f15676341cd4d010d8542eace25"},
]
[package.dependencies]
numpy = "*"
pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0"
torch = "2.3.0"
[package.extras]
scipy = ["scipy"]
[[package]]
name = "tqdm"
version = "4.66.4"
@ -3684,4 +3734,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "e42ed69102f662e3a6bd7b1949594f4277bea3460f38427b331ddbc439e455d7"
content-hash = "45dc66654b5fa9ba530a5371278397b5a07449b0b1e23a84f736250411e06686"


@ -11,7 +11,7 @@ uvicorn = {extras = ["standard"], version = "^0.29.0"}
pendulum = "^3.0.0"
onnxruntime = "^1.18.0"
aiohttp = "^3.9.5"
sentry-sdk = {extras = ["loguru"], version = "^2.2.0"}
sentry-sdk = {extras = ["pymongo", "fastapi", "loguru", "aiohttp"], version = "^2.2.0"}
redis = {extras = ["hiredis"], version = "^5.0.4"}
rocketry = "^2.5.1"
fastapi = "^0.111.0"
@ -22,12 +22,14 @@ korcen = "^0.3.8"
opencv-python = "^4.9.0.80"
psutil = "^5.9.8"
transformers = "^4.41.0"
torch = "^2.3.0"
torchvision = "^0.18.0"
torchaudio = "^2.3.0"
loguru = "^0.7.2"
slowapi = "^0.1.9"
aiogoogletrans = "^3.3.3"
motor = "^3.4.0"
python-dotenv = "^1.0.1"
better-profanity = "^0.7.0"
joblib = "^1.4.2"
torch = "^2.3.0"
[tool.poetry.group.dev.dependencies]