can chat now

This commit is contained in:
Jeremy Yin 2024-03-14 18:19:59 +08:00
parent a06c27df0a
commit 012f23a745
12 changed files with 125 additions and 10 deletions

View File

@ -5,7 +5,7 @@
groups = ["default"]
strategy = ["cross_platform", "inherit_metadata"]
lock_version = "4.4.1"
content_hash = "sha256:a4bebf92f9599ac0bbd9330c74e76f2aff5b1ca700909112d8bab68efb50e1ea"
content_hash = "sha256:7722c3d89cc1895a68d1ca6e1fc303c8cd178fa8fca91410deb7ecb53027a9ac"
[[package]]
name = "annotated-types"
@ -64,7 +64,7 @@ version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
groups = ["default"]
marker = "platform_system == \"Windows\""
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@ -163,6 +163,21 @@ files = [
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
name = "loguru"
version = "0.7.2"
requires_python = ">=3.5"
summary = "Python logging made (stupidly) simple"
groups = ["default"]
dependencies = [
"colorama>=0.3.4; sys_platform == \"win32\"",
"win32-setctime>=1.0.0; sys_platform == \"win32\"",
]
files = [
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
]
[[package]]
name = "openai"
version = "1.14.0"
@ -262,6 +277,17 @@ files = [
{file = "pymongo-4.6.2.tar.gz", hash = "sha256:ab7d01ac832a1663dad592ccbd92bb0f0775bc8f98a1923c5e1a7d7fead495af"},
]
[[package]]
name = "python-dotenv"
version = "1.0.1"
requires_python = ">=3.8"
summary = "Read key-value pairs from a .env file and set them as environment variables"
groups = ["default"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
]
[[package]]
name = "sniffio"
version = "1.3.1"
@ -326,3 +352,15 @@ files = [
{file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"},
{file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"},
]
[[package]]
name = "win32-setctime"
version = "1.1.0"
requires_python = ">=3.5"
summary = "A small Python utility to set file creation time on Windows"
groups = ["default"]
marker = "sys_platform == \"win32\""
files = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]

View File

@ -11,6 +11,8 @@ dependencies = [
"httpx>=0.27.0",
"uvicorn>=0.28.0",
"openai>=1.14.0",
"loguru>=0.7.2",
"python-dotenv>=1.0.1",
]
requires-python = "==3.12.*"
readme = "README.md"

View File

@ -1,15 +1,18 @@
from typing import Union
from fastapi import FastAPI
from dotenv import load_dotenv
from .entity import GetAiChatResponseInput
from .entity import GetAiChatResponseOutput
from .entity import GetUserChatHistoryInput
from .entity import GetUserChatHistoryOutput
from .entity import GetChatStatusTodayInput
from .entity import GetChatStatusTodayOutput
from .entity import UserChatMessage
from simplylab.entity import GetAiChatResponseInput
from simplylab.entity import GetAiChatResponseOutput
from simplylab.entity import GetUserChatHistoryInput
from simplylab.entity import GetUserChatHistoryOutput
from simplylab.entity import GetChatStatusTodayInput
from simplylab.entity import GetChatStatusTodayOutput
from simplylab.entity import UserChatMessage
from simplylab.services import Services
load_dotenv()
app = FastAPI()
@ -20,7 +23,9 @@ async def read_root():
@app.post("/api/v1/get_ai_chat_response")
async def get_ai_chat_response(req: GetAiChatResponseInput) -> GetAiChatResponseOutput:
    """Return the AI model's reply to the user's chat message.

    Delegates to the chat service, which forwards the message to the
    configured AI provider.
    """
    # Fix: dropped the dead placeholder assignment
    # (`res = GetAiChatResponseOutput(response="Hello World")`) that was
    # immediately overwritten by the real service result.
    svc = Services(req)
    response = await svc.chat.get_ai_chat_response(req)
    return GetAiChatResponseOutput(response=response)

View File

@ -0,0 +1,11 @@
from simplylab.providers.openrouter import OpenRouterProvider
class Providers:
    """Entry point to the external AI provider integrations."""

    def __init__(self):
        # No shared state yet; each provider is built on demand.
        pass

    @property
    def openrouter(self):
        """A fresh OpenRouter provider instance."""
        return OpenRouterProvider()

View File

@ -0,0 +1,32 @@
from openai import OpenAI
from os import getenv
from loguru import logger
class OpenRouterProvider:
    """Thin client for the OpenRouter chat-completions API."""

    def __init__(self):
        # Stateless: the client is created per call so the
        # OPENROUTER_API_KEY environment variable is re-read each time.
        ...

    async def chat(self, content: str) -> str:
        """Send *content* as a single user message and return the model's reply.

        NOTE(review): the underlying OpenAI client is synchronous, so this
        coroutine blocks the event loop for the duration of the HTTP
        request — consider switching to ``openai.AsyncOpenAI``.
        """
        # Gets the API key from the environment variable OPENROUTER_API_KEY.
        api_key = getenv("OPENROUTER_API_KEY")
        client = OpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=api_key,
        )
        completion = client.chat.completions.create(
            extra_headers={
                # "HTTP-Referer": $YOUR_SITE_URL, # Optional, for including your app on openrouter.ai rankings.
                # "X-Title": $YOUR_APP_NAME, # Optional. Shows in rankings on openrouter.ai.
            },
            model="mistralai/mistral-7b-instruct:free",
            messages=[
                {
                    "role": "user",
                    "content": content,
                },
            ],
        )
        # Fix: read the reply once, and log it through the project's loguru
        # logger instead of a stray debug print to stdout.
        reply = completion.choices[0].message.content
        logger.debug("OpenRouter reply: {}", reply)
        return reply

View File

@ -0,0 +1,12 @@
from typing import Any
from simplylab.services.chat import ChatService
class Services:
    """Facade over the application's service layer.

    Holds a request context and hands it to each service it constructs.
    """

    def __init__(self, ctx: Any):
        # Context object threaded through to every service.
        self.ctx = ctx

    @property
    def chat(self):
        """A chat service bound to the current context."""
        return ChatService(self.ctx)

View File

@ -0,0 +1,15 @@
from typing import Any
from simplylab.entity import GetAiChatResponseInput
from simplylab.providers import Providers
class ChatService:
    """Chat-related application logic."""

    def __init__(self, ctx: Any):
        # Request context supplied by the Services facade.
        self.ctx = ctx

    async def get_ai_chat_response(self, req: GetAiChatResponseInput) -> str:
        """Forward the user's message to the OpenRouter provider and return its reply."""
        providers = Providers()
        return await providers.openrouter.chat(content=req.message)