Upload 20 files
- Dockerfile +22 -0
- alembic/README +1 -0
- alembic/__pycache__/env.cpython-312.pyc +0 -0
- alembic/env.py +55 -0
- alembic/script.py.mako +26 -0
- alembic/versions/8dc44341a607_initial_migration.py +41 -0
- alembic/versions/__pycache__/8dc44341a607_initial_migration.cpython-312.pyc +0 -0
- app/__pycache__/crud.cpython-312.pyc +0 -0
- app/__pycache__/database.cpython-312.pyc +0 -0
- app/__pycache__/models.cpython-312.pyc +0 -0
- app/__pycache__/router.cpython-312.pyc +0 -0
- app/crud.py +21 -0
- app/database.py +14 -0
- app/models.py +11 -0
- app/router.py +144 -0
- main.py +7 -0
- poetry.lock +0 -0
- pyproject.toml +24 -0
- utils/__pycache__/log_utils.cpython-312.pyc +0 -0
- utils/log_utils.py +26 -0
Dockerfile
ADDED
@@ -0,0 +1,22 @@
FROM python:3.12
USER root

WORKDIR /app
RUN rm -f /etc/apt/apt.conf.d/docker-clean \
    && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache


RUN curl -sSL https://install.python-poetry.org | python3 -

ENV POETRY_HOME="/root/.local"
ENV PATH="$POETRY_HOME/bin:$PATH"
COPY pyproject.toml poetry.lock* /app/

RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root

# Copy project files
ADD ./app /app/app
ADD ./utils /app/utils
COPY main.py /app/main.py

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
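To build and run the image locally, a minimal sketch (the image tag and the .env file name are illustrative; the container reads POSTGRES_DATABASE_URL at runtime via python-dotenv/os.environ):

docker build -t realtor-chatbot .
docker run --env-file .env -p 8000:8000 realtor-chatbot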
alembic/README
ADDED
@@ -0,0 +1 @@
Generic single-database configuration.
alembic/__pycache__/env.cpython-312.pyc
ADDED
Binary file (2.39 kB)
alembic/env.py
ADDED
@@ -0,0 +1,55 @@
import os
from dotenv import load_dotenv
from alembic import context
from sqlalchemy import engine_from_config, pool
from app.models import Base


load_dotenv()

# Alembic Config object
config = context.config

db_url = os.getenv("POSTGRES_DATABASE_URL")
if not db_url:
    raise RuntimeError("POSTGRES_DATABASE_URL not set in the environment!")


config.set_main_option("sqlalchemy.url", db_url)
target_metadata = Base.metadata

def run_migrations_online():
    """Run migrations in 'online' mode."""
    # Create an engine from the configuration
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        # Configure the context with the connection and the metadata
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()

def run_migrations_offline():
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
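With env.py resolving the database URL from POSTGRES_DATABASE_URL, migrations run through the standard Alembic CLI. A sketch, assuming the usual alembic.ini with script_location = alembic in the project root (that file is not part of this upload):

alembic revision --autogenerate -m "describe change"
alembic upgrade head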
alembic/script.py.mako
ADDED
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
alembic/versions/8dc44341a607_initial_migration.py
ADDED
@@ -0,0 +1,41 @@
"""Initial migration

Revision ID: 8dc44341a607
Revises:
Create Date: 2025-02-20 23:19:04.480316

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '8dc44341a607'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('sender_number', sa.String(), nullable=True),
        sa.Column('chat_history', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_index(op.f('ix_users_sender_number'), 'users', ['sender_number'], unique=True)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_sender_number'), table_name='users')
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_table('users')
    # ### end Alembic commands ###
alembic/versions/__pycache__/8dc44341a607_initial_migration.cpython-312.pyc
ADDED
Binary file (2.48 kB)
app/__pycache__/crud.cpython-312.pyc
ADDED
Binary file (1.75 kB)
app/__pycache__/database.cpython-312.pyc
ADDED
Binary file (653 Bytes)
app/__pycache__/models.cpython-312.pyc
ADDED
Binary file (944 Bytes)
app/__pycache__/router.cpython-312.pyc
ADDED
Binary file (17.1 kB)
app/crud.py
ADDED
@@ -0,0 +1,21 @@
from app import models
from sqlalchemy.orm import Session


def get_user_by_sender_number(db: Session, sender_number: str):
    return db.query(models.User).filter(models.User.sender_number == sender_number).first()

def get_user_by_user_id(db: Session, user_id: int):
    return db.query(models.User).filter(models.User.id == user_id).first()

def create_user(db: Session, name: str, sender_number: str):
    try:
        new_user = models.User(name=name, sender_number=sender_number)
        db.add(new_user)
        db.commit()
        db.refresh(new_user)
        return new_user
    except Exception as e:
        db.rollback()
        print(f"Error creating user: {e}")
        return None
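These helpers expect an open Session from app.database; a minimal usage sketch (the name and phone number are placeholder values):

from app import crud
from app.database import SessionLocal

db = SessionLocal()
try:
    user = crud.create_user(db, name="Jane Doe", sender_number="+15550001111")
    if user:
        print(crud.get_user_by_sender_number(db, "+15550001111").id)
finally:
    db.close()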
app/database.py
ADDED
@@ -0,0 +1,14 @@
import os
from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import declarative_base  # moved out of sqlalchemy.ext.declarative as of SQLAlchemy 2.0

load_dotenv()

SQLALCHEMY_DATABASE_URL = os.getenv("POSTGRES_DATABASE_URL")

engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()
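The connection string is read from the environment (typically a .env file) at import time; an illustrative entry, with placeholder host, credentials, and database name:

POSTGRES_DATABASE_URL=postgresql+psycopg2://user:password@db-host:5432/realtor_db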
app/models.py
ADDED
@@ -0,0 +1,11 @@
from sqlalchemy import Column, Integer, JSON, String, DateTime, func
from .database import Base

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, default="Unknown")
    sender_number = Column(String, unique=True, index=True)
    chat_history = Column(JSON, default=[])
    created_at = Column(DateTime(timezone=True), server_default=func.now())
app/router.py
ADDED
@@ -0,0 +1,144 @@
from app import models, crud
from app.database import SessionLocal, engine
from utils.log_utils import setup_logger
from fastapi import Request, Depends, APIRouter, HTTPException
import os, json, random, requests
from livekit import api
from sqlalchemy.orm import Session
from fastapi.responses import JSONResponse
from twilio.twiml.messaging_response import MessagingResponse
from fastapi.responses import Response
from pydantic import BaseModel, Field
from dotenv import load_dotenv
load_dotenv()

logger = setup_logger("router")
router = APIRouter()

models.Base.metadata.create_all(bind=engine)

def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


class MessageRequestValidate(BaseModel):
    from_phone: str = Field(default="+917389058485")

@router.post('/realtor/validate')
async def realtor_validate_user(request: MessageRequestValidate, db: Session = Depends(get_db)):
    logger.info(f"request: {request}")
    user = crud.get_user_by_sender_number(db, sender_number=request.from_phone)
    if user:
        return JSONResponse(content="User is verified")
    return JSONResponse(content="User is not verified")

class MessageRequestSignUP(BaseModel):
    from_phone: str = Field(default="+917389058485")
    agent_id: str = Field(default="agent_id1")
    full_name: str = Field(default="agent_full_name")
    broker_id: str = Field(default="broker_id1")

@router.post('/realtor/singup')
async def realtor_sign_up(request: MessageRequestSignUP):
    print(request)
    return JSONResponse(content="User is signed up successfully; you can now continue to ask property-related questions")


@router.post("/create_user")
async def create_user(name: str, number: str, db: Session = Depends(get_db)):
    user = crud.create_user(db, name=name, sender_number=number)
    if not user:
        raise HTTPException(status_code=400, detail="User creation failed")
    return user

@router.post('/chatbot/')
async def whatsapp_webhook(request: Request):
    form_data = await request.form()

    num_media = int(form_data.get('NumMedia', 0))
    sender_number = form_data.get("From")[9:]  # strip the leading "whatsapp:" prefix from Twilio's From field
    profile_name = form_data.get("ProfileName")
    user_query = form_data.get("Body")

    logger.warning(f"num_media: {num_media}, sender_number: {sender_number}, ProfileName: {profile_name}, user_query: {user_query}")

    payload = {
        "input_value": json.dumps({"from_phone": sender_number, "msg": user_query}),
        "session_id": sender_number
    }
    logger.info(f"Payload: {payload}")
    headers = {
        'Content-Type': 'application/json'
    }
    url = "https://workflows.kickcall.ai/api/v1/run/c94b14f5-d7a7-4fc8-89d8-eb672716c778?stream=false&user_id=1"

    try:
        response = requests.post(url, headers=headers, data=json.dumps(payload))
        logger.info(f"[Langflow][run] Response status code received: {response.status_code}")

        if response.status_code == 200:
            response_json = response.json()
            logger.info(f"[Langflow][run] Response: {response_json}")
            output_text = response_json["outputs"][0]["outputs"][0]["results"]["message"]["data"]["text"]

            bot_resp = MessagingResponse()
            msg = bot_resp.message()
            msg.body(output_text)

            return Response(content=str(bot_resp), media_type="application/xml")
        else:
            logger.info(f"Error: Received status code {response.status_code}")
            bot_resp = MessagingResponse()
            msg = bot_resp.message()
            msg.body("I'm sorry, I couldn't process your request at the moment. Please try again later.")
            return Response(content=str(bot_resp), media_type="application/xml")

    except Exception as e:
        logger.info(f"Error in processing user query: {e}")
        bot_resp = MessagingResponse()
        msg = bot_resp.message()
        msg.body("I'm sorry, an unexpected error occurred. Please try again later.")
        return Response(content=str(bot_resp), media_type="application/xml")


class MessageRequestRunFlow(BaseModel):
    from_phone: str = Field(default="+917389058485")
    user_query: str = Field(default="Hello")

@router.post('/realtor/run_flow')
async def realtor_run_flow(request: MessageRequestRunFlow):
    print(f"body: {request}")
    user_query = request.user_query
    print(f"user_query: {user_query}")
    payload = {
        "input_value": json.dumps({"from_phone": request.from_phone, "msg": user_query}),
        "session_id": request.from_phone
    }
    print(f"Payload: {payload}")
    headers = {
        'Content-Type': 'application/json'
    }
    url = "https://workflows.kickcall.ai/api/v1/run/c94b14f5-d7a7-4fc8-89d8-eb672716c778?stream=false&user_id=1"

    try:
        response = requests.post(url, headers=headers, data=json.dumps(payload))
        print(f"[Langflow][run] Response status code received: {response.status_code}")

        if response.status_code == 200:
            response_json = response.json()
            print(f"[Langflow][run] Response: {response_json}")
            output_text = response_json["outputs"][0]["outputs"][0]["results"]["message"]["data"]["text"]
            return JSONResponse(content=output_text)
        else:
            print(f"Error: Received status code {response.status_code}")
            msg = "I'm sorry, I couldn't process your request at the moment. Please try again later."
            return JSONResponse(content=msg)
    except Exception as e:
        print(f"Error in processing user query: {e}")
        msg = "I'm sorry, an unexpected error occurred. Please try again later."
        return JSONResponse(content=msg)
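The JSON endpoints can be exercised directly once the server is running; a sketch against a local instance (the phone number is the route's default placeholder):

curl -X POST http://localhost:8000/realtor/run_flow \
  -H "Content-Type: application/json" \
  -d '{"from_phone": "+917389058485", "user_query": "Hello"}'

The /chatbot/ route, by contrast, expects Twilio's form-encoded webhook fields (From, Body, ProfileName, NumMedia) and replies with TwiML.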
main.py
ADDED
@@ -0,0 +1,7 @@
from fastapi import FastAPI
from app.router import router


app = FastAPI()

app.include_router(router)
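For local development outside Docker, a sketch assuming dependencies are installed with Poetry and a populated .env file sits in the project root:

poetry install --no-root
poetry run uvicorn main:app --reload --port 8000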
poetry.lock
ADDED
The diff for this file is too large to render.
pyproject.toml
ADDED
@@ -0,0 +1,24 @@
[tool.poetry]
name = "bluebash-test1"
version = "0.1.0"
description = ""
authors = ["narendra-bluebash <[email protected]>"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.12"
fastapi = "^0.115.6"
sqlalchemy = "^2.0.37"
python-dotenv = "^1.0.1"
uvicorn = "^0.34.0"
colorlog = "^6.9.0"
python-multipart = "^0.0.20"
twilio = "^9.4.2"
alembic = "^1.14.0"
livekit = "^0.20.1"
livekit-api = "^0.8.2"
psycopg2 = "^2.9.10"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
utils/__pycache__/log_utils.cpython-312.pyc
ADDED
Binary file (1.26 kB)
utils/log_utils.py
ADDED
@@ -0,0 +1,26 @@
import logging
import colorlog

def setup_logger(logger_name: str, debug_color="white", info_color="green", propagate=False):
    color_handler = colorlog.StreamHandler()
    log_colors = {
        "DEBUG": debug_color,
        "INFO": info_color,
        "WARNING": "yellow",
        "ERROR": "red",
        "CRITICAL": "bold_red",
    }
    color_handler.setFormatter(
        colorlog.ColoredFormatter(
            "%(log_color)s%(asctime)s [%(levelname)s] %(name)s: %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
            log_colors=log_colors
        )
    )
    logger = logging.getLogger(logger_name)
    if logger.hasHandlers():
        logger.handlers.clear()
    logger.setLevel(logging.DEBUG)
    logger.addHandler(color_handler)
    logger.propagate = propagate
    return logger
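A minimal usage sketch of the logger factory (the logger name and colors are arbitrary):

from utils.log_utils import setup_logger

log = setup_logger("worker", info_color="cyan")
log.info("colored, timestamped message")
log.error("rendered in red")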