architojha committed on
Commit
4caacc1
·
1 Parent(s): a49614b

removed logger fix

Browse files
src/api/v1/build_ml_plan/eda.py CHANGED
@@ -1,7 +1,7 @@
1
- import os
2
  from fastapi import APIRouter
3
  from src.app.schemas.requests.eda import EdaRequestSchema
4
  from src.app.pipelines.eda.pipeline import EdaLoop
 
5
 
6
  eda_router = APIRouter()
7
 
 
 
1
  from fastapi import APIRouter
2
  from src.app.schemas.requests.eda import EdaRequestSchema
3
  from src.app.pipelines.eda.pipeline import EdaLoop
4
+ import os
5
 
6
  eda_router = APIRouter()
7
 
src/api/v1/build_ml_plan/task_analysis.py CHANGED
@@ -1,13 +1,10 @@
1
- import os
2
- import shutil
3
- from datetime import datetime
4
- from src.core.utils import LogManager
5
- from src.app.schemas.requests.eda import EdaRequestSchema
6
  from fastapi import APIRouter, File, UploadFile, Form, HTTPException, Request
7
  from src.app.pipelines.task_analysis import MLImplementationPlannerWorkflow, MLAnalysisWorkflow
8
-
9
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
10
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
 
 
11
 
12
  analysis_router = APIRouter()
13
 
 
 
 
 
 
 
1
  from fastapi import APIRouter, File, UploadFile, Form, HTTPException, Request
2
  from src.app.pipelines.task_analysis import MLImplementationPlannerWorkflow, MLAnalysisWorkflow
3
+ from src.app.schemas.requests.eda import EdaRequestSchema
4
+ from src.core.utils import logger
5
+ from typing import Optional
6
+ import os
7
+ import shutil
8
 
9
  analysis_router = APIRouter()
10
 
src/api/v1/eda_engine/data_quality.py CHANGED
@@ -1,14 +1,10 @@
1
  import os
2
  import shutil
3
  from fastapi import APIRouter
4
- from datetime import datetime
5
- from src.core.utils import LogManager
6
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
7
  from src.app.pipelines.modules import DataQualityAssessmentWorkflow
8
 
9
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
10
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
11
-
12
  data_quality_router = APIRouter()
13
 
14
  def delete_dir_contents(directory: str)->None:
 
1
  import os
2
  import shutil
3
  from fastapi import APIRouter
4
+ from src.core.utils import logger
 
5
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
6
  from src.app.pipelines.modules import DataQualityAssessmentWorkflow
7
 
 
 
 
8
  data_quality_router = APIRouter()
9
 
10
  def delete_dir_contents(directory: str)->None:
src/api/v1/eda_engine/data_statistics.py CHANGED
@@ -1,14 +1,10 @@
1
  import os
2
  import shutil
3
  from fastapi import APIRouter
4
- from datetime import datetime
5
- from src.core.utils import LogManager
6
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
7
  from src.app.pipelines.modules import DataStatisticsWorkflow
8
 
9
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
10
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
11
-
12
  data_statistics_router = APIRouter()
13
 
14
  def delete_dir_contents(directory: str)->None:
 
1
  import os
2
  import shutil
3
  from fastapi import APIRouter
4
+ from src.core.utils import logger
 
5
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
6
  from src.app.pipelines.modules import DataStatisticsWorkflow
7
 
 
 
 
8
  data_statistics_router = APIRouter()
9
 
10
  def delete_dir_contents(directory: str)->None:
src/api/v1/eda_engine/data_understanding.py CHANGED
@@ -1,14 +1,10 @@
1
  import os
2
  import shutil
3
  from typing import Optional
4
- from datetime import datetime
5
- from src.core.utils import LogManager
6
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
7
  from src.app.pipelines.modules import DataUnderstandingContextWorkflow
8
 
9
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
10
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
11
-
12
  data_understanding_router = APIRouter()
13
 
14
  def delete_dir_contents(directory: str) -> None:
 
1
  import os
2
  import shutil
3
  from typing import Optional
4
+ from src.core.utils import logger
 
5
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
6
  from src.app.pipelines.modules import DataUnderstandingContextWorkflow
7
 
 
 
 
8
  data_understanding_router = APIRouter()
9
 
10
  def delete_dir_contents(directory: str) -> None:
src/api/v1/eda_engine/univariate_analysis.py CHANGED
@@ -1,10 +1,9 @@
1
  import os
2
  import shutil
3
  from typing import Optional
4
- from datetime import datetime
5
- from src.core.utils import LogManager
6
- from src.app.pipelines.modules import UnivariateAnalysisWorkflow
7
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
 
8
 
9
  univariate_analysis_router = APIRouter()
10
 
 
1
  import os
2
  import shutil
3
  from typing import Optional
4
+ from src.core.utils import logger
 
 
5
  from fastapi import APIRouter, UploadFile, File, HTTPException, Form
6
+ from src.app.pipelines.modules import UnivariateAnalysisWorkflow
7
 
8
  univariate_analysis_router = APIRouter()
9
 
src/app/pipelines/modules/data_quality_assessment.py CHANGED
@@ -7,9 +7,8 @@ import numpy as np
7
  import pandas as pd
8
  from scipy import stats
9
  from typing import Union
10
- from datetime import datetime
11
  from collections import Counter
12
- from src.core.utils import LogManager
13
  from scipy.spatial.distance import pdist
14
  from scipy.stats import chi2_contingency
15
  from agno.agent import Agent, RunResponse
@@ -20,9 +19,6 @@ from sklearn.metrics.pairwise import cosine_similarity
20
  from typing import Dict, List, Union, Tuple, Any, Optional
21
  from sklearn.feature_extraction.text import TfidfVectorizer
22
 
23
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
24
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
25
-
26
  class DataQualityAssessmentWorkflow:
27
  def __init__(
28
  self, data_source: str,
 
7
  import pandas as pd
8
  from scipy import stats
9
  from typing import Union
 
10
  from collections import Counter
11
+ from src.core.utils import logger
12
  from scipy.spatial.distance import pdist
13
  from scipy.stats import chi2_contingency
14
  from agno.agent import Agent, RunResponse
 
19
  from typing import Dict, List, Union, Tuple, Any, Optional
20
  from sklearn.feature_extraction.text import TfidfVectorizer
21
 
 
 
 
22
  class DataQualityAssessmentWorkflow:
23
  def __init__(
24
  self, data_source: str,
src/app/pipelines/modules/data_statistics.py CHANGED
@@ -6,18 +6,15 @@ import psutil
6
  import numpy as np
7
  import pandas as pd
8
  import dateutil.parser
 
9
  from dateutil import parser
10
  from datetime import datetime
11
- from dotenv import load_dotenv
12
  from collections import Counter
13
- from src.core.utils import LogManager
14
  from agno.agent import Agent, RunResponse
15
  from agno.models.openai import OpenAIChat
16
  from typing import Union, List, Dict, Any, Tuple
17
 
18
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
19
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
20
-
21
  load_dotenv()
22
 
23
  class DataStatisticsWorkflow:
 
6
  import numpy as np
7
  import pandas as pd
8
  import dateutil.parser
9
+ from dotenv import load_dotenv
10
  from dateutil import parser
11
  from datetime import datetime
 
12
  from collections import Counter
13
+ from src.core.utils import logger
14
  from agno.agent import Agent, RunResponse
15
  from agno.models.openai import OpenAIChat
16
  from typing import Union, List, Dict, Any, Tuple
17
 
 
 
 
18
  load_dotenv()
19
 
20
  class DataStatisticsWorkflow:
src/app/pipelines/modules/data_understanding_context.py CHANGED
@@ -1,18 +1,14 @@
1
  import os
2
  import random
3
  import pandas as pd
4
- from datetime import datetime
5
  from dotenv import load_dotenv
6
- from src.core.utils import LogManager
7
  from pydantic import BaseModel, Field
8
  from agno.models.openai import OpenAIChat
9
  from agno.agent import Agent, RunResponse
10
  from typing import Optional, Union, List, Dict, Tuple
11
  from agno.tools.duckduckgo import DuckDuckGoTools
12
 
13
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
14
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
15
-
16
  load_dotenv()
17
 
18
  # class BCAgentResponseSchema(BaseModel):
 
1
  import os
2
  import random
3
  import pandas as pd
 
4
  from dotenv import load_dotenv
5
+ from src.core.utils import logger
6
  from pydantic import BaseModel, Field
7
  from agno.models.openai import OpenAIChat
8
  from agno.agent import Agent, RunResponse
9
  from typing import Optional, Union, List, Dict, Tuple
10
  from agno.tools.duckduckgo import DuckDuckGoTools
11
 
 
 
 
12
  load_dotenv()
13
 
14
  # class BCAgentResponseSchema(BaseModel):
src/app/pipelines/modules/univariate_analysis.py CHANGED
@@ -6,8 +6,7 @@ import pandas as pd
6
  from tqdm import tqdm
7
  import lmoments3 as lm
8
  import scipy.stats as stats
9
- from datetime import datetime
10
- from src.core.utils import LogManager
11
  from agno.models.openai import OpenAIChat
12
  from agno.agent import Agent, RunResponse
13
  from sklearn.neighbors import KernelDensity
@@ -15,9 +14,6 @@ from sklearn.model_selection import GridSearchCV
15
  from typing import Union, Tuple, Dict, Any, List, Optional
16
  from scipy.stats import norm, shapiro, anderson, kstest, normaltest
17
 
18
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
19
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
20
-
21
  class UnivariateAnalysisWorkflow:
22
  def __init__(
23
  self, data_source: str,
 
6
  from tqdm import tqdm
7
  import lmoments3 as lm
8
  import scipy.stats as stats
9
+ from src.core.utils import logger
 
10
  from agno.models.openai import OpenAIChat
11
  from agno.agent import Agent, RunResponse
12
  from sklearn.neighbors import KernelDensity
 
14
  from typing import Union, Tuple, Dict, Any, List, Optional
15
  from scipy.stats import norm, shapiro, anderson, kstest, normaltest
16
 
 
 
 
17
  class UnivariateAnalysisWorkflow:
18
  def __init__(
19
  self, data_source: str,
src/app/pipelines/task_analysis/ml_analysis_workflow.py CHANGED
@@ -1,17 +1,13 @@
 
1
  import os
2
- import json
3
- from typing import Optional
4
- from datetime import datetime
5
  from dotenv import load_dotenv
6
- from typing import Iterator, List
7
- from src.core.utils import LogManager
8
- from agno.models.openai import OpenAIChat # type: ignore
9
  from agno.agent import Agent, RunResponse # type: ignore
10
  from agno.tools.duckduckgo import DuckDuckGoTools # type: ignore
11
- from .model import RequirementsAnalysis, TechnicalResearch, ModelResponseStatus
12
-
13
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
14
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
15
 
16
  load_dotenv()
17
 
 
1
+ from agno.models.openai import OpenAIChat # type: ignore
2
  import os
 
 
 
3
  from dotenv import load_dotenv
4
+ from src.core.utils import logger
5
+ from typing import Optional
6
+ from .model import RequirementsAnalysis, TechnicalResearch, ModelResponseStatus
7
  from agno.agent import Agent, RunResponse # type: ignore
8
  from agno.tools.duckduckgo import DuckDuckGoTools # type: ignore
9
+ from typing import Iterator, List
10
+ import json
 
 
11
 
12
  load_dotenv()
13
 
src/app/pipelines/task_analysis/ml_implementation_planner_workflow.py CHANGED
@@ -1,16 +1,12 @@
 
1
  import os
2
- import json
3
- from typing import Optional
4
- from typing import Iterator
5
- from datetime import datetime
6
  from dotenv import load_dotenv
7
- from src.core.utils import LogManager
8
- from agno.models.openai import OpenAIChat # type: ignore
9
- from agno.agent import Agent, RunResponse # type: ignore
10
  from .model import RequirementsAnalysis, TechnicalResearch, ImplementationPlan
11
-
12
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
13
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
14
 
15
  load_dotenv()
16
 
 
1
+ from agno.models.openai import OpenAIChat # type: ignore
2
  import os
 
 
 
 
3
  from dotenv import load_dotenv
4
+ from src.core.utils import logger
5
+ from typing import Optional
 
6
  from .model import RequirementsAnalysis, TechnicalResearch, ImplementationPlan
7
+ from agno.agent import Agent, RunResponse # type: ignore
8
+ from typing import Iterator
9
+ import json
10
 
11
  load_dotenv()
12
 
src/app/pipelines/task_analysis/model.py CHANGED
@@ -1,7 +1,7 @@
1
- from enum import Enum
2
  from typing import List
3
- from pydantic import BaseModel, Field
4
  from typing import Iterator, List, Optional
 
 
5
 
6
  class MLTaskType(str, Enum):
7
  CLASSIFICATION = "classification"
 
 
1
  from typing import List
 
2
  from typing import Iterator, List, Optional
3
+ from enum import Enum
4
+ from pydantic import BaseModel, Field
5
 
6
  class MLTaskType(str, Enum):
7
  CLASSIFICATION = "classification"
src/app/schemas/requests/eda.py CHANGED
@@ -1,6 +1,6 @@
1
- from typing import Optional
2
  from pydantic import BaseModel, Field
3
  from src.app.pipelines.task_analysis.model import ImplementationPlan, RequirementsAnalysis, TechnicalResearch
 
4
 
5
  class EdaRequestSchema(BaseModel):
6
  dataset_path: str = Field(
 
 
1
  from pydantic import BaseModel, Field
2
  from src.app.pipelines.task_analysis.model import ImplementationPlan, RequirementsAnalysis, TechnicalResearch
3
+ from typing import Optional
4
 
5
  class EdaRequestSchema(BaseModel):
6
  dataset_path: str = Field(
src/app/schemas/responses/eda.py CHANGED
@@ -1,5 +1,5 @@
1
- from typing import List
2
  from pydantic import BaseModel, Field
 
3
  from src.app.pipelines.eda.agents.models import OrchestratorAgentResponseSchema, ExecuterAgentResponseSchema, AnalyzerAgentResponseSchema, JudgingAgentResponseSchema
4
 
5
  class IterationDetails(BaseModel):
 
 
1
  from pydantic import BaseModel, Field
2
+ from typing import List
3
  from src.app.pipelines.eda.agents.models import OrchestratorAgentResponseSchema, ExecuterAgentResponseSchema, AnalyzerAgentResponseSchema, JudgingAgentResponseSchema
4
 
5
  class IterationDetails(BaseModel):
src/core/server.py CHANGED
@@ -1,10 +1,6 @@
1
  from fastapi import FastAPI
2
  from src.api import router
3
- from src.core.utils import read_config, LogManager
4
- from datetime import datetime
5
-
6
- timestamp = datetime.now().strftime("%Y-%m-%d_%H")
7
- logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")
8
 
9
  def init_routers(app_: FastAPI) -> None:
10
  app_.include_router(router)
 
1
  from fastapi import FastAPI
2
  from src.api import router
3
+ from src.core.utils import logger, read_config
 
 
 
 
4
 
5
  def init_routers(app_: FastAPI) -> None:
6
  app_.include_router(router)
src/core/utils/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
  from .read_config import read_config
2
- from .knowledge_base import KnowledgeBaseClass
3
- from .logger import LogManager
 
1
  from .read_config import read_config
2
+ from .logger import logger
3
+ from .knowledge_base import KnowledgeBaseClass
src/core/utils/logger.py CHANGED
@@ -42,4 +42,5 @@ class LogManager:
42
  self.log(logging.DEBUG, message, log_type, console)
43
 
44
 
45
-
 
 
42
  self.log(logging.DEBUG, message, log_type, console)
43
 
44
 
45
+ timestamp = datetime.now().strftime("%Y-%m-%d_%H")
46
+ logger = LogManager(log_file_path=f"src/core/logs/log_{timestamp}.log")