Said Lfagrouche committed on
Commit
8242927
·
1 Parent(s): 6ca0914

Prepare for Hugging Face Spaces deployment with simplified configuration

Browse files
Files changed (5) hide show
  1. .env.example +10 -4
  2. Dockerfile +12 -7
  3. README.md +21 -6
  4. app.py +57 -21
  5. requirements.txt +14 -14
.env.example CHANGED
@@ -1,4 +1,10 @@
1
- OPENAI_API_KEY=
2
- LANGCHAIN_API_KEY=
3
- LANGCHAIN_TRACING_V2='true'
4
- LANGCHAIN_PROJECT=
 
 
 
 
 
 
 
1
+ # API Keys
2
+ OPENAI_API_KEY=your-openai-api-key
3
+ LANGCHAIN_API_KEY=your-langchain-api-key
4
+
5
+ # Configuration
6
+ LANGCHAIN_TRACING_V2=true
7
+ LANGCHAIN_PROJECT=MentalHealthCounselorPOC
8
+
9
+ # Deployment
10
+ HF_SPACES=true
Dockerfile CHANGED
@@ -1,13 +1,15 @@
1
- FROM python:3.13-slim
2
-
3
- RUN useradd -m -u 1000 user
4
- USER user
5
- ENV PATH="/home/user/.local/bin:$PATH"
6
 
7
  WORKDIR /app
8
 
 
 
 
 
 
 
9
  # Copy requirements file
10
- COPY --chown=user requirements.txt .
11
 
12
  # Install dependencies
13
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
@@ -17,9 +19,12 @@ RUN python -c "import nltk; nltk.download('punkt'); nltk.download('wordnet'); nl
17
 
18
  # Create necessary directories
19
  RUN mkdir -p data/users data/sessions data/conversations data/feedback
 
20
 
21
  # Copy application files
22
- COPY --chown=user . /app
 
 
23
 
24
  # Expose the port Hugging Face Spaces expects
25
  EXPOSE 7860
 
1
+ FROM python:3.9-slim
 
 
 
 
2
 
3
  WORKDIR /app
4
 
5
+ # Install git and git-lfs for downloading large files (if needed)
6
+ RUN apt-get update && \
7
+ apt-get install -y git git-lfs build-essential && \
8
+ apt-get clean && \
9
+ rm -rf /var/lib/apt/lists/*
10
+
11
  # Copy requirements file
12
+ COPY requirements.txt .
13
 
14
  # Install dependencies
15
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
 
19
 
20
  # Create necessary directories
21
  RUN mkdir -p data/users data/sessions data/conversations data/feedback
22
+ RUN mkdir -p mental_health_model_artifacts/chroma_db
23
 
24
  # Copy application files
25
+ COPY app.py .
26
+ COPY .env.example .env
27
+ COPY api_mental_health.py .
28
 
29
  # Expose the port Hugging Face Spaces expects
30
  EXPOSE 7860
README.md CHANGED
@@ -1,21 +1,29 @@
1
  ---
2
  title: Mental Health Counselor API
3
  emoji: 🧠
4
- colorFrom: purple
5
- colorTo: indigo
6
  sdk: docker
 
7
  pinned: false
8
  ---
9
 
10
  # Mental Health Counselor API
11
 
12
- This is a backend API for a mental health counseling application. It provides endpoints for analyzing patient messages, suggesting counselor responses, and managing counseling sessions.
13
 
14
  ## API Endpoints
15
 
16
- - `/`: Root endpoint showing API status
17
- - `/health`: Health check endpoint
18
- - And many more specialized endpoints from the full API
 
 
 
 
 
 
 
19
 
20
  ## Deployment
21
 
@@ -24,3 +32,10 @@ This API is deployed on Hugging Face Spaces using Docker.
24
  ## Frontend
25
 
26
  The frontend for this application is deployed separately on Vercel.
 
 
 
 
 
 
 
 
1
  ---
2
  title: Mental Health Counselor API
3
  emoji: 🧠
4
+ colorFrom: indigo
5
+ colorTo: purple
6
  sdk: docker
7
+ app_port: 7860
8
  pinned: false
9
  ---
10
 
11
  # Mental Health Counselor API
12
 
13
+ This is the backend API for a mental health counseling application. It provides endpoints for analyzing patient messages, suggesting counselor responses, and managing counseling sessions.
14
 
15
  ## API Endpoints
16
 
17
+ - `/` - Root endpoint showing API status
18
+ - `/health` - Health check endpoint
19
+ - `/metadata` - API metadata and information
20
+
21
+ ## Technology Stack
22
+
23
+ - FastAPI framework
24
+ - Scikit-Learn based ML models
25
+ - NLTK for NLP processing
26
+ - Vector database for semantic search
27
 
28
  ## Deployment
29
 
 
32
  ## Frontend
33
 
34
  The frontend for this application is deployed separately on Vercel.
35
+
36
+ ## Repository Structure
37
+
38
+ - `app.py` - Main application entry point
39
+ - `api_mental_health.py` - Core API functionality
40
+ - `data/` - Data storage directories
41
+ - `mental_health_model_artifacts/` - ML model files
app.py CHANGED
@@ -1,42 +1,78 @@
1
- from fastapi import FastAPI
 
2
  import os
 
3
  import logging
4
 
5
  # Set up logging
6
  logging.basicConfig(level=logging.INFO)
7
  logger = logging.getLogger(__name__)
8
 
 
 
 
9
  # Initialize FastAPI app
10
  app = FastAPI(title="Mental Health Counselor API")
11
 
12
- # Create necessary directories
13
- os.makedirs("data/users", exist_ok=True)
14
- os.makedirs("data/sessions", exist_ok=True)
15
- os.makedirs("data/conversations", exist_ok=True)
16
- os.makedirs("data/feedback", exist_ok=True)
 
 
 
 
 
 
 
 
 
 
 
 
17
 
18
- # Define a simple health check route
19
  @app.get("/health")
20
  async def health_check():
21
  return {"status": "ok", "message": "Mental Health Counselor API is running"}
22
 
23
- # Define a simple root route
24
- @app.get("/")
25
- async def root():
26
  return {
27
- "app": "Mental Health Counselor API",
28
- "status": "running",
29
  "endpoints": [
 
30
  "/health",
31
- "/api-docs"
32
- ]
 
 
 
 
33
  }
34
 
35
- # Import the actual API if the file exists
36
  try:
37
- from api_mental_health import app as full_app
38
- # Merge the routes from the full app
39
- app.routes.extend(full_app.routes)
40
- logger.info("Loaded full API functionality")
41
- except ImportError:
42
- logger.warning("Could not import full API functionality")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, HTTPException
2
+ from pydantic import BaseModel
3
  import os
4
+ from dotenv import load_dotenv
5
  import logging
6
 
7
  # Set up logging
8
  logging.basicConfig(level=logging.INFO)
9
  logger = logging.getLogger(__name__)
10
 
11
+ # Load environment variables
12
+ load_dotenv()
13
+
14
  # Initialize FastAPI app
15
  app = FastAPI(title="Mental Health Counselor API")
16
 
17
+ # Initialize global storage
18
+ DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
19
+ os.makedirs(DATA_DIR, exist_ok=True)
20
+ os.makedirs(os.path.join(DATA_DIR, "users"), exist_ok=True)
21
+ os.makedirs(os.path.join(DATA_DIR, "sessions"), exist_ok=True)
22
+ os.makedirs(os.path.join(DATA_DIR, "conversations"), exist_ok=True)
23
+ os.makedirs(os.path.join(DATA_DIR, "feedback"), exist_ok=True)
24
+
25
+ # Simple health check route
26
+ @app.get("/")
27
+ async def root():
28
+ return {
29
+ "status": "ok",
30
+ "message": "Mental Health Counselor API is running",
31
+ "api_version": "1.0.0",
32
+ "backend_info": "FastAPI on Hugging Face Spaces"
33
+ }
34
 
35
+ # Health check endpoint
36
  @app.get("/health")
37
  async def health_check():
38
  return {"status": "ok", "message": "Mental Health Counselor API is running"}
39
 
40
+ # Metadata endpoint
41
+ @app.get("/metadata")
42
+ async def get_metadata():
43
  return {
44
+ "api_version": "1.0.0",
 
45
  "endpoints": [
46
+ "/",
47
  "/health",
48
+ "/metadata"
49
+ ],
50
+ "provider": "Mental Health Counselor API on Hugging Face Spaces",
51
+ "deployment_type": "Hugging Face Spaces Docker",
52
+ "description": "This API provides functionality for a mental health counseling application.",
53
+ "frontend": "Deployed separately on Vercel"
54
  }
55
 
56
+ # Try to import the full API if available
57
  try:
58
+ import api_mental_health
59
+ # If the import succeeds, try to add those routes
60
+ logger.info("Successfully imported full API module")
61
+
62
+ # Add a placeholder for full functionality
63
+ @app.get("/full-api-status")
64
+ async def full_api_status():
65
+ return {
66
+ "status": "imported",
67
+ "message": "Full API module was imported successfully, but endpoints may require additional setup"
68
+ }
69
+ except ImportError as e:
70
+ logger.warning(f"Could not import full API module: {e}")
71
+
72
+ @app.get("/full-api-status")
73
+ async def full_api_status():
74
+ return {
75
+ "status": "unavailable",
76
+ "message": "Full API module could not be imported",
77
+ "error": str(e)
78
+ }
requirements.txt CHANGED
@@ -1,12 +1,12 @@
1
  # Core dependencies for data processing and ML
2
- pandas
3
- numpy
4
- scikit-learn # Used for ML models
5
- joblib
6
 
7
  # NLP and sentiment analysis
8
- nltk
9
- vaderSentiment
10
 
11
  # Dataset downloading
12
  kagglehub
@@ -17,21 +17,21 @@ openai
17
  langchain
18
  langchain-openai
19
  langchain-chroma
20
- httpx # Required for API calls
21
 
22
  # API and tracing
23
- fastapi
24
- uvicorn
25
- pydantic
26
  langsmith
27
- python-dotenv
28
  lightgbm
29
 
30
  # New dependencies for additional features
31
- python-multipart # For file uploads
32
  fastapi-cors # For CORS support
33
- aiofiles # For async file operations
34
- jinja2 # For template rendering
35
  python-jose[cryptography] # For JWT tokens (authentication)
36
  passlib[bcrypt] # For password hashing
37
  pydub # For audio processing
 
1
  # Core dependencies for data processing and ML
2
+ pandas==2.0.3
3
+ numpy==1.24.3
4
+ scikit-learn==1.2.2
5
+ joblib==1.3.1
6
 
7
  # NLP and sentiment analysis
8
+ nltk==3.8.1
9
+ vaderSentiment==3.3.2
10
 
11
  # Dataset downloading
12
  kagglehub
 
17
  langchain
18
  langchain-openai
19
  langchain-chroma
20
+ httpx==0.24.1
21
 
22
  # API and tracing
23
+ fastapi==0.95.2
24
+ uvicorn[standard]==0.22.0
25
+ pydantic==1.10.8
26
  langsmith
27
+ python-dotenv==1.0.0
28
  lightgbm
29
 
30
  # New dependencies for additional features
31
+ python-multipart==0.0.6
32
  fastapi-cors # For CORS support
33
+ aiofiles==23.1.0
34
+ jinja2==3.1.2
35
  python-jose[cryptography] # For JWT tokens (authentication)
36
  passlib[bcrypt] # For password hashing
37
  pydub # For audio processing