Update app.py
app.py CHANGED
@@ -2,24 +2,6 @@ import gradio as gr
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import datetime
-from simple_salesforce import Salesforce
-import os
-import uuid
-import tempfile
-
-# Salesforce configuration
-SF_USERNAME = os.getenv('SF_USERNAME', 'your_salesforce_username')
-SF_PASSWORD = os.getenv('SF_PASSWORD', 'your_salesforce_password')
-SF_SECURITY_TOKEN = os.getenv('SF_SECURITY_TOKEN', 'your_salesforce_security_token')
-SF_DOMAIN = 'login'  # Use 'test' for sandbox or 'login' for production
-
-# Initialize Salesforce connection
-sf = Salesforce(
-    username=SF_USERNAME,
-    password=SF_PASSWORD,
-    security_token=SF_SECURITY_TOKEN,
-    domain=SF_DOMAIN
-)
 
 # Initialize model and tokenizer (preloading them for quicker response)
 model_name = "distilgpt2"
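
Note: the context line kept at the end of this hunk refers to preloading the model and tokenizer. The actual loading lines sit outside the hunk, so the following is only a minimal sketch of what that preload presumably looks like, not code taken from the diff:

from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "distilgpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)     # loaded once at import time
model = AutoModelForCausalLM.from_pretrained(model_name)  # so each request reuses the cached weights
model.eval()                                              # inference only, no gradient tracking
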
@@ -114,41 +96,6 @@ def generate_outputs(role, project_id, milestones, reflection):
     quote_start = generated_text.find("Quote:") + len("Quote:")
     quote = generated_text[quote_start:].strip()
 
-    # Generate a file with the processed output
-    output_content = f"""Checklist:
-{checklist}
-
-Suggestions:
-{suggestions}
-
-Quote:
-{quote}
-"""
-    # Create a temporary file
-    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.txt', mode='w', encoding='utf-8')
-    temp_file.write(output_content)
-    temp_file.close()
-
-    # Simulate a download URL (in production, upload to a file hosting service like Salesforce Content or AWS S3)
-    file_name = f"supervisor_coaching_{uuid.uuid4()}.txt"
-    download_url = f"/tmp/{file_name}"  # Placeholder URL; replace with actual file hosting URL in production
-    os.rename(temp_file.name, os.path.join(tempfile.gettempdir(), file_name))
-
-    # Save to Salesforce Supervisor_AI_Coaching__c object
-    try:
-        sf.Supervisor_AI_Coaching__c.create({
-            'Role__c': role,
-            'Project_ID__c': project_id,
-            'Milestones__c': milestones,
-            'Reflection__c': reflection,
-            'Checklist__c': checklist,
-            'Suggestions__c': suggestions,
-            'Quote__c': quote,
-            'Download_Link__c': download_url
-        })
-    except Exception as e:
-        print(f"Error saving to Salesforce: {str(e)}")
-
     # Return structured outputs
     return checklist, suggestions, quote
 
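
Note: the kept context lines show only the tail of the parsing logic (the "Quote:" slice). The checklist and suggestions are presumably extracted from generated_text with the same find/slice pattern; a sketch under that assumption (only quote_start and quote appear in the diff, the other names are illustrative):

checklist_start = generated_text.find("Checklist:") + len("Checklist:")   # start of checklist text
suggestions_marker = generated_text.find("Suggestions:")                  # boundary between sections
quote_marker = generated_text.find("Quote:")

checklist = generated_text[checklist_start:suggestions_marker].strip()
suggestions = generated_text[suggestions_marker + len("Suggestions:"):quote_marker].strip()
quote = generated_text[quote_marker + len("Quote:"):].strip()
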
@@ -188,4 +135,4 @@ def create_interface():
 
 if __name__ == "__main__":
     demo = create_interface()
-    demo.launch()
+    demo.launch()
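
Note: after this change, generate_outputs returns only the three strings, so create_interface() presumably maps four text inputs to three text outputs. A hedged sketch of that wiring (component labels and layout are assumptions, not taken from the diff):

def create_interface():
    # Build a simple Gradio interface around generate_outputs; the real
    # app.py may use different labels, defaults, or a Blocks layout.
    return gr.Interface(
        fn=generate_outputs,
        inputs=[
            gr.Textbox(label="Role"),
            gr.Textbox(label="Project ID"),
            gr.Textbox(label="Milestones"),
            gr.Textbox(label="Reflection", lines=4),
        ],
        outputs=[
            gr.Textbox(label="Checklist"),
            gr.Textbox(label="Suggestions"),
            gr.Textbox(label="Quote"),
        ],
    )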