Pavan Naik committed
Commit d12d397 · 1 Parent(s): 000b4ac
Use HF pipeline
Browse files
- Makefile +1 -1
- recipe_genie.egg-info/PKG-INFO +9 -0
- recipe_genie.egg-info/SOURCES.txt +12 -0
- recipe_genie.egg-info/dependency_links.txt +1 -0
- recipe_genie.egg-info/requires.txt +4 -0
- recipe_genie.egg-info/top_level.txt +2 -0
- requirements.txt +17 -4
- setup.py +6 -3
- src/__init__.py +0 -0
- src/__pycache__/__init__.cpython-312.pyc +0 -0
- src/__pycache__/app.cpython-312.pyc +0 -0
- src/app.py +30 -58
- tests/__init__.py +0 -0
- tests/__pycache__/__init__.cpython-312.pyc +0 -0
- tests/__pycache__/test_app.cpython-312-pytest-8.3.4.pyc +0 -0
- tests/test_app.py +34 -0
Makefile CHANGED
@@ -9,7 +9,7 @@ lint:
	mypy src tests

test:
-	pytest tests/ -
+	pytest tests/ -vv --cov=src/ --cov-report=term-missing

clean:
	rm -rf build/
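The new test target depends on pytest-cov for the --cov and --cov-report flags; the same commit adds pytest>=7.4.0 and pytest-cov>=4.1.0 to requirements.txt below.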
recipe_genie.egg-info/PKG-INFO ADDED
@@ -0,0 +1,9 @@
+Metadata-Version: 2.2
+Name: recipe-genie
+Version: 0.1.0
+License-File: LICENSE
+Requires-Dist: gradio
+Requires-Dist: transformers
+Requires-Dist: torch
+Requires-Dist: huggingface-hub
+Dynamic: requires-dist
recipe_genie.egg-info/SOURCES.txt ADDED
@@ -0,0 +1,12 @@
+LICENSE
+README.md
+setup.py
+recipe_genie.egg-info/PKG-INFO
+recipe_genie.egg-info/SOURCES.txt
+recipe_genie.egg-info/dependency_links.txt
+recipe_genie.egg-info/requires.txt
+recipe_genie.egg-info/top_level.txt
+src/__init__.py
+src/app.py
+tests/__init__.py
+tests/test_app.py
recipe_genie.egg-info/dependency_links.txt ADDED
@@ -0,0 +1 @@
+
recipe_genie.egg-info/requires.txt ADDED
@@ -0,0 +1,4 @@
+gradio
+transformers
+torch
+huggingface-hub
recipe_genie.egg-info/top_level.txt ADDED
@@ -0,0 +1,2 @@
+src
+tests
requirements.txt CHANGED
@@ -1,4 +1,17 @@
-
-
-
-
+huggingface-hub
+transformers
+
+
+# Linting and formatting
+black>=23.7.0
+flake8>=6.1.0
+pylint>=3.0.0
+mypy>=1.5.0
+
+
+ipython>=8.12.0
+
+# Testing
+pytest>=7.4.0
+pytest-cov>=4.1.0
+pytest-mock
setup.py CHANGED
@@ -1,10 +1,13 @@
from setuptools import setup, find_packages

setup(
-    name="
-    version="0.1",
+    name="recipe-genie",
+    version="0.1.0",
    packages=find_packages(),
    install_requires=[
-
+        "gradio",
+        "transformers",
+        "torch",
+        "huggingface-hub"
    ],
)
src/__init__.py ADDED
File without changes

src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (137 Bytes).

src/__pycache__/app.cpython-312.pyc ADDED
Binary file (1.59 kB).
src/app.py CHANGED
@@ -1,73 +1,45 @@
import gradio as gr
-from
+from transformers import pipeline
+import os
+from huggingface_hub import login

-
-llm = VLLM(
-    model="meta-llama/Llama-3.2-1B",
-    trust_remote_code=True,
-    max_new_tokens=512,
-    top_k=10,
-    top_p=0.95,
-    temperature=0.8,
-)
-
-# Recipe prompt template from previous example
-RECIPE_PROMPT = """You are a skilled chef and culinary expert. Create a detailed recipe for {dish_name} with the following format:
-
-Recipe: {dish_name}
-
-INGREDIENTS: List each ingredient with exact quantity and estimated price (USD)
-
-NUTRITIONAL INFO:
-1. Total calories
-2. Protein
-3. Carbs
-4. Fat
-5. Serving size
-
-COOKING INSTRUCTIONS:
-1. Step-by-step numbered process
-2. Include cooking temperatures and times
-3. Note any specific techniques or tips
-
-CULTURAL BACKGROUND:
-1. Origin of dish
-2. Traditional serving occasions
-3. Cultural significance
-4. Regional variations
+login(token=os.getenv('HF_TOKEN'))

-PREPARATION TIME:
-1. Prep time
-2. Cooking time
-3. Total time
-
-DIFFICULTY LEVEL: [Easy/Medium/Hard]
-
-TOOLS NEEDED: List essential kitchen equipment
-
-TIPS:
-1. Storage recommendations
-2. Substitution options
-3. Serving suggestions
-
-"""

+def setup_pipeline():
+    # return pipeline(
+    #     "text-generation",
+    #     model="meta-llama/Llama-3.2-1B",  # Smaller model suitable for CPU
+    #     device=-1  # Force CPU
+    # )
+    return None

def generate_recipe(dish_name):
-
-
-
-
+    if not dish_name:
+        return "Please enter a dish name"
+
+    try:
+
+        prompt = f"""Create a recipe for {dish_name} including:
+        - Ingredients with quantities
+        - Steps to cook
+        - Cultural background"""
+
+        result = generator(prompt, max_length=500, num_return_sequences=1)
+        return result[0]['generated_text']
+    except Exception as e:
+        return f"Error: {str(e)}"
+
+generator = setup_pipeline()

-# Gradio interface
demo = gr.Interface(
    fn=generate_recipe,
    inputs=gr.Textbox(label="Enter dish name"),
    outputs=gr.Textbox(label="Generated Recipe", lines=20),
-    title="
-    description="AI-powered recipe generator
-    examples=["Pad Thai", "Butter Chicken", "Paella"],
+    title="RecipeGenie",
+    description="AI-powered recipe generator"
)

if __name__ == "__main__":
    demo.launch()
+
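Note that in the committed src/app.py, setup_pipeline() returns None (the pipeline call is commented out), so generator(prompt, ...) raises a TypeError that is caught and surfaced as an "Error: ..." string in the UI. Below is a minimal sketch of what the wired-up version might look like, assuming the commented-out text-generation pipeline is enabled as written (meta-llama/Llama-3.2-1B on CPU via device=-1); the max_new_tokens value is an illustrative assumption, not part of the commit.

import os

from huggingface_hub import login
from transformers import pipeline

# Token access is needed for the gated meta-llama model, as in the commit.
login(token=os.getenv("HF_TOKEN"))

def setup_pipeline():
    # Same call as the commented-out block in the commit; device=-1 keeps it on CPU.
    return pipeline(
        "text-generation",
        model="meta-llama/Llama-3.2-1B",
        device=-1,
    )

generator = setup_pipeline()

def generate_recipe(dish_name):
    if not dish_name:
        return "Please enter a dish name"
    try:
        prompt = f"""Create a recipe for {dish_name} including:
        - Ingredients with quantities
        - Steps to cook
        - Cultural background"""
        # max_new_tokens (assumed here) counts only generated tokens, whereas the
        # commit's max_length=500 also counts the prompt tokens.
        result = generator(prompt, max_new_tokens=400, num_return_sequences=1)
        return result[0]["generated_text"]
    except Exception as e:
        return f"Error: {e}"

With generator wired up this way, the gr.Interface block from the commit can be reused unchanged.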
tests/__init__.py ADDED
File without changes

tests/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (139 Bytes).

tests/__pycache__/test_app.cpython-312-pytest-8.3.4.pyc ADDED
Binary file (408 Bytes).
tests/test_app.py CHANGED
@@ -0,0 +1,34 @@
+import pytest
+from unittest.mock import Mock, patch
+from src.app import generate_recipe, setup_pipeline
+
+# @pytest.fixture
+# def mock_pipeline():
+#     with patch('src.app.pipeline') as mock:
+#         mock_generator = Mock()
+#         mock_generator.return_value = [{'generated_text': 'Test recipe output'}]
+#         mock.return_value = mock_generator
+#         yield mock
+
+# def test_empty_input():
+#     result = generate_recipe("")
+#     assert "Please enter a dish name" in result
+
+# def test_generate_recipe_success(mock_pipeline):
+#     result = generate_recipe("Pasta")
+#     assert isinstance(result, str)
+#     assert "Test recipe output" in result
+
+# def test_generate_recipe_exception():
+#     with patch('src.app.generator', side_effect=Exception("Test error")):
+#         result = generate_recipe("Pasta")
+#         assert "Error:" in result
+
+# def test_pipeline_creation():
+#     with patch('src.app.pipeline') as mock_pipeline:
+#         setup_pipeline()
+#         mock_pipeline.assert_called_once_with(
+#             "text-generation",
+#             model="meta-llama/Llama-3.2-1B",
+#             device=-1
+#         )