Spaces:
				
			
			
	
			
			
					
		Running
		
	
	
	
			
			
	
	
	
	
		
		
					
		Running
		
	Update app.py
Browse files
    	
        app.py
    CHANGED
    
    | @@ -614,10 +614,15 @@ class DataQualityPipeline: | |
| 614 | 
             
            # ์์คํ
 ์ธ์คํด์ค ์์ฑ
         | 
| 615 | 
             
            llm_system = LLMCollaborativeSystem()
         | 
| 616 |  | 
| 617 | 
            -
             | 
|  | |
|  | |
|  | |
| 618 | 
             
                """์คํธ๋ฆฌ๋ฐ์ ์ง์ํ๋ ์ฟผ๋ฆฌ ์ฒ๋ฆฌ"""
         | 
|  | |
|  | |
| 619 | 
             
                if not user_query:
         | 
| 620 | 
            -
                    return  | 
| 621 |  | 
| 622 | 
             
                conversation_log = []
         | 
| 623 | 
             
                all_responses = {"supervisor": [], "researcher": [], "executor": []}
         | 
| @@ -634,7 +639,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 634 | 
             
                    ):
         | 
| 635 | 
             
                        supervisor_initial_response += chunk
         | 
| 636 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
         | 
| 637 | 
            -
                        yield  | 
| 638 |  | 
| 639 | 
             
                    all_responses["supervisor"].append(supervisor_initial_response)
         | 
| 640 |  | 
| @@ -644,7 +649,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 644 |  | 
| 645 | 
             
                    # 2๋จ๊ณ: ๋ธ๋ ์ด๋ธ ๊ฒ์ ์ํ
         | 
| 646 | 
             
                    researcher_text = "[์น ๊ฒ์] ๐ ๊ฒ์ ์ค...\n"
         | 
| 647 | 
            -
                    yield  | 
| 648 |  | 
| 649 | 
             
                    search_results = {}
         | 
| 650 | 
             
                    for keyword in keywords:
         | 
| @@ -652,7 +657,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 652 | 
             
                        if results:
         | 
| 653 | 
             
                            search_results[keyword] = results
         | 
| 654 | 
             
                            researcher_text += f"โ '{keyword}' ๊ฒ์ ์๋ฃ\n"
         | 
| 655 | 
            -
                            yield  | 
| 656 |  | 
| 657 | 
             
                    # 3๋จ๊ณ: ์กฐ์ฌ์ AI๊ฐ ๊ฒ์ ๊ฒฐ๊ณผ ์ ๋ฆฌ
         | 
| 658 | 
             
                    researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
         | 
| @@ -665,7 +670,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 665 | 
             
                    ):
         | 
| 666 | 
             
                        researcher_response += chunk
         | 
| 667 | 
             
                        researcher_text = f"[์กฐ์ฌ ๊ฒฐ๊ณผ ์ ๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
         | 
| 668 | 
            -
                        yield  | 
| 669 |  | 
| 670 | 
             
                    all_responses["researcher"].append(researcher_response)
         | 
| 671 |  | 
| @@ -681,7 +686,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 681 | 
             
                        supervisor_execution_response += chunk
         | 
| 682 | 
             
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
         | 
| 683 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 684 | 
            -
                        yield  | 
| 685 |  | 
| 686 | 
             
                    all_responses["supervisor"].append(supervisor_execution_response)
         | 
| 687 |  | 
| @@ -696,7 +701,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 696 | 
             
                    ):
         | 
| 697 | 
             
                        executor_response += chunk
         | 
| 698 | 
             
                        executor_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
         | 
| 699 | 
            -
                        yield  | 
| 700 |  | 
| 701 | 
             
                    all_responses["executor"].append(executor_response)
         | 
| 702 |  | 
| @@ -720,7 +725,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 720 | 
             
                        review_response += chunk
         | 
| 721 | 
             
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒํ  ๋ฐ ํผ๋๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
         | 
| 722 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 723 | 
            -
                        yield  | 
| 724 |  | 
| 725 | 
             
                    all_responses["supervisor"].append(review_response)
         | 
| 726 |  | 
| @@ -741,7 +746,7 @@ def process_query_streaming(user_query: str, history: List): | |
| 741 | 
             
                        final_executor_response += chunk
         | 
| 742 | 
             
                        temp_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ต์ข
 ๋ณด๊ณ ์] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
         | 
| 743 | 
             
                        executor_text = temp_text
         | 
| 744 | 
            -
                        yield  | 
| 745 |  | 
| 746 | 
             
                    all_responses["executor"].append(final_executor_response)
         | 
| 747 |  | 
| @@ -779,18 +784,20 @@ def process_query_streaming(user_query: str, history: List): | |
| 779 | 
             
            ---
         | 
| 780 | 
             
            *์ด ๋ณด๊ณ ์๋ ์น ๊ฒ์์ ํตํ ์ต์  ์ ๋ณด์ AI๋ค์ ํ๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํผ๋๋ฐฑ ๋ฐ์์ ํตํด ์์ฑ๋์์ต๋๋ค.*"""
         | 
| 781 |  | 
| 782 | 
            -
                    # ํ์คํ ๋ฆฌ ์
๋ฐ์ดํธ
         | 
| 783 | 
            -
                     | 
| 784 |  | 
| 785 | 
            -
                    yield  | 
| 786 |  | 
| 787 | 
             
                except Exception as e:
         | 
| 788 | 
             
                    error_msg = f"โ ์ฒ๋ฆฌ ์ค ์ค๋ฅ: {str(e)}"
         | 
| 789 | 
            -
                    yield  | 
| 790 |  | 
| 791 | 
             
            def clear_all():
         | 
| 792 | 
             
                """๋ชจ๋  ๋ด์ฉ ์ด๊ธฐํ"""
         | 
| 793 | 
            -
                 | 
|  | |
|  | |
| 794 |  | 
| 795 | 
             
            # Gradio ์ธํฐํ์ด์ค
         | 
| 796 | 
             
            css = """
         | 
| @@ -833,16 +840,9 @@ with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) | |
| 833 | 
             
                    """
         | 
| 834 | 
             
                )
         | 
| 835 |  | 
|  | |
| 836 | 
             
                with gr.Row():
         | 
| 837 | 
            -
                     | 
| 838 | 
            -
                    with gr.Column(scale=1):
         | 
| 839 | 
            -
                        chatbot = gr.Chatbot(
         | 
| 840 | 
            -
                            label="๐ฌ ๋ํ ๊ธฐ๋ก",
         | 
| 841 | 
            -
                            height=600,
         | 
| 842 | 
            -
                            show_copy_button=True,
         | 
| 843 | 
            -
                            bubble_full_width=False
         | 
| 844 | 
            -
                        )
         | 
| 845 | 
            -
                        
         | 
| 846 | 
             
                        user_input = gr.Textbox(
         | 
| 847 | 
             
                            label="์ง๋ฌธ ์
๋ ฅ",
         | 
| 848 | 
             
                            placeholder="์: ๊ธฐ๊ณํ์ต ๋ชจ๋ธ์ ์ฑ๋ฅ์ ํฅ์์ํค๋ ๋ฐฉ๋ฒ์?",
         | 
| @@ -859,1026 +859,49 @@ with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) | |
| 859 | 
             
                            value="๋๊ธฐ ์ค...",
         | 
| 860 | 
             
                            max_lines=1
         | 
| 861 | 
             
                        )
         | 
| 862 | 
            -
             | 
| 863 | 
            -
             | 
| 864 | 
            -
             | 
| 865 | 
            -
             | 
| 866 | 
             
                        with gr.Accordion("๐ ์ต์ข
 ์ข
ํฉ ๊ฒฐ๊ณผ", open=True):
         | 
| 867 | 
             
                            final_output = gr.Markdown(
         | 
| 868 | 
             
                                value="*์ง๋ฌธ์ ์
๋ ฅํ๋ฉด ๊ฒฐ๊ณผ๊ฐ ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค.*"
         | 
| 869 | 
             
                            )
         | 
| 870 | 
            -
                        
         | 
| 871 | 
            -
                        # AI ์ถ๋ ฅ๋ค
         | 
| 872 | 
            -
                        with gr.Row():
         | 
| 873 | 
            -
                            # ๊ฐ๋
์ AI ์ถ๋ ฅ
         | 
| 874 | 
            -
                            with gr.Column():
         | 
| 875 | 
            -
                                gr.Markdown("### ๐ง  ๊ฐ๋
์ AI (๊ฑฐ์์  ๋ถ์)")
         | 
| 876 | 
            -
                                supervisor_output = gr.Textbox(
         | 
| 877 | 
            -
                                    label="",
         | 
| 878 | 
            -
                                    lines=12,
         | 
| 879 | 
            -
                                    max_lines=15,
         | 
| 880 | 
            -
                                    interactive=False,
         | 
| 881 | 
            -
                                    elem_classes=["supervisor-box"]
         | 
| 882 | 
            -
                                )
         | 
| 883 | 
            -
                        
         | 
| 884 | 
            -
                        with gr.Row():
         | 
| 885 | 
            -
                            # ์กฐ์ฌ์ AI ์ถ๋ ฅ
         | 
| 886 | 
            -
                            with gr.Column():
         | 
| 887 | 
            -
                                gr.Markdown("### ๐ ์กฐ์ฌ์ AI (์น ๊ฒ์ & ์ ๋ฆฌ)")
         | 
| 888 | 
            -
                                researcher_output = gr.Textbox(
         | 
| 889 | 
            -
                                    label="",
         | 
| 890 | 
            -
                                    lines=12,
         | 
| 891 | 
            -
                                    max_lines=15,
         | 
| 892 | 
            -
                                    interactive=False,
         | 
| 893 | 
            -
                                    elem_classes=["researcher-box"]
         | 
| 894 | 
            -
                                )
         | 
| 895 | 
            -
                            
         | 
| 896 | 
            -
                            # ์คํ์ AI ์ถ๋ ฅ
         | 
| 897 | 
            -
                            with gr.Column():
         | 
| 898 | 
            -
                                gr.Markdown("### ๐๏ธ ์คํ์ AI (๋ฏธ์์  ๊ตฌํ)")
         | 
| 899 | 
            -
                                executor_output = gr.Textbox(
         | 
| 900 | 
            -
                                    label="",
         | 
| 901 | 
            -
                                    lines=12,
         | 
| 902 | 
            -
                                    max_lines=15,
         | 
| 903 | 
            -
                                    interactive=False,
         | 
| 904 | 
            -
                                    elem_classes=["executor-box"]
         | 
| 905 | 
            -
                                )
         | 
| 906 | 
            -
                
         | 
| 907 | 
            -
                # ์์ 
         | 
| 908 | 
            -
                gr.Examples(
         | 
| 909 | 
            -
                    examples=[
         | 
| 910 | 
            -
                        "๊ธฐ๊ณํ์ต ๋ชจ๋ธ์ ์ฑ๋ฅ์ ํฅ์์ํค๋ ์ต์  ๋ฐฉ๋ฒ์?",
         | 
| 911 | 
            -
                        "2024๋
 ํจ๊ณผ์ ์ธ ํ๋ก์ ํธ ๊ด๋ฆฌ ๋๊ตฌ์ ์ ๋ต์?",
         | 
| 912 | 
            -
                        "์ง์ ๊ฐ๋ฅํ ๋น์ฆ๋์ค ๋ชจ๋ธ์ ์ต์  ํธ๋ ๋๋?",
         | 
| 913 | 
            -
                        "์ต์  ๋ฐ์ดํฐ ์๊ฐํ ๋๊ตฌ์ ๊ธฐ๋ฒ์?",
         | 
| 914 | 
            -
                        "์๊ฒฉ ํ์ ์์ฐ์ฑ์ ๋์ด๋ ๊ฒ์ฆ๋ ๋ฐฉ๋ฒ์?"
         | 
| 915 | 
            -
                    ],
         | 
| 916 | 
            -
                    inputs=user_input,
         | 
| 917 | 
            -
                    label="๐ก ์์  ์ง๋ฌธ"
         | 
| 918 | 
            -
                )
         | 
| 919 | 
            -
                
         | 
| 920 | 
            -
                # ์ด๋ฒคํธ ํธ๋ค๋ฌ
         | 
| 921 | 
            -
                submit_btn.click(
         | 
| 922 | 
            -
                    fn=process_query_streaming,
         | 
| 923 | 
            -
                    inputs=[user_input, chatbot],
         | 
| 924 | 
            -
                    outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 925 | 
            -
                ).then(
         | 
| 926 | 
            -
                    fn=lambda: "",
         | 
| 927 | 
            -
                    outputs=[user_input]
         | 
| 928 | 
            -
                )
         | 
| 929 | 
            -
                
         | 
| 930 | 
            -
                user_input.submit(
         | 
| 931 | 
            -
                    fn=process_query_streaming,
         | 
| 932 | 
            -
                    inputs=[user_input, chatbot],
         | 
| 933 | 
            -
                    outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 934 | 
            -
                ).then(
         | 
| 935 | 
            -
                    fn=lambda: "",
         | 
| 936 | 
            -
                    outputs=[user_input]
         | 
| 937 | 
            -
                )
         | 
| 938 | 
            -
                
         | 
| 939 | 
            -
                clear_btn.click(
         | 
| 940 | 
            -
                    fn=clear_all,
         | 
| 941 | 
            -
                    outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 942 | 
            -
                )
         | 
| 943 | 
            -
                
         | 
| 944 | 
            -
                gr.Markdown(
         | 
| 945 | 
            -
                    """
         | 
| 946 | 
            -
                    ---
         | 
| 947 | 
            -
                    ### ๐ ์ฌ์ฉ ๋ฐฉ๋ฒ
         | 
| 948 | 
            -
                    1. ์ง๋ฌธ์ ์
๋ ฅํ๊ณ  Enter ๋๋ '๋ถ์ ์์' ๋ฒํผ์ ํด๋ฆญํ์ธ์.
         | 
| 949 | 
            -
                    2. 7๋จ๊ณ ํ๋ ฅ ํ๋ก์ธ์ค๊ฐ ์งํ๋ฉ๋๋ค:
         | 
| 950 | 
            -
                       - ๊ฐ๋
์ ์ด๊ธฐ ๋ถ์ โ ์น ๊ฒ์ โ ์กฐ์ฌ ์ ๋ฆฌ โ ์คํ ์ง์ โ ์ด๊ธฐ ๊ตฌํ โ ํผ๋๋ฐฑ โ ์ต์ข
 ๋ณด๊ณ ์
         | 
| 951 | 
            -
                    3. ๊ฐ AI์ ์์
 ๊ณผ์ ์ ์ค์๊ฐ์ผ๋ก ํ์ธํ  ์ ์์ต๋๋ค.
         | 
| 952 | 
            -
                    4. ์ต์ข
 ๋ณด๊ณ ์๊ฐ ์๋จ์ ํ์๋๋ฉฐ, ์ ์ฒด ํ๋ ฅ ๊ณผ์ ์ ์ ์ ์ ์๋ ํํ๋ก ์ ๊ณต๋ฉ๋๋ค.
         | 
| 953 | 
            -
                    
         | 
| 954 | 
            -
                    ### โ๏ธ ํ๊ฒฝ ์ค์ 
         | 
| 955 | 
            -
                    - **LLM API**: `export FRIENDLI_TOKEN="your_token"`
         | 
| 956 | 
            -
                    - **Brave Search API**: `export BAPI_TOKEN="your_brave_api_token"`
         | 
| 957 | 
            -
                    - **ํ
์คํธ ๋ชจ๋**: `export TEST_MODE=true` (API ์์ด ์๋)
         | 
| 958 | 
            -
                    
         | 
| 959 | 
            -
                    ### ๐ API ํค ํ๋
         | 
| 960 | 
            -
                    - Friendli API: [https://friendli.ai](https://friendli.ai)
         | 
| 961 | 
            -
                    - Brave Search API: [https://brave.com/search/api/](https://brave.com/search/api/)
         | 
| 962 | 
            -
                    
         | 
| 963 | 
            -
                    ### ๐ก ํน์ง
         | 
| 964 | 
            -
                    - ์์ ํ ํผ๋๋ฐฑ ๋ฃจํ: ๊ฐ๋
์์ ํผ๋๋ฐฑ์ด ์คํ์์๊ฒ ์ ๋ฌ๋์ด ์ต์ข
 ๊ฐ์ 
         | 
| 965 | 
            -
                    - ์น ๊ฒ์ ๊ธฐ๋ฐ: ์ต์  ์ ๋ณด์ ์ฌ๋ก๋ฅผ ํ์ฉํ ์ค์ฉ์  ๋ต๋ณ
         | 
| 966 | 
            -
                    - ์ ๋ฌธ ๋ณด๊ณ ์ ํ์: ์ค๋ฌด์์ ๋ฐ๋ก ํ์ฉ ๊ฐ๋ฅํ ๊ตฌ์กฐํ๋ ๊ฒฐ๊ณผ๋ฌผ
         | 
| 967 | 
            -
                    """
         | 
| 968 | 
            -
                )
         | 
| 969 | 
            -
             | 
| 970 | 
            -
            if __name__ == "__main__":
         | 
| 971 | 
            -
                app.queue()  # ์คํธ๋ฆฌ๋ฐ์ ์ํ ํ ํ์ฑํ
         | 
| 972 | 
            -
                app.launch(
         | 
| 973 | 
            -
                    server_name="0.0.0.0",
         | 
| 974 | 
            -
                    server_port=7860,
         | 
| 975 | 
            -
                    share=True,
         | 
| 976 | 
            -
                    show_error=True
         | 
| 977 | 
            -
                )import gradio as gr
         | 
| 978 | 
            -
            import os
         | 
| 979 | 
            -
            import json
         | 
| 980 | 
            -
            import requests
         | 
| 981 | 
            -
            from datetime import datetime
         | 
| 982 | 
            -
            import time
         | 
| 983 | 
            -
            from typing import List, Dict, Any, Generator, Tuple
         | 
| 984 | 
            -
            import logging
         | 
| 985 | 
            -
            import re
         | 
| 986 | 
            -
             | 
| 987 | 
            -
            # ๋ก๊น
 ์ค์ 
         | 
| 988 | 
            -
            logging.basicConfig(level=logging.INFO)
         | 
| 989 | 
            -
            logger = logging.getLogger(__name__)
         | 
| 990 | 
            -
             | 
| 991 | 
            -
            # ํ๊ฒฝ ๋ณ์์์ ํ ํฐ ๊ฐ์ ธ์ค๊ธฐ
         | 
| 992 | 
            -
            FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "YOUR_FRIENDLI_TOKEN")
         | 
| 993 | 
            -
            BAPI_TOKEN = os.getenv("BAPI_TOKEN", "YOUR_BRAVE_API_TOKEN")
         | 
| 994 | 
            -
            API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
         | 
| 995 | 
            -
            BRAVE_SEARCH_URL = "https://api.search.brave.com/res/v1/web/search"
         | 
| 996 | 
            -
            MODEL_ID = "dep89a2fld32mcm"
         | 
| 997 | 
            -
            TEST_MODE = os.getenv("TEST_MODE", "false").lower() == "true"
         | 
| 998 | 
            -
             | 
| 999 | 
            -
            # ์ ์ญ ๋ณ์
         | 
| 1000 | 
            -
            conversation_history = []
         | 
| 1001 | 
            -
             | 
| 1002 | 
            -
            class LLMCollaborativeSystem:
         | 
| 1003 | 
            -
                def __init__(self):
         | 
| 1004 | 
            -
                    self.token = FRIENDLI_TOKEN
         | 
| 1005 | 
            -
                    self.bapi_token = BAPI_TOKEN
         | 
| 1006 | 
            -
                    self.api_url = API_URL
         | 
| 1007 | 
            -
                    self.brave_url = BRAVE_SEARCH_URL
         | 
| 1008 | 
            -
                    self.model_id = MODEL_ID
         | 
| 1009 | 
            -
                    self.test_mode = TEST_MODE or (self.token == "YOUR_FRIENDLI_TOKEN")
         | 
| 1010 | 
            -
                    
         | 
| 1011 | 
            -
                    if self.test_mode:
         | 
| 1012 | 
            -
                        logger.warning("ํ
์คํธ ๋ชจ๋๋ก ์คํ๋ฉ๋๋ค.")
         | 
| 1013 | 
            -
                    if self.bapi_token == "YOUR_BRAVE_API_TOKEN":
         | 
| 1014 | 
            -
                        logger.warning("Brave API ํ ํฐ์ด ์ค์ ๋์ง ์์์ต๋๋ค.")
         | 
| 1015 | 
            -
                    
         | 
| 1016 | 
            -
                def create_headers(self):
         | 
| 1017 | 
            -
                    """API ํค๋ ์์ฑ"""
         | 
| 1018 | 
            -
                    return {
         | 
| 1019 | 
            -
                        "Authorization": f"Bearer {self.token}",
         | 
| 1020 | 
            -
                        "Content-Type": "application/json"
         | 
| 1021 | 
            -
                    }
         | 
| 1022 | 
            -
                
         | 
| 1023 | 
            -
                def create_brave_headers(self):
         | 
| 1024 | 
            -
                    """Brave API ํค๋ ์์ฑ"""
         | 
| 1025 | 
            -
                    return {
         | 
| 1026 | 
            -
                        "Accept": "application/json",
         | 
| 1027 | 
            -
                        "Accept-Encoding": "gzip",
         | 
| 1028 | 
            -
                        "X-Subscription-Token": self.bapi_token
         | 
| 1029 | 
            -
                    }
         | 
| 1030 | 
            -
                
         | 
| 1031 | 
            -
                def create_supervisor_initial_prompt(self, user_query: str) -> str:
         | 
| 1032 | 
            -
                    """๊ฐ๋
์ AI ์ด๊ธฐ ํ๋กฌํํธ ์์ฑ"""
         | 
| 1033 | 
            -
                    return f"""๋น์ ์ ๊ฑฐ์์  ๊ด์ ์์ ๋ถ์ํ๊ณ  ์ง๋ํ๋ ๊ฐ๋
์ AI์
๋๋ค.
         | 
| 1034 | 
            -
             | 
| 1035 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1036 | 
            -
             | 
| 1037 | 
            -
            ์ด ์ง๋ฌธ์ ๋ํด:
         | 
| 1038 | 
            -
            1. ์ ์ฒด์ ์ธ ์ ๊ทผ ๋ฐฉํฅ๊ณผ ํ๋ ์์ํฌ๋ฅผ ์ ์ํ์ธ์
         | 
| 1039 | 
            -
            2. ํต์ฌ ์์์ ๊ณ ๋ ค์ฌํญ์ ๊ตฌ์กฐํํ์ฌ ์ค๋ช
ํ์ธ์
         | 
| 1040 | 
            -
            3. ์ด ์ฃผ์ ์ ๋ํด ์กฐ์ฌ๊ฐ ํ์ํ 5-7๊ฐ์ ๊ตฌ์ฒด์ ์ธ ํค์๋๋ ๊ฒ์์ด๋ฅผ ์ ์ํ์ธ์
         | 
| 1041 | 
            -
             | 
| 1042 | 
            -
            ํค์๋๋ ๋ค์ ํ์์ผ๋ก ์ ์ํ์ธ์:
         | 
| 1043 | 
            -
            [๊ฒ์ ํค์๋]: ํค์๋1, ํค์๋2, ํค์๋3, ํค์๋4, ํค์๋5"""
         | 
| 1044 | 
            -
                
         | 
| 1045 | 
            -
                def create_researcher_prompt(self, user_query: str, supervisor_guidance: str, search_results: Dict[str, List[Dict]]) -> str:
         | 
| 1046 | 
            -
                    """์กฐ์ฌ์ AI ํ๋กฌํํธ ์์ฑ"""
         | 
| 1047 | 
            -
                    search_summary = ""
         | 
| 1048 | 
            -
                    for keyword, results in search_results.items():
         | 
| 1049 | 
            -
                        search_summary += f"\n\n**{keyword}์ ๋ํ ๊ฒ์ ๊ฒฐ๊ณผ:**\n"
         | 
| 1050 | 
            -
                        for i, result in enumerate(results[:3], 1):
         | 
| 1051 | 
            -
                            search_summary += f"{i}. {result.get('title', 'N/A')}\n"
         | 
| 1052 | 
            -
                            search_summary += f"   - {result.get('description', 'N/A')}\n"
         | 
| 1053 | 
            -
                            search_summary += f"   - ์ถ์ฒ: {result.get('url', 'N/A')}\n"
         | 
| 1054 | 
            -
                    
         | 
| 1055 | 
            -
                    return f"""๋น์ ์ ์ ๋ณด๋ฅผ ์กฐ์ฌํ๊ณ  ์ ๋ฆฌํ๋ ์กฐ์ฌ์ AI์
๋๋ค.
         | 
| 1056 | 
            -
             | 
| 1057 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1058 | 
            -
             | 
| 1059 | 
            -
            ๊ฐ๋
์ AI์ ์ง์นจ:
         | 
| 1060 | 
            -
            {supervisor_guidance}
         | 
| 1061 | 
            -
             | 
| 1062 | 
            -
            ๋ธ๋ ์ด๋ธ ๊ฒ์ ๊ฒฐ๊ณผ:
         | 
| 1063 | 
            -
            {search_summary}
         | 
| 1064 | 
            -
             | 
| 1065 | 
            -
            ์ ๊ฒ์ ๊ฒฐ๊ณผ๋ฅผ ๋ฐํ์ผ๋ก:
         | 
| 1066 | 
            -
            1. ๊ฐ ํค์๋๋ณ๋ก ์ค์ํ ์ ๋ณด๋ฅผ ์ ๋ฆฌํ์ธ์
         | 
| 1067 | 
            -
            2. ์ ๋ขฐํ  ์ ์๋ ์ถ์ฒ๋ฅผ ๋ช
์ํ์ธ์
         | 
| 1068 | 
            -
            3. ์คํ์ AI๊ฐ ํ์ฉํ  ์ ์๋ ๊ตฌ์ฒด์ ์ธ ๋ฐ์ดํฐ์ ์ฌ์ค์ ์ถ์ถํ์ธ์
         | 
| 1069 | 
            -
            4. ์ต์  ํธ๋ ๋๋ ์ค์ํ ํต๊ณ๊ฐ ์๋ค๋ฉด ๊ฐ์กฐํ์ธ์"""
         | 
| 1070 | 
            -
                
         | 
| 1071 | 
            -
                def create_supervisor_execution_prompt(self, user_query: str, research_summary: str) -> str:
         | 
| 1072 | 
            -
                    """๊ฐ๋
์ AI์ ์คํ ์ง์ ํ๋กฌํํธ"""
         | 
| 1073 | 
            -
                    return f"""๋น์ ์ ๊ฑฐ์์  ๊ด์ ์์ ๋ถ์ํ๊ณ  ์ง๋ํ๋ ๊ฐ๋
์ AI์
๋๋ค.
         | 
| 1074 | 
            -
             | 
| 1075 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1076 | 
            -
             | 
| 1077 | 
            -
            ์กฐ์ฌ์ AI๊ฐ ์ ๋ฆฌํ ์กฐ์ฌ ๋ด์ฉ:
         | 
| 1078 | 
            -
            {research_summary}
         | 
| 1079 | 
            -
             | 
| 1080 | 
            -
            ์ ์กฐ์ฌ ๋ด์ฉ์ ๊ธฐ๋ฐ์ผ๋ก ์คํ์ AI์๊ฒ ์์ฃผ ๊ตฌ์ฒด์ ์ธ ์ง์๋ฅผ ๋ด๋ ค์ฃผ์ธ์:
         | 
| 1081 | 
            -
            1. ์กฐ์ฌ๋ ์ ๋ณด๋ฅผ ์ด๋ป๊ฒ ํ์ฉํ ์ง ๋ช
ํํ ์ง์ํ์ธ์
         | 
| 1082 | 
            -
            2. ์คํ ๊ฐ๋ฅํ ๋จ๊ณ๋ณ ์์
์ ๊ตฌ์ฒด์ ์ผ๋ก ์ ์ํ์ธ์
         | 
| 1083 | 
            -
            3. ๊ฐ ๋จ๊ณ์์ ์ฐธ๊ณ ํด์ผ ํ  ์กฐ์ฌ ๋ด์ฉ์ ๋ช
์ํ์ธ์
         | 
| 1084 | 
            -
            4. ์์๋๋ ๊ฒฐ๊ณผ๋ฌผ์ ํํ๋ฅผ ๊ตฌ์ฒด์ ์ผ๋ก ์ค๋ช
ํ์ธ์"""
         | 
| 1085 | 
            -
                
         | 
| 1086 | 
            -
                def create_executor_prompt(self, user_query: str, supervisor_guidance: str, research_summary: str) -> str:
         | 
| 1087 | 
            -
                    """์คํ์ AI ํ๋กฌํํธ ์์ฑ"""
         | 
| 1088 | 
            -
                    return f"""๋น์ ์ ์ธ๋ถ์ ์ธ ๋ด์ฉ์ ๊ตฌํํ๋ ์คํ์ AI์
๋๋ค.
         | 
| 1089 | 
            -
             | 
| 1090 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1091 | 
            -
             | 
| 1092 | 
            -
            ์กฐ์ฌ์ AI๊ฐ ์ ๋ฆฌํ ์กฐ์ฌ ๋ด์ฉ:
         | 
| 1093 | 
            -
            {research_summary}
         | 
| 1094 | 
            -
             | 
| 1095 | 
            -
            ๊ฐ๋
์ AI์ ๊ตฌ์ฒด์ ์ธ ์ง์:
         | 
| 1096 | 
            -
            {supervisor_guidance}
         | 
| 1097 | 
            -
             | 
| 1098 | 
            -
            ์ ์กฐ์ฌ ๋ด์ฉ๊ณผ ์ง์์ฌํญ์ ๋ฐํ์ผ๋ก:
         | 
| 1099 | 
            -
            1. ์กฐ์ฌ๋ ์ ๋ณด๋ฅผ ์ ๊ทน ํ์ฉํ์ฌ ๊ตฌ์ฒด์ ์ธ ์คํ ๊ณํ์ ์์ฑํ์ธ์
         | 
| 1100 | 
            -
            2. ๊ฐ ๋จ๊ณ๋ณ๋ก ์ฐธ๊ณ ํ ์กฐ์ฌ ๋ด์ฉ์ ๋ช
์ํ์ธ์
         | 
| 1101 | 
            -
            3. ์ค์ ๋ก ์ ์ฉ ๊ฐ๋ฅํ ๊ตฌ์ฒด์ ์ธ ๋ฐฉ๋ฒ๋ก ์ ์ ์ํ์ธ์
         | 
| 1102 | 
            -
            4. ์์๋๋ ์ฑ๊ณผ์ ์ธก์  ๋ฐฉ๋ฒ์ ํฌํจํ์ธ์"""
         | 
| 1103 | 
            -
                
         | 
| 1104 | 
            -
                def create_executor_final_prompt(self, user_query: str, initial_response: str, supervisor_feedback: str, research_summary: str) -> str:
         | 
| 1105 | 
            -
                    """์คํ์ AI ์ต์ข
 ๋ณด๊ณ ์ ํ๋กฌํํธ"""
         | 
| 1106 | 
            -
                    return f"""๋น์ ์ ์ธ๋ถ์ ์ธ ๋ด์ฉ์ ๊ตฌํํ๋ ์คํ์ AI์
๋๋ค.
         | 
| 1107 | 
            -
             | 
| 1108 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1109 | 
            -
             | 
| 1110 | 
            -
            ์กฐ์ฌ์ AI์ ์กฐ์ฌ ๋ด์ฉ:
         | 
| 1111 | 
            -
            {research_summary}
         | 
| 1112 | 
            -
             | 
| 1113 | 
            -
            ๋น์ ์ ์ด๊ธฐ ๋ต๋ณ:
         | 
| 1114 | 
            -
            {initial_response}
         | 
| 1115 | 
            -
             | 
| 1116 | 
            -
            ๊ฐ๋
์ AI์ ํผ๋๋ฐฑ ๋ฐ ๊ฐ์ ์ฌํญ:
         | 
| 1117 | 
            -
            {supervisor_feedback}
         | 
| 1118 | 
            -
             | 
| 1119 | 
            -
            ์ ํผ๋๋ฐฑ์ ์์ ํ ๋ฐ์ํ์ฌ ์ต์ข
 ๋ณด๊ณ ์๋ฅผ ์์ฑํ์ธ์:
         | 
| 1120 | 
            -
            1. ๊ฐ๋
์์ ๋ชจ๋  ๊ฐ์ ์ฌํญ์ ๋ฐ์ํ์ธ์
         | 
| 1121 | 
            -
            2. ์กฐ์ฌ ๋ด์ฉ์ ๋์ฑ ๊ตฌ์ฒด์ ์ผ๋ก ํ์ฉํ์ธ์
         | 
| 1122 | 
            -
            3. ์คํ ๊ฐ๋ฅ์ฑ์ ๋์ด๋ ์ธ๋ถ ๊ณํ์ ํฌํจํ์ธ์
         | 
| 1123 | 
            -
            4. ๋ช
ํํ ๊ฒฐ๋ก ๊ณผ ๋ค์ ๋จ๊ณ๋ฅผ ์ ์ํ์ธ์
         | 
| 1124 | 
            -
            5. ์ ๋ฌธ์ ์ด๊ณ  ์์ฑ๋ ๋์ ์ต์ข
 ๋ณด๊ณ ์ ํ์์ผ๋ก ์์ฑํ์ธ์"""
         | 
| 1125 |  | 
| 1126 | 
            -
                 | 
| 1127 | 
            -
             | 
| 1128 | 
            -
                     | 
| 1129 | 
            -
                    
         | 
| 1130 | 
            -
             | 
| 1131 | 
            -
             | 
| 1132 | 
            -
             | 
| 1133 | 
            -
             | 
| 1134 | 
            -
             | 
|  | |
|  | |
|  | |
| 1135 |  | 
| 1136 | 
            -
                    #  | 
| 1137 | 
            -
                     | 
| 1138 | 
            -
                         | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 1139 |  | 
| 1140 | 
            -
                     | 
| 1141 | 
            -
             | 
| 1142 | 
            -
             | 
| 1143 | 
            -
             | 
| 1144 | 
            -
             | 
| 1145 | 
            -
             | 
| 1146 | 
            -
             | 
| 1147 | 
            -
                            {
         | 
| 1148 | 
            -
                                "title": f"Best Practices for {query}",
         | 
| 1149 | 
            -
                                "description": f"Comprehensive guide on implementing {query} with proven methodologies and real-world examples.",
         | 
| 1150 | 
            -
                                "url": f"https://example.com/{query.replace(' ', '-')}"
         | 
| 1151 | 
            -
                            },
         | 
| 1152 | 
            -
                            {
         | 
| 1153 | 
            -
                                "title": f"Latest Trends in {query}",
         | 
| 1154 | 
            -
                                "description": f"Analysis of current trends and future directions in {query}, including market insights and expert opinions.",
         | 
| 1155 | 
            -
                                "url": f"https://trends.example.com/{query.replace(' ', '-')}"
         | 
| 1156 | 
            -
                            },
         | 
| 1157 | 
            -
                            {
         | 
| 1158 | 
            -
                                "title": f"{query}: Case Studies and Success Stories",
         | 
| 1159 | 
            -
                                "description": f"Real-world implementations of {query} across various industries with measurable results.",
         | 
| 1160 | 
            -
                                "url": f"https://casestudies.example.com/{query.replace(' ', '-')}"
         | 
| 1161 | 
            -
                            }
         | 
| 1162 | 
            -
                        ]
         | 
| 1163 | 
            -
                    
         | 
| 1164 | 
            -
                    try:
         | 
| 1165 | 
            -
                        params = {
         | 
| 1166 | 
            -
                            "q": query,
         | 
| 1167 | 
            -
                            "count": 5,
         | 
| 1168 | 
            -
                            "safesearch": "moderate",
         | 
| 1169 | 
            -
                            "freshness": "pw"  # Past week for recent results
         | 
| 1170 | 
            -
                        }
         | 
| 1171 | 
            -
                        
         | 
| 1172 | 
            -
                        response = requests.get(
         | 
| 1173 | 
            -
                            self.brave_url,
         | 
| 1174 | 
            -
                            headers=self.create_brave_headers(),
         | 
| 1175 | 
            -
                            params=params,
         | 
| 1176 | 
            -
                            timeout=10
         | 
| 1177 | 
            -
                        )
         | 
| 1178 | 
            -
                        
         | 
| 1179 | 
            -
                        if response.status_code == 200:
         | 
| 1180 | 
            -
                            data = response.json()
         | 
| 1181 | 
            -
                            results = []
         | 
| 1182 | 
            -
                            for item in data.get("web", {}).get("results", [])[:5]:
         | 
| 1183 | 
            -
                                results.append({
         | 
| 1184 | 
            -
                                    "title": item.get("title", ""),
         | 
| 1185 | 
            -
                                    "description": item.get("description", ""),
         | 
| 1186 | 
            -
                                    "url": item.get("url", "")
         | 
| 1187 | 
            -
                                })
         | 
| 1188 | 
            -
                            return results
         | 
| 1189 | 
            -
                        else:
         | 
| 1190 | 
            -
                            logger.error(f"Brave API ์ค๋ฅ: {response.status_code}")
         | 
| 1191 | 
            -
                            return []
         | 
| 1192 | 
            -
                            
         | 
| 1193 | 
            -
                    except Exception as e:
         | 
| 1194 | 
            -
                        logger.error(f"Brave ๊ฒ์ ์ค ์ค๋ฅ: {str(e)}")
         | 
| 1195 | 
            -
                        return []
         | 
| 1196 | 
            -
                
         | 
| 1197 | 
            -
                def simulate_streaming(self, text: str, role: str) -> Generator[str, None, None]:
         | 
| 1198 | 
            -
                    """ํ
์คํธ ๋ชจ๋์์ ์คํธ๋ฆฌ๋ฐ ์๋ฎฌ๋ ์ด์
"""
         | 
| 1199 | 
            -
                    words = text.split()
         | 
| 1200 | 
            -
                    for i in range(0, len(words), 3):
         | 
| 1201 | 
            -
                        chunk = " ".join(words[i:i+3])
         | 
| 1202 | 
            -
                        yield chunk + " "
         | 
| 1203 | 
            -
                        time.sleep(0.05)
         | 
| 1204 | 
            -
                
         | 
| 1205 | 
            -
                def call_llm_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
         | 
| 1206 | 
            -
                    """์คํธ๋ฆฌ๋ฐ LLM API ํธ์ถ"""
         | 
| 1207 | 
            -
                    
         | 
| 1208 | 
            -
                    # ํ
์คํธ ๋ชจ๋
         | 
| 1209 | 
            -
                    if self.test_mode:
         | 
| 1210 | 
            -
                        logger.info(f"ํ
์คํธ ๋ชจ๋ ์คํธ๋ฆฌ๋ฐ - Role: {role}")
         | 
| 1211 | 
            -
                        test_responses = {
         | 
| 1212 | 
            -
                            "supervisor_initial": """์ด ์ง๋ฌธ์ ๋ํ ๊ฑฐ์์  ๋ถ์์ ์ ์ํ๊ฒ ์ต๋๋ค.
         | 
| 1213 | 
            -
             | 
| 1214 | 
            -
            1. **ํต์ฌ ๊ฐ๋
 ํ์
**
         | 
| 1215 | 
            -
               - ์ง๋ฌธ์ ๋ณธ์ง์  ์์๋ฅผ ์ฌ์ธต ๋ถ์ํฉ๋๋ค
         | 
| 1216 | 
            -
               - ๊ด๋ จ๋ ์ฃผ์ ์ด๋ก ๊ณผ ์์น์ ๊ฒํ ํฉ๋๋ค
         | 
| 1217 | 
            -
               - ๋ค์ํ ๊ด์ ์์์ ์ ๊ทผ ๋ฐฉ๋ฒ์ ๊ณ ๋ คํฉ๋๋ค
         | 
| 1218 | 
            -
             | 
| 1219 | 
            -
            2. **์ ๋ต์  ์ ๊ทผ ๋ฐฉํฅ**
         | 
| 1220 | 
            -
               - ์ฒด๊ณ์ ์ด๊ณ  ๋จ๊ณ๋ณ ํด๊ฒฐ ๋ฐฉ์์ ์๋ฆฝํฉ๋๋ค
         | 
| 1221 | 
            -
               - ์ฅ๋จ๊ธฐ ๋ชฉํ๋ฅผ ๋ช
ํํ ์ค์ ํฉ๋๋ค
         | 
| 1222 | 
            -
               - ๋ฆฌ์คํฌ ์์ธ๊ณผ ๋์ ๋ฐฉ์์ ๋ง๋ จํฉ๋๋ค
         | 
| 1223 | 
            -
             | 
| 1224 | 
            -
            3. **๊ธฐ๋ ํจ๊ณผ์ ๊ณผ์ **
         | 
| 1225 | 
            -
               - ์์๋๋ ๊ธ์ ์  ์ฑ๊ณผ๋ฅผ ๋ถ์ํฉ๋๋ค
         | 
| 1226 | 
            -
               - ์ ์ฌ์  ๋์  ๊ณผ์ ๋ฅผ ์๋ณํฉ๋๋ค
         | 
| 1227 | 
            -
               - ์ง์๊ฐ๋ฅํ ๋ฐ์  ๋ฐฉํฅ์ ์ ์ํฉ๋๋ค
         | 
| 1228 | 
            -
             | 
| 1229 | 
            -
            [๊ฒ์ ํค์๋]: machine learning optimization, performance improvement strategies, model efficiency techniques, hyperparameter tuning best practices, latest ML trends 2024""",
         | 
| 1230 | 
            -
                            
         | 
| 1231 | 
            -
                            "researcher": """์กฐ์ฌ ๊ฒฐ๊ณผ๋ฅผ ์ข
ํฉํ์ฌ ๋ค์๊ณผ ๊ฐ์ด ์ ๋ฆฌํ์ต๋๋ค.
         | 
| 1232 | 
            -
             | 
| 1233 | 
            -
            **1. Machine Learning Optimization**
         | 
| 1234 | 
            -
            - ์ต์  ์ฐ๊ตฌ์ ๋ฐ๋ฅด๋ฉด ๋ชจ๋ธ ์ต์ ํ์ ํต์ฌ์ ์ํคํ
์ฒ ์ค๊ณ์ ํ๋ จ ์ ๋ต์ ๊ท ํ์
๋๋ค
         | 
| 1235 | 
            -
            - AutoML ๋๊ตฌ๋ค์ด ํ์ดํผํ๋ผ๋ฏธํฐ ํ๋์ ์๋ํํ์ฌ ํจ์จ์ฑ์ ํฌ๊ฒ ํฅ์์ํต๋๋ค
         | 
| 1236 | 
            -
            - ์ถ์ฒ: ML Conference 2024, Google Research
         | 
| 1237 | 
            -
             | 
| 1238 | 
            -
            **2. Performance Improvement Strategies**
         | 
| 1239 | 
            -
            - ๋ฐ์ดํฐ ํ์ง ๊ฐ์ ์ด ๋ชจ๋ธ ์ฑ๋ฅ ํฅ์์ 80%๋ฅผ ์ฐจ์งํ๋ค๋ ์ฐ๊ตฌ ๊ฒฐ๊ณผ
         | 
| 1240 | 
            -
            - ์์๋ธ ๊ธฐ๋ฒ๊ณผ ์ ์ดํ์ต์ด ์ฃผ์ ์ฑ๋ฅ ๊ฐ์  ๋ฐฉ๋ฒ์ผ๋ก ์
์ฆ๋จ
         | 
| 1241 | 
            -
            - ๋ฒค์น๋งํฌ: ImageNet์์ 95% ์ด์์ ์ ํ๋ ๋ฌ์ฑ ์ฌ๋ก
         | 
| 1242 | 
            -
             | 
| 1243 | 
            -
            **3. Model Efficiency Techniques**
         | 
| 1244 | 
            -
            - ๋ชจ๋ธ ๊ฒฝ๋ํ(Pruning, Quantization)๋ก ์ถ๋ก  ์๋ 10๋ฐฐ ํฅ์ ๊ฐ๋ฅ
         | 
| 1245 | 
            -
            - Knowledge Distillation์ผ๋ก ๋ชจ๋ธ ํฌ๊ธฐ 90% ๊ฐ์, ์ฑ๋ฅ ์ ์ง
         | 
| 1246 | 
            -
            - ์ต์  ํธ๋ ๋: Efficient Transformers, Neural Architecture Search
         | 
| 1247 | 
            -
             | 
| 1248 | 
            -
            **4. ์ค์  ์ ์ฉ ์ฌ๋ก**
         | 
| 1249 | 
            -
            - Netflix: ์ถ์ฒ ์์คํ
 ๊ฐ์ ์ผ๋ก ์ฌ์ฉ์ ๋ง์กฑ๋ 35% ํฅ์
         | 
| 1250 | 
            -
            - Tesla: ์ค์๊ฐ ๊ฐ์ฒด ์ธ์ ์๋ 50% ๊ฐ์ 
         | 
| 1251 | 
            -
            - OpenAI: GPT ๋ชจ๋ธ ํจ์จ์ฑ ๊ฐ์ ์ผ๋ก ๋น์ฉ 70% ์ ๊ฐ""",
         | 
| 1252 | 
            -
                            
         | 
| 1253 | 
            -
                            "supervisor_execution": """์กฐ์ฌ ๋ด์ฉ์ ๋ฐํ์ผ๋ก ์คํ์ AI์๊ฒ ๋ค์๊ณผ ๊ฐ์ด ๊ตฌ์ฒด์ ์ผ๋ก ์ง์ํฉ๋๋ค.
         | 
| 1254 | 
            -
             | 
| 1255 | 
            -
            **1๋จ๊ณ: ํ์ฌ ๋ชจ๋ธ ์ง๋จ (1์ฃผ์ฐจ)**
         | 
| 1256 | 
            -
            - ์กฐ์ฌ๋ ๋ฒค์น๋งํฌ ๊ธฐ์ค์ผ๋ก ํ์ฌ ๋ชจ๋ธ ์ฑ๋ฅ ํ๊ฐ
         | 
| 1257 | 
            -
            - Netflix ์ฌ๋ก๋ฅผ ์ฐธ๊ณ ํ์ฌ ์ฃผ์ ๋ณ๋ชฉ ์ง์  ์๋ณ
         | 
| 1258 | 
            -
            - AutoML ๋๊ตฌ๋ฅผ ํ์ฉํ ์ด๊ธฐ ์ต์ ํ ๊ฐ๋ฅ์ฑ ํ์
         | 
| 1259 | 
            -
             | 
| 1260 | 
            -
            **2๋จ๊ณ: ๋ฐ์ดํฐ ํ์ง ๊ฐ์  (2-3์ฃผ์ฐจ)**
         | 
| 1261 | 
            -
            - ์กฐ์ฌ ๊ฒฐ๊ณผ์ "80% ๊ท์น"์ ๋ฐ๋ผ ๋ฐ์ดํฐ ์ ์  ์ฐ์  ์คํ
         | 
| 1262 | 
            -
            - ๋ฐ์ดํฐ ์ฆ๊ฐ ๊ธฐ๋ฒ ์ ์ฉ (์กฐ์ฌ๋ ์ต์  ๊ธฐ๋ฒ ํ์ฉ)
         | 
| 1263 | 
            -
            - A/B ํ
์คํธ๋ก ๊ฐ์  ํจ๊ณผ ์ธก์ 
         | 
| 1264 | 
            -
             | 
| 1265 | 
            -
            **3๋จ๊ณ: ๋ชจ๋ธ ์ต์ ํ ๊ตฌํ (4-6์ฃผ์ฐจ)**
         | 
| 1266 | 
            -
            - Knowledge Distillation ์ ์ฉํ์ฌ ๋ชจ๋ธ ๊ฒฝ๋ํ
         | 
| 1267 | 
            -
            - ์กฐ์ฌ๋ Pruning ๊ธฐ๋ฒ์ผ๋ก ์ถ๋ก  ์๋ ๊ฐ์ 
         | 
| 1268 | 
            -
            - Tesla ์ฌ๋ก์ ์ค์๊ฐ ์ฒ๋ฆฌ ์ต์ ํ ๊ธฐ๋ฒ ๋ฒค์น๋งํน
         | 
| 1269 | 
            -
             | 
| 1270 | 
            -
            **4๋จ๊ณ: ์ฑ๊ณผ ๊ฒ์ฆ ๋ฐ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)**
         | 
| 1271 | 
            -
            - OpenAI ์ฌ๋ก์ ๋น์ฉ ์ ๊ฐ ์งํ ์ ์ฉ
         | 
| 1272 | 
            -
            - ์กฐ์ฌ๋ ์ฑ๋ฅ ์งํ๋ก ๊ฐ์ ์จ ์ธก์ 
         | 
| 1273 | 
            -
            - ๋จ๊ณ์  ๋ฐฐํฌ ์ ๋ต ์๋ฆฝ""",
         | 
| 1274 | 
            -
                            
         | 
| 1275 | 
            -
                            "executor": """๊ฐ๋
์์ ์ง์์ ์กฐ์ฌ ๋ด์ฉ์ ๊ธฐ๋ฐ์ผ๋ก ๊ตฌ์ฒด์ ์ธ ์คํ ๊ณํ์ ์๋ฆฝํฉ๋๋ค.
         | 
| 1276 | 
            -
             | 
| 1277 | 
            -
            **1๋จ๊ณ: ํ์ฌ ๋ชจ๋ธ ์ง๋จ (1์ฃผ์ฐจ)**
         | 
| 1278 | 
            -
            - ์์์ผ-ํ์์ผ: MLflow๋ฅผ ์ฌ์ฉํ ํ์ฌ ๋ชจ๋ธ ๋ฉํธ๋ฆญ ์์ง
         | 
| 1279 | 
            -
              * ์กฐ์ฌ ๊ฒฐ๊ณผ ์ฐธ๊ณ : Netflix๊ฐ ์ฌ์ฉํ ํต์ฌ ์งํ (์ ํ๋, ์ง์ฐ์๊ฐ, ์ฒ๋ฆฌ๋)
         | 
| 1280 | 
            -
            - ์์์ผ-๋ชฉ์์ผ: AutoML ๋๊ตฌ (Optuna, Ray Tune) ์ค์  ๋ฐ ์ด๊ธฐ ์คํ
         | 
| 1281 | 
            -
              * ์กฐ์ฌ๋ best practice์ ๋ฐ๋ผ search space ์ ์
         | 
| 1282 | 
            -
            - ๊ธ์์ผ: ์ง๋จ ๋ณด๊ณ ์ ์์ฑ ๋ฐ ๊ฐ์  ์ฐ์ ์์ ๊ฒฐ์ 
         | 
| 1283 | 
            -
             | 
| 1284 | 
            -
            **2๋จ๊ณ: ๋ฐ์ดํฐ ํ์ง ๊ฐ์  (2-3์ฃผ์ฐจ)**
         | 
| 1285 | 
            -
            - ๋ฐ์ดํฐ ์ ์  ํ์ดํ๋ผ์ธ ๊ตฌ์ถ
         | 
| 1286 | 
            -
              * ์กฐ์ฌ ๊ฒฐ๊ณผ์ "80% ๊ท์น" ์ ์ฉ: ๋๋ฝ๊ฐ, ์ด์์น, ๋ ์ด๋ธ ์ค๋ฅ ์ฒ๋ฆฌ
         | 
| 1287 | 
            -
              * ์ฝ๋ ์์: `data_quality_pipeline.py` ๊ตฌํ
         | 
| 1288 | 
            -
            - ๋ฐ์ดํฐ ์ฆ๊ฐ ๊ตฌํ
         | 
| 1289 | 
            -
              * ์ต์  ๊ธฐ๋ฒ ์ ์ฉ: MixUp, CutMix, AutoAugment
         | 
| 1290 | 
            -
              * ๊ฒ์ฆ ๋ฐ์ดํฐ์
์ผ๋ก ํจ๊ณผ ์ธก์  (๋ชฉํ: 15% ์ฑ๋ฅ ํฅ์)
         | 
| 1291 | 
            -
             | 
| 1292 | 
            -
            **3๋จ๊ณ: ๋ชจ๋ธ ์ต์ ํ ๊ตฌํ (4-6์ฃผ์ฐจ)**
         | 
| 1293 | 
            -
            - Knowledge Distillation ๊ตฌํ
         | 
| 1294 | 
            -
              * Teacher ๋ชจ๋ธ: ํ์ฌ ๋๊ท๋ชจ ๋ชจ๋ธ
         | 
| 1295 | 
            -
              * Student ๋ชจ๋ธ: 90% ์์ ํฌ๊ธฐ ๋ชฉํ (์กฐ์ฌ ๊ฒฐ๊ณผ ๊ธฐ๋ฐ)
         | 
| 1296 | 
            -
              * ๊ตฌํ ํ๋ ์์ํฌ: PyTorch/TensorFlow
         | 
| 1297 | 
            -
            - Pruning ๋ฐ Quantization ์ ์ฉ
         | 
| 1298 | 
            -
              * ๊ตฌ์กฐ์  pruning์ผ๋ก 50% ํ๋ผ๋ฏธํฐ ์ ๊ฑฐ
         | 
| 1299 | 
            -
              * INT8 quantization์ผ๋ก ์ถ๊ฐ 4๋ฐฐ ์๋ ํฅ์
         | 
| 1300 | 
            -
              * Tesla ์ฌ๋ก ์ฐธ๊ณ : TensorRT ์ต์ ํ ์ ์ฉ
         | 
| 1301 | 
            -
             | 
| 1302 | 
            -
            **4๋จ๊ณ: ์ฑ๊ณผ ๊ฒ์ฆ ๋ฐ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)**
         | 
| 1303 | 
            -
            - ์ฑ๊ณผ ์งํ ์ธก์ 
         | 
| 1304 | 
            -
              * ์ถ๋ก  ์๋: ๋ชฉํ 10๋ฐฐ ํฅ์ (์กฐ์ฌ ๊ฒฐ๊ณผ ๊ธฐ๋ฐ)
         | 
| 1305 | 
            -
              * ์ ํ๋ ์์ค: ์ต๋ 2% ์ด๋ด ์ ์ง
         | 
| 1306 | 
            -
              * ๋น์ฉ ์ ๊ฐ: 70% ๋ชฉํ (OpenAI ์ฌ๋ก ์ฐธ๊ณ )
         | 
| 1307 | 
            -
            - ๋ฐฐํฌ ์ ๋ต
         | 
| 1308 | 
            -
              * A/B ํ
์คํธ: 10% ํธ๋ํฝ์ผ๋ก ์์
         | 
| 1309 | 
            -
              * ๋ชจ๋ํฐ๋ง: Prometheus + Grafana ๋์๋ณด๋
         | 
| 1310 | 
            -
              * ๋กค๋ฐฑ ๊ณํ: ์ฑ๋ฅ ์ ํ ์ ์๋ ๋กค๋ฐฑ
         | 
| 1311 | 
            -
             | 
| 1312 | 
            -
            **์์ ๊ฒฐ๊ณผ๋ฌผ**
         | 
| 1313 | 
            -
            - ์ต์ ํ๋ ๋ชจ๋ธ (ํฌ๊ธฐ 90% ๊ฐ์, ์๋ 10๋ฐฐ ํฅ์)
         | 
| 1314 | 
            -
            - ์์ธ ์ฑ๋ฅ ๋ฒค์น๋งํฌ ๋ณด๊ณ ์
         | 
| 1315 | 
            -
            - ํ๋ก๋์
 ๋ฐฐํฌ ๊ฐ์ด๋ ๋ฐ ๋ชจ๋ํฐ๋ง ๋์๋ณด๋
         | 
| 1316 | 
            -
            - ์ฌํ ๊ฐ๋ฅํ ์ต์ ํ ํ์ดํ๋ผ์ธ ์ฝ๋""",
         | 
| 1317 | 
            -
                            
         | 
| 1318 | 
            -
                            "supervisor_review": """์คํ์ AI์ ๊ณํ์ ๊ฒํ ํ ๊ฒฐ๊ณผ, ์กฐ์ฌ ๋ด์ฉ์ด ์ ๋ฐ์๋์์ต๋๋ค. ๋ค์๊ณผ ๊ฐ์ ๊ฐ์ ์ฌํญ์ ์ ์ํฉ๋๋ค.
         | 
| 1319 | 
            -
             | 
| 1320 | 
            -
            **๊ฐ์ **
         | 
| 1321 | 
            -
            - ์กฐ์ฌ๋ ์ฌ๋ก๋ค(Netflix, Tesla, OpenAI)์ด ๊ฐ ๋จ๊ณ์ ์ ์ ํ ํ์ฉ๋จ
         | 
| 1322 | 
            -
            - ๊ตฌ์ฒด์ ์ธ ๋๊ตฌ์ ๊ธฐ๋ฒ์ด ๋ช
์๋์ด ์คํ ๊ฐ๋ฅ์ฑ์ด ๋์
         | 
| 1323 | 
            -
            - ์ธก์  ๊ฐ๋ฅํ ๋ชฉํ๊ฐ ์กฐ์ฌ ๊ฒฐ๊ณผ๋ฅผ ๊ธฐ๋ฐ์ผ๋ก ์ค์ ๋จ
         | 
| 1324 | 
            -
             | 
| 1325 | 
            -
            **๊ฐ์  ํ์์ฌํญ**
         | 
| 1326 | 
            -
            1. **๋ฆฌ์คํฌ ๊ด๋ฆฌ ๊ฐํ**
         | 
| 1327 | 
            -
               - ๊ฐ ๋จ๊ณ๋ณ ์คํจ ์๋๋ฆฌ์ค์ ๋์ ๋ฐฉ์ ์ถ๊ฐ ํ์
         | 
| 1328 | 
            -
               - ๊ธฐ์ ์  ๋ฌธ์  ๋ฐ์ ์ ๋ฐฑ์
 ๊ณํ ์๋ฆฝ
         | 
| 1329 | 
            -
               
         | 
| 1330 | 
            -
            2. **๋น์ฉ ๋ถ์ ๊ตฌ์ฒดํ**
         | 
| 1331 | 
            -
               - OpenAI ์ฌ๋ก์ 70% ์ ๊ฐ์ ์ํ ๊ตฌ์ฒด์ ์ธ ๋น์ฉ ๊ณ์ฐ
         | 
| 1332 | 
            -
               - ROI ๋ถ์ ๋ฐ ํฌ์ ๋๋น ํจ๊ณผ ์ธก์  ๋ฐฉ๋ฒ
         | 
| 1333 | 
            -
               
         | 
| 1334 | 
            -
            3. **ํ ํ์
 ์ฒด๊ณํ**
         | 
| 1335 | 
            -
               - ๋ฐ์ดํฐ ๊ณผํ์, ML ์์ง๋์ด, DevOps ๊ฐ ์ญํ  ๋ถ๋ด ๋ช
ํํ
         | 
| 1336 | 
            -
               - ์ฃผ๊ฐ ์งํ ์ํฉ ๊ณต์  ๋ฐ ์ด์ ํธ๋ํน ํ๋ก์ธ์ค
         | 
| 1337 | 
            -
             | 
| 1338 | 
            -
            **์ถ๊ฐ ๊ถ์ฅ์ฌํญ**
         | 
| 1339 | 
            -
            - ์ต์  ์ฐ๊ตฌ ๋ํฅ ๋ชจ๋ํฐ๋ง ์ฒด๊ณ ๊ตฌ์ถ
         | 
| 1340 | 
            -
            - ๊ฒฝ์์ฌ ๋ฒค์น๋งํน์ ์ํ ์ ๊ธฐ์ ์ธ ์กฐ์ฌ ํ๋ก์ธ์ค
         | 
| 1341 | 
            -
            - ๋ด๋ถ ์ง์ ๊ณต์ ๋ฅผ ์ํ ๋ฌธ์ํ ๋ฐ ์ธ๋ฏธ๋ ๊ณํ
         | 
| 1342 | 
            -
            - ์คํจ ์ฌ๋ก์์ ๋ฐฐ์ด ๊ตํ์ ์ถ์ ํ๋ ์์คํ
 ๊ตฌ์ถ""",
         | 
| 1343 | 
            -
                            
         | 
| 1344 | 
            -
                            "executor_final": """๊ฐ๋
์ AI์ ํผ๋๋ฐฑ์ ์์ ํ ๋ฐ์ํ์ฌ ์ต์ข
 ์คํ ๋ณด๊ณ ์๋ฅผ ์์ฑํฉ๋๋ค.
         | 
| 1345 | 
            -
             | 
| 1346 | 
            -
            # ๐ฏ ๊ธฐ๊ณํ์ต ๋ชจ๋ธ ์ฑ๋ฅ ํฅ์ ์ต์ข
 ์คํ ๋ณด๊ณ ์
         | 
| 1347 | 
            -
             | 
| 1348 | 
            -
            ## ๐ Executive Summary
         | 
| 1349 | 
            -
            ๋ณธ ๋ณด๊ณ ์๋ ์น ๊ฒ์์ ํตํด ์์ง๋ ์ต์  ์ฌ๋ก์ ๊ฐ๋
์ AI์ ์ ๋ต์  ์ง์นจ์ ๋ฐํ์ผ๋ก, 8์ฃผ๊ฐ์ ์ฒด๊ณ์ ์ธ ๋ชจ๋ธ ์ต์ ํ ํ๋ก์ ํธ๋ฅผ ์ ์ํฉ๋๋ค. ๋ชฉํ๋ ๋ชจ๋ธ ํฌ๊ธฐ 90% ๊ฐ์, ์ถ๋ก  ์๋ 10๋ฐฐ ํฅ์, ์ด์ ๋น์ฉ 70% ์ ๊ฐ์
๋๋ค.
         | 
| 1350 | 
            -
             | 
| 1351 | 
            -
            ## ๐ 1๋จ๊ณ: ํ์ฌ ๋ชจ๋ธ ์ง๋จ ๋ฐ ๋ฒ ์ด์ค๋ผ์ธ ์ค์  (1์ฃผ์ฐจ)
         | 
| 1352 | 
            -
             | 
| 1353 | 
            -
            ### ์คํ ๊ณํ
         | 
| 1354 | 
            -
            **์-ํ์์ผ: ์ฑ๋ฅ ๋ฉํธ๋ฆญ ์์ง**
         | 
| 1355 | 
            -
            - MLflow๋ฅผ ํตํ ํ์ฌ ๋ชจ๋ธ ์ ์ฒด ๋ถ์
         | 
| 1356 | 
            -
            - Netflix ์ฌ๋ก ๊ธฐ๋ฐ ํต์ฌ ์งํ: ์ ํ๋(92%), ์ง์ฐ์๊ฐ(45ms), ์ฒ๋ฆฌ๋(1,000 req/s)
         | 
| 1357 | 
            -
            - ๋ฆฌ์์ค ์ฌ์ฉ๋: GPU ๋ฉ๋ชจ๋ฆฌ 8GB, ์ถ๋ก  ์ CPU ์ฌ์ฉ๋ฅ  85%
         | 
| 1358 | 
            -
             | 
| 1359 | 
            -
            **์-๋ชฉ์์ผ: AutoML ์ด๊ธฐ ํ์**
         | 
| 1360 | 
            -
            - Optuna๋ก ํ์ดํผํ๋ผ๋ฏธํฐ ์ต์ ํ (200ํ ์๋)
         | 
| 1361 | 
            -
            - Ray Tune์ผ๋ก ๋ถ์ฐ ํ์ต ํ๊ฒฝ ๊ตฌ์ถ
         | 
| 1362 | 
            -
            - ์ด๊ธฐ ๊ฐ์  ๊ฐ๋ฅ์ฑ: 15-20% ์ฑ๋ฅ ํฅ์ ์์
         | 
| 1363 | 
            -
             | 
| 1364 | 
            -
            **๊ธ์์ผ: ์ง๋จ ๋ณด๊ณ ์ ๋ฐ ๋ฆฌ์คํฌ ๋ถ์**
         | 
| 1365 | 
            -
            - ์ฃผ์ ๋ณ๋ชฉ: ๋ชจ๋ธ ํฌ๊ธฐ(2.5GB), ๋ฐฐ์น ์ฒ๋ฆฌ ๋นํจ์จ์ฑ
         | 
| 1366 | 
            -
            - ๋ฆฌ์คํฌ: ๋ฐ์ดํฐ ๋๋ฆฌํํธ, ํ๋์จ์ด ์ ์ฝ
         | 
| 1367 | 
            -
            - ๋ฐฑ์
 ๊ณํ: ํด๋ผ์ฐ๋ GPU ์ธ์คํด์ค ํ๋ณด
         | 
| 1368 | 
            -
             | 
| 1369 | 
            -
            ### ์์ ์ฐ์ถ๋ฌผ
         | 
| 1370 | 
            -
            - ์์ธ ์ฑ๋ฅ ๋ฒ ์ด์ค๋ผ์ธ ๋ฌธ์
         | 
| 1371 | 
            -
            - ๊ฐ์  ๊ธฐํ ์ฐ์ ์์ ๋งคํธ๋ฆญ์ค
         | 
| 1372 | 
            -
            - ๋ฆฌ์คํฌ ๋ ์ง์คํฐ ๋ฐ ๋์ ๊ณํ
         | 
| 1373 | 
            -
             | 
| 1374 | 
            -
            ## ๐ 2๋จ๊ณ: ๋ฐ์ดํฐ ํ์ง ๊ฐ์  (2-3์ฃผ์ฐจ)
         | 
| 1375 | 
            -
             | 
| 1376 | 
            -
            ### ์คํ ๊ณํ
         | 
| 1377 | 
            -
            **2์ฃผ์ฐจ: ๋ฐ์ดํฐ ์ ์  ํ์ดํ๋ผ์ธ**
         | 
| 1378 | 
            -
            ```python
         | 
| 1379 | 
            -
            # data_quality_pipeline.py ์ฃผ์ ๊ตฌ์ฑ
         | 
| 1380 | 
            -
            class DataQualityPipeline:
         | 
| 1381 | 
            -
                def __init__(self):
         | 
| 1382 | 
            -
                    self.validators = [
         | 
| 1383 | 
            -
                        MissingValueHandler(threshold=0.05),
         | 
| 1384 | 
            -
                        OutlierDetector(method='isolation_forest'),
         | 
| 1385 | 
            -
                        LabelConsistencyChecker(),
         | 
| 1386 | 
            -
                        DataDriftMonitor()
         | 
| 1387 | 
            -
                    ]
         | 
| 1388 | 
            -
                
         | 
| 1389 | 
            -
                def process(self, data):
         | 
| 1390 | 
            -
                    # 80% ๊ท์น ์ ์ฉ: ๋ฐ์ดํฐ ํ์ง์ด ์ฑ๋ฅ์ 80% ๊ฒฐ์ 
         | 
| 1391 | 
            -
                    for validator in self.validators:
         | 
| 1392 | 
            -
                        data = validator.transform(data)
         | 
| 1393 | 
            -
                        self.log_metrics(validator.get_stats())
         | 
| 1394 | 
            -
                    return data
         | 
| 1395 | 
            -
            ```
         | 
| 1396 | 
            -
             | 
| 1397 | 
            -
            **3์ฃผ์ฐจ: ๊ณ ๊ธ ๋ฐ์ดํฐ ์ฆ๊ฐ**
         | 
| 1398 | 
            -
            - MixUp: 15% ์ ํ๋ ํฅ์ ์์
         | 
| 1399 | 
            -
            - CutMix: ๊ฒฝ๊ณ ๊ฒ์ถ ์ฑ๋ฅ 20% ๊ฐ์ 
         | 
| 1400 | 
            -
            - AutoAugment: ์๋ ์ต์  ์ฆ๊ฐ ์ ์ฑ
 ํ์
         | 
| 1401 | 
            -
            - A/B ํ
์คํธ: ๊ฐ ๊ธฐ๋ฒ๋ณ ํจ๊ณผ ์ธก์ 
         | 
| 1402 | 
            -
             | 
| 1403 | 
            -
            ### ๋ฆฌ์คํฌ ๋์
         | 
| 1404 | 
            -
            - ๋ฐ์ดํฐ ํ์ง ์ ํ ์: ๋กค๋ฐฑ ๋ฉ์ปค๋์ฆ ๊ตฌํ
         | 
| 1405 | 
            -
            - ์ฆ๊ฐ ๊ณผ์ ํฉ ๋ฐฉ์ง: ๊ฒ์ฆ์
 ๋ถ๋ฆฌ ๋ฐ ๊ต์ฐจ ๊ฒ์ฆ
         | 
| 1406 | 
            -
             | 
| 1407 | 
            -
            ### ์์ ์ฐ์ถ๋ฌผ
         | 
| 1408 | 
            -
            - ์๋ํ๋ ๋ฐ์ดํฐ ํ์ง ํ์ดํ๋ผ์ธ
         | 
| 1409 | 
            -
            - ๋ฐ์ดํฐ ํ์ง ๋์๋ณด๋ (Grafana)
         | 
| 1410 | 
            -
            - 15% ์ด์ ์ฑ๋ฅ ํฅ์ ๊ฒ์ฆ ๋ณด๊ณ ์
         | 
| 1411 | 
            -
             | 
| 1412 | 
            -
            ## ๐ 3๋จ๊ณ: ๋ชจ๋ธ ์ต์ ํ ๊ตฌํ (4-6์ฃผ์ฐจ)
         | 
| 1413 | 
            -
             | 
| 1414 | 
            -
            ### ์คํ ๊ณํ
         | 
| 1415 | 
            -
            **4-5์ฃผ์ฐจ: Knowledge Distillation**
         | 
| 1416 | 
            -
            - Teacher ๋ชจ๋ธ: ํ์ฌ 2.5GB ๋ชจ๋ธ
         | 
| 1417 | 
            -
            - Student ๋ชจ๋ธ ์ํคํ
์ฒ:
         | 
| 1418 | 
            -
              * ํ๋ผ๋ฏธํฐ ์: 250M โ 25M (90% ๊ฐ์)
         | 
| 1419 | 
            -
              * ๋ ์ด์ด ์: 24 โ 6
         | 
| 1420 | 
            -
              * Hidden dimension: 1024 โ 256
         | 
| 1421 | 
            -
            - ํ๋ จ ์ ๋ต:
         | 
| 1422 | 
            -
              * Temperature: 5.0
         | 
| 1423 | 
            -
              * Alpha (KD loss weight): 0.7
         | 
| 1424 | 
            -
              * ํ๋ จ ์ํญ: 50
         | 
| 1425 | 
            -
             | 
| 1426 | 
            -
            **6์ฃผ์ฐจ: Pruning & Quantization**
         | 
| 1427 | 
            -
            - ๊ตฌ์กฐ์  Pruning:
         | 
| 1428 | 
            -
              * Magnitude ๊ธฐ๋ฐ 50% ์ฑ๋ ์ ๊ฑฐ
         | 
| 1429 | 
            -
              * Fine-tuning: 10 ์ํญ
         | 
| 1430 | 
            -
            - INT8 Quantization:
         | 
| 1431 | 
            -
              * Post-training quantization
         | 
| 1432 | 
            -
              * Calibration dataset: 1,000 ์ํ
         | 
| 1433 | 
            -
            - TensorRT ์ต์ ํ (Tesla ์ฌ๋ก ์ ์ฉ):
         | 
| 1434 | 
            -
              * FP16 ์ถ๋ก  ํ์ฑํ
         | 
| 1435 | 
            -
              * ๋์  ๋ฐฐ์น ์ต์ ํ
         | 
| 1436 | 
            -
             | 
| 1437 | 
            -
            ### ํ ํ์
 ์ฒด๊ณ
         | 
| 1438 | 
            -
            - ML ์์ง๋์ด: ๋ชจ๋ธ ์ํคํ
์ฒ ๋ฐ ํ๋ จ
         | 
| 1439 | 
            -
            - DevOps: ์ธํ๋ผ ๋ฐ ๋ฐฐํฌ ํ์ดํ๋ผ์ธ
         | 
| 1440 | 
            -
            - ๋ฐ์ดํฐ ๊ณผํ์: ์ฑ๋ฅ ๋ถ์ ๋ฐ ๊ฒ์ฆ
         | 
| 1441 | 
            -
            - ์ฃผ๊ฐ ์คํ ๋์
 ๋ฏธํ
 ๋ฐ Jira ์ด์ ํธ๋ํน
         | 
| 1442 | 
            -
             | 
| 1443 | 
            -
            ### ์์ ์ฐ์ถ๋ฌผ
         | 
| 1444 | 
            -
            - ์ต์ ํ๋ ๋ชจ๋ธ ์ฒดํฌํฌ์ธํธ
         | 
| 1445 | 
            -
            - ์ฑ๋ฅ ๋ฒค์น๋งํฌ ์์ธ ๋ณด๊ณ ์
         | 
| 1446 | 
            -
            - ๋ชจ๋ธ ๋ณํ ์๋ํ ์คํฌ๋ฆฝํธ
         | 
| 1447 | 
            -
             | 
| 1448 | 
            -
            ## ๐ 4๋จ๊ณ: ์ฑ๊ณผ ๊ฒ์ฆ ๋ฐ ํ๋ก๋์
 ๋ฐฐํฌ (7-8์ฃผ์ฐจ)
         | 
| 1449 | 
            -
             | 
| 1450 | 
            -
            ### ์คํ ๊ณํ
         | 
| 1451 | 
            -
            **7์ฃผ์ฐจ: ์ข
ํฉ ์ฑ๋ฅ ๊ฒ์ฆ**
         | 
| 1452 | 
            -
            - ์ฑ๋ฅ ์งํ ๋ฌ์ฑ๋:
         | 
| 1453 | 
            -
              * ์ถ๋ก  ์๋: 45ms โ 4.5ms (10๋ฐฐ ํฅ์) โ
         | 
| 1454 | 
            -
              * ๋ชจ๋ธ ํฌ๊ธฐ: 2.5GB โ 250MB (90% ๊ฐ์) โ
         | 
| 1455 | 
            -
              * ์ ํ๋ ์์ค: 92% โ 90.5% (1.5% ์์ค) โ
         | 
| 1456 | 
            -
            - ๋น์ฉ ๋ถ์:
         | 
| 1457 | 
            -
              * GPU ์ธ์คํด์ค: $2,000/์ โ $600/์
         | 
| 1458 | 
            -
              * ์ฒ๋ฆฌ๋ ์ฆ๊ฐ๋ก ์ธํ ์๋ฒ ์ ๊ฐ์: 10๋ โ 3๋
         | 
| 1459 | 
            -
              * ์ด ๋น์ฉ ์ ๊ฐ: 70% ๋ฌ์ฑ โ
         | 
| 1460 | 
            -
             | 
| 1461 | 
            -
            **8์ฃผ์ฐจ: ๋จ๊ณ์  ๋ฐฐํฌ**
         | 
| 1462 | 
            -
            - Canary ๋ฐฐํฌ:
         | 
| 1463 | 
            -
              * 1์ผ์ฐจ: 1% ํธ๋ํฝ
         | 
| 1464 | 
            -
              * 3์ผ์ฐจ: 10% ํธ๋ํฝ
         | 
| 1465 | 
            -
              * 7์ผ์ฐจ: 50% ํธ๋ํฝ
         | 
| 1466 | 
            -
              * 14์ผ์ฐจ: 100% ์ ํ
         | 
| 1467 | 
            -
            - ๋ชจ๋ํฐ๋ง ์ค์ :
         | 
| 1468 | 
            -
              * Prometheus + Grafana ๋์๋ณด๋
         | 
| 1469 | 
            -
              * ์๋ฆผ ์๊ณ๊ฐ: ์ง์ฐ์๊ฐ >10ms, ์ค๋ฅ์จ >0.1%
         | 
| 1470 | 
            -
            - ๋กค๋ฐฑ ๊ณํ:
         | 
| 1471 | 
            -
              * ์๋ ๋กค๋ฐฑ ํธ๋ฆฌ๊ฑฐ ์ค์ 
         | 
| 1472 | 
            -
              * Blue-Green ๋ฐฐํฌ๋ก ์ฆ์ ์ ํ ๊ฐ๋ฅ
         | 
| 1473 | 
            -
             | 
| 1474 | 
            -
            ### ROI ๋ถ์
         | 
| 1475 | 
            -
            - ์ด๊ธฐ ํฌ์: $50,000 (์ธ๊ฑด๋น + ์ธํ๋ผ)
         | 
| 1476 | 
            -
            - ์๊ฐ ์ ๊ฐ์ก: $14,000
         | 
| 1477 | 
            -
            - ํฌ์ ํ์ ๊ธฐ๊ฐ: 3.6๊ฐ์
         | 
| 1478 | 
            -
            - 1๋
 ์์ด์ต: $118,000
         | 
| 1479 | 
            -
             | 
| 1480 | 
            -
            ### ์์ ์ฐ์ถ๋ฌผ
         | 
| 1481 | 
            -
            - ํ๋ก๋์
 ๋ฐฐํฌ ์๋ฃ
         | 
| 1482 | 
            -
            - ์ค์๊ฐ ๋ชจ๋ํฐ๋ง ๋์๋ณด๋
         | 
| 1483 | 
            -
            - ROI ๋ถ์ ๋ณด๊ณ ์
         | 
| 1484 | 
            -
            - ์ด์ ๊ฐ์ด๋ ๋ฌธ์
         | 
| 1485 | 
            -
             | 
| 1486 | 
            -
            ## ๐ ์ง์์  ๊ฐ์  ๊ณํ
         | 
| 1487 | 
            -
             | 
| 1488 | 
            -
            ### ๋ชจ๋ํฐ๋ง ๋ฐ ์ ์ง๋ณด์
         | 
| 1489 | 
            -
            - ์๊ฐ ์ฑ๋ฅ ๋ฆฌ๋ทฐ ๋ฏธํ
         | 
| 1490 | 
            -
            - ๋ถ๊ธฐ๋ณ ์ฌํ๋ จ ๊ณํ
         | 
| 1491 | 
            -
            - ์ ๊ธฐ์  ๋์
 ๊ฒํ  (Sparse Models, MoE)
         | 
| 1492 | 
            -
             | 
| 1493 | 
            -
            ### ์ง์ ๊ณต์ 
         | 
| 1494 | 
            -
            - ๋ด๋ถ ๊ธฐ์  ์ธ๋ฏธ๋ (์ 1ํ)
         | 
| 1495 | 
            -
            - ์ธ๋ถ ์ปจํผ๋ฐ์ค ๋ฐํ ์ค๋น
         | 
| 1496 | 
            -
            - ์คํ์์ค ๊ธฐ์ฌ ๊ณํ
         | 
| 1497 | 
            -
             | 
| 1498 | 
            -
            ### ์ฐจ๊ธฐ ํ๋ก์ ํธ
         | 
| 1499 | 
            -
            - ์ฃ์ง ๋๋ฐ์ด์ค ๋ฐฐํฌ ์ต์ ํ
         | 
| 1500 | 
            -
            - ์ฐํฉ ํ์ต(Federated Learning) ๋์
         | 
| 1501 | 
            -
            - AutoML ํ๋ซํผ ๊ตฌ์ถ
         | 
| 1502 | 
            -
             | 
| 1503 | 
            -
            ## ๐ ๊ฒฐ๋ก 
         | 
| 1504 | 
            -
            ๋ณธ ํ๋ก์ ํธ๋ ์ต์  ์ฐ๊ตฌ ๊ฒฐ๊ณผ์ ์
๊ณ ๋ฒ ์คํธ ํ๋ํฐ์ค๋ฅผ ์ ์ฉํ์ฌ, 8์ฃผ ๋ง์ ๋ชจ๋ธ ์ฑ๋ฅ์ ํ๊ธฐ์ ์ผ๋ก ๊ฐ์ ํ๊ณ  ์ด์ ๋น์ฉ์ 70% ์ ๊ฐํ๋ ์ฑ๊ณผ๋ฅผ ๋ฌ์ฑํ  ๊ฒ์ผ๋ก ์์๋ฉ๋๋ค. ์ฒด๊ณ์ ์ธ ์ ๊ทผ๊ณผ ๋ฆฌ์คํฌ ๊ด๋ฆฌ, ๊ทธ๋ฆฌ๊ณ  ์ง์์ ์ธ ๊ฐ์  ๊ณํ์ ํตํด ์ฅ๊ธฐ์ ์ธ ๊ฒฝ์๋ ฅ์ ํ๋ณดํ  ์ ์์ต๋๋ค.
         | 
| 1505 | 
            -
             | 
| 1506 | 
            -
            ---
         | 
| 1507 | 
            -
            *์์ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*
         | 
| 1508 | 
            -
            *์์ฑ์: ํ๋ ฅ์  AI ์์คํ
 (๊ฐ๋
์, ์กฐ์ฌ์, ์คํ์ AI)*"""
         | 
| 1509 | 
            -
                        }
         | 
| 1510 | 
            -
                        
         | 
| 1511 | 
            -
                        # ํ๋กฌํํธ ๋ด์ฉ์ ๋ฐ๋ผ ์ ์ ํ ์๋ต ์ ํ
         | 
| 1512 | 
            -
                        if role == "supervisor" and "์กฐ์ฌ์ AI๊ฐ ์ ๋ฆฌํ" in messages[0]["content"]:
         | 
| 1513 | 
            -
                            response = test_responses["supervisor_execution"]
         | 
| 1514 | 
            -
                        elif role == "supervisor" and messages[0]["content"].find("์คํ์ AI์ ๋ต๋ณ") > -1:
         | 
| 1515 | 
            -
                            response = test_responses["supervisor_review"]
         | 
| 1516 | 
            -
                        elif role == "supervisor":
         | 
| 1517 | 
            -
                            response = test_responses["supervisor_initial"]
         | 
| 1518 | 
            -
                        elif role == "researcher":
         | 
| 1519 | 
            -
                            response = test_responses["researcher"]
         | 
| 1520 | 
            -
                        elif role == "executor" and "์ต์ข
 ๋ณด๊ณ ์" in messages[0]["content"]:
         | 
| 1521 | 
            -
                            response = test_responses["executor_final"]
         | 
| 1522 | 
            -
                        else:
         | 
| 1523 | 
            -
                            response = test_responses["executor"]
         | 
| 1524 | 
            -
                        
         | 
| 1525 | 
            -
                        yield from self.simulate_streaming(response, role)
         | 
| 1526 | 
            -
                        return
         | 
| 1527 | 
            -
                    
         | 
| 1528 | 
            -
                    # ์ค์  API ํธ์ถ
         | 
| 1529 | 
            -
                    try:
         | 
| 1530 | 
            -
                        system_prompts = {
         | 
| 1531 | 
            -
                            "supervisor": "๋น์ ์ ๊ฑฐ์์  ๊ด์ ์์ ๋ถ์ํ๊ณ  ์ง๋ํ๋ ๊ฐ๋
์ AI์
๋๋ค.",
         | 
| 1532 | 
            -
                            "researcher": "๋น์ ์ ์ ๋ณด๋ฅผ ์กฐ์ฌํ๊ณ  ์ฒด๊ณ์ ์ผ๋ก ์ ๋ฆฌํ๋ ์กฐ์ฌ์ AI์
๋๋ค.",
         | 
| 1533 | 
            -
                            "executor": "๋น์ ์ ์ธ๋ถ์ ์ธ ๋ด์ฉ์ ๊ตฌํํ๋ ์คํ์ AI์
๋๋ค."
         | 
| 1534 | 
            -
                        }
         | 
| 1535 | 
            -
                        
         | 
| 1536 | 
            -
                        full_messages = [
         | 
| 1537 | 
            -
                            {"role": "system", "content": system_prompts.get(role, "")},
         | 
| 1538 | 
            -
                            *messages
         | 
| 1539 | 
            -
                        ]
         | 
| 1540 | 
            -
                        
         | 
| 1541 | 
            -
                        payload = {
         | 
| 1542 | 
            -
                            "model": self.model_id,
         | 
| 1543 | 
            -
                            "messages": full_messages,
         | 
| 1544 | 
            -
                            "max_tokens": 2048,
         | 
| 1545 | 
            -
                            "temperature": 0.7,
         | 
| 1546 | 
            -
                            "top_p": 0.8,
         | 
| 1547 | 
            -
                            "stream": True,
         | 
| 1548 | 
            -
                            "stream_options": {"include_usage": True}
         | 
| 1549 | 
            -
                        }
         | 
| 1550 | 
            -
                        
         | 
| 1551 | 
            -
                        logger.info(f"API ์คํธ๋ฆฌ๋ฐ ํธ์ถ ์์ - Role: {role}")
         | 
| 1552 | 
            -
                        
         | 
| 1553 | 
            -
                        response = requests.post(
         | 
| 1554 | 
            -
                            self.api_url,
         | 
| 1555 | 
            -
                            headers=self.create_headers(),
         | 
| 1556 | 
            -
                            json=payload,
         | 
| 1557 | 
            -
                            stream=True,
         | 
| 1558 | 
            -
                            timeout=10
         | 
| 1559 | 
            -
                        )
         | 
| 1560 | 
            -
                        
         | 
| 1561 | 
            -
                        if response.status_code != 200:
         | 
| 1562 | 
            -
                            logger.error(f"API ์ค๋ฅ: {response.status_code}")
         | 
| 1563 | 
            -
                            yield f"โ API ์ค๋ฅ ({response.status_code}): {response.text[:200]}"
         | 
| 1564 | 
            -
                            return
         | 
| 1565 | 
            -
                        
         | 
| 1566 | 
            -
                        for line in response.iter_lines():
         | 
| 1567 | 
            -
                            if line:
         | 
| 1568 | 
            -
                                line = line.decode('utf-8')
         | 
| 1569 | 
            -
                                if line.startswith("data: "):
         | 
| 1570 | 
            -
                                    data = line[6:]
         | 
| 1571 | 
            -
                                    if data == "[DONE]":
         | 
| 1572 | 
            -
                                        break
         | 
| 1573 | 
            -
                                    try:
         | 
| 1574 | 
            -
                                        chunk = json.loads(data)
         | 
| 1575 | 
            -
                                        if "choices" in chunk and chunk["choices"]:
         | 
| 1576 | 
            -
                                            content = chunk["choices"][0].get("delta", {}).get("content", "")
         | 
| 1577 | 
            -
                                            if content:
         | 
| 1578 | 
            -
                                                yield content
         | 
| 1579 | 
            -
                                    except json.JSONDecodeError:
         | 
| 1580 | 
            -
                                        continue
         | 
| 1581 | 
            -
                                        
         | 
| 1582 | 
            -
                    except requests.exceptions.Timeout:
         | 
| 1583 | 
            -
                        yield "โฑ๏ธ API ํธ์ถ ์๊ฐ์ด ์ด๊ณผ๋์์ต๋๋ค. ๋ค์ ์๋ํด์ฃผ์ธ์."
         | 
| 1584 | 
            -
                    except requests.exceptions.ConnectionError:
         | 
| 1585 | 
            -
                        yield "๐ API ์๋ฒ์ ์ฐ๊ฒฐํ  ์ ์์ต๋๋ค. ์ธํฐ๋ท ์ฐ๊ฒฐ์ ํ์ธํด์ฃผ์ธ์."
         | 
| 1586 | 
            -
                    except Exception as e:
         | 
| 1587 | 
            -
                        logger.error(f"์คํธ๋ฆฌ๋ฐ ์ค ์ค๋ฅ: {str(e)}")
         | 
| 1588 | 
            -
                        yield f"โ ์ค๋ฅ ๋ฐ์: {str(e)}"
         | 
| 1589 | 
            -
             | 
| 1590 | 
            -
# Single shared instance of the collaborative LLM pipeline; all Gradio
# event handlers below stream through this one object.
llm_system = LLMCollaborativeSystem()
         | 
| 1592 | 
            -
             | 
| 1593 | 
            -
            def process_query_streaming(user_query: str, history: List):
         | 
| 1594 | 
            -
                """์คํธ๋ฆฌ๋ฐ์ ์ง์ํ๋ ์ฟผ๋ฆฌ ์ฒ๋ฆฌ"""
         | 
| 1595 | 
            -
                if not user_query:
         | 
| 1596 | 
            -
                    return history, "", "", "", "", "โ ์ง๋ฌธ์ ์
๋ ฅํด์ฃผ์ธ์."
         | 
| 1597 | 
            -
                
         | 
| 1598 | 
            -
                conversation_log = []
         | 
| 1599 | 
            -
                all_responses = {"supervisor": [], "researcher": [], "executor": []}
         | 
| 1600 | 
            -
                
         | 
| 1601 | 
            -
                try:
         | 
| 1602 | 
            -
                    # 1๋จ๊ณ: ๊ฐ๋
์ AI ์ด๊ธฐ ๋ถ์ ๋ฐ ํค์๋ ์ถ์ถ
         | 
| 1603 | 
            -
                    supervisor_prompt = llm_system.create_supervisor_initial_prompt(user_query)
         | 
| 1604 | 
            -
                    supervisor_initial_response = ""
         | 
| 1605 | 
            -
                    
         | 
| 1606 | 
            -
                    supervisor_text = "[์ด๊ธฐ ๋ถ์] ๐ ์์ฑ ์ค...\n"
         | 
| 1607 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1608 | 
            -
                        [{"role": "user", "content": supervisor_prompt}], 
         | 
| 1609 | 
            -
                        "supervisor"
         | 
| 1610 | 
            -
                    ):
         | 
| 1611 | 
            -
                        supervisor_initial_response += chunk
         | 
| 1612 | 
            -
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
         | 
| 1613 | 
            -
                        yield history, supervisor_text, "", "", "", "๐ ๊ฐ๋
์ AI๊ฐ ๋ถ์ ์ค..."
         | 
| 1614 | 
            -
                    
         | 
| 1615 | 
            -
                    all_responses["supervisor"].append(supervisor_initial_response)
         | 
| 1616 | 
            -
                    
         | 
| 1617 | 
            -
                    # ํค์๋ ์ถ์ถ
         | 
| 1618 | 
            -
                    keywords = llm_system.extract_keywords(supervisor_initial_response)
         | 
| 1619 | 
            -
                    logger.info(f"์ถ์ถ๋ ํค์๋: {keywords}")
         | 
| 1620 | 
            -
                    
         | 
| 1621 | 
            -
                    # 2๋จ๊ณ: ๋ธ๋ ์ด๋ธ ๊ฒ์ ์ํ
         | 
| 1622 | 
            -
                    researcher_text = "[์น ๊ฒ์] ๐ ๊ฒ์ ์ค...\n"
         | 
| 1623 | 
            -
                    yield history, supervisor_text, researcher_text, "", "", "๐ ์น ๊ฒ์ ์ํ ์ค..."
         | 
| 1624 | 
            -
                    
         | 
| 1625 | 
            -
                    search_results = {}
         | 
| 1626 | 
            -
                    for keyword in keywords:
         | 
| 1627 | 
            -
                        results = llm_system.brave_search(keyword)
         | 
| 1628 | 
            -
                        if results:
         | 
| 1629 | 
            -
                            search_results[keyword] = results
         | 
| 1630 | 
            -
                            researcher_text += f"โ '{keyword}' ๊ฒ์ ์๋ฃ\n"
         | 
| 1631 | 
            -
                            yield history, supervisor_text, researcher_text, "", "", f"๐ '{keyword}' ๊ฒ์ ์ค..."
         | 
| 1632 | 
            -
                    
         | 
| 1633 | 
            -
                    # 3๋จ๊ณ: ์กฐ์ฌ์ AI๊ฐ ๊ฒ์ ๊ฒฐ๊ณผ ์ ๋ฆฌ
         | 
| 1634 | 
            -
                    researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
         | 
| 1635 | 
            -
                    researcher_response = ""
         | 
| 1636 | 
            -
                    
         | 
| 1637 | 
            -
                    researcher_text = "[์กฐ์ฌ ๊ฒฐ๊ณผ ์ ๋ฆฌ] ๐ ์์ฑ ์ค...\n"
         | 
| 1638 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1639 | 
            -
                        [{"role": "user", "content": researcher_prompt}], 
         | 
| 1640 | 
            -
                        "researcher"
         | 
| 1641 | 
            -
                    ):
         | 
| 1642 | 
            -
                        researcher_response += chunk
         | 
| 1643 | 
            -
                        researcher_text = f"[์กฐ์ฌ ๊ฒฐ๊ณผ ์ ๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
         | 
| 1644 | 
            -
                        yield history, supervisor_text, researcher_text, "", "", "๐ ์กฐ์ฌ์ AI๊ฐ ์ ๋ฆฌ ์ค..."
         | 
| 1645 | 
            -
                    
         | 
| 1646 | 
            -
                    all_responses["researcher"].append(researcher_response)
         | 
| 1647 | 
            -
                    
         | 
| 1648 | 
            -
                    # 4๋จ๊ณ: ๊ฐ๋
์ AI๊ฐ ์กฐ์ฌ ๋ด์ฉ ๊ธฐ๋ฐ์ผ๋ก ์คํ ์ง์
         | 
| 1649 | 
            -
                    supervisor_execution_prompt = llm_system.create_supervisor_execution_prompt(user_query, researcher_response)
         | 
| 1650 | 
            -
                    supervisor_execution_response = ""
         | 
| 1651 | 
            -
                    
         | 
| 1652 | 
            -
                    supervisor_text += "\n\n---\n\n[์คํ ์ง์] ๐ ์์ฑ ์ค...\n"
         | 
| 1653 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1654 | 
            -
                        [{"role": "user", "content": supervisor_execution_prompt}], 
         | 
| 1655 | 
            -
                        "supervisor"
         | 
| 1656 | 
            -
                    ):
         | 
| 1657 | 
            -
                        supervisor_execution_response += chunk
         | 
| 1658 | 
            -
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
         | 
| 1659 | 
            -
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 1660 | 
            -
                        yield history, supervisor_text, researcher_text, "", "", "๐ฏ ๊ฐ๋
์ AI๊ฐ ์ง์ ์ค..."
         | 
| 1661 | 
            -
                    
         | 
| 1662 | 
            -
                    all_responses["supervisor"].append(supervisor_execution_response)
         | 
| 1663 | 
            -
                    
         | 
| 1664 | 
            -
                    # 5๋จ๊ณ: ์คํ์ AI๊ฐ ์กฐ์ฌ ๋ด์ฉ๊ณผ ์ง์๋ฅผ ๊ธฐ๋ฐ์ผ๋ก ์ด๊ธฐ ๊ตฌํ
         | 
| 1665 | 
            -
                    executor_prompt = llm_system.create_executor_prompt(user_query, supervisor_execution_response, researcher_response)
         | 
| 1666 | 
            -
                    executor_response = ""
         | 
| 1667 | 
            -
                    
         | 
| 1668 | 
            -
                    executor_text = "[์ด๊ธฐ ๊ตฌํ] ๐ ์์ฑ ์ค...\n"
         | 
| 1669 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1670 | 
            -
                        [{"role": "user", "content": executor_prompt}], 
         | 
| 1671 | 
            -
                        "executor"
         | 
| 1672 | 
            -
                    ):
         | 
| 1673 | 
            -
                        executor_response += chunk
         | 
| 1674 | 
            -
                        executor_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
         | 
| 1675 | 
            -
                        yield history, supervisor_text, researcher_text, executor_text, "", "๐ง ์คํ์ AI๊ฐ ๊ตฌํ ์ค..."
         | 
| 1676 | 
            -
                    
         | 
| 1677 | 
            -
                    all_responses["executor"].append(executor_response)
         | 
| 1678 | 
            -
                    
         | 
| 1679 | 
            -
                    # 6๋จ๊ณ: ๊ฐ๋
์ AI ๊ฒํ  ๋ฐ ํผ๋๋ฐฑ
         | 
| 1680 | 
            -
                    review_prompt = f"""๋น์ ์ ๊ฑฐ์์  ๊ด์ ์์ ๋ถ์ํ๊ณ  ์ง๋ํ๋ ๊ฐ๋
์ AI์
๋๋ค.
         | 
| 1681 | 
            -
             | 
| 1682 | 
            -
            ์ฌ์ฉ์ ์ง๋ฌธ: {user_query}
         | 
| 1683 | 
            -
             | 
| 1684 | 
            -
            ์คํ์ AI์ ๋ต๋ณ:
         | 
| 1685 | 
            -
            {executor_response}
         | 
| 1686 | 
            -
             | 
| 1687 | 
            -
            ์ด ๋ต๋ณ์ ๊ฒํ ํ๊ณ  ๊ฐ์ ์ ๊ณผ ์ถ๊ฐ ๊ณ ๋ ค์ฌํญ์ ์ ์ํด์ฃผ์ธ์. ๊ตฌ์ฒด์ ์ด๊ณ  ์คํ ๊ฐ๋ฅํ ๊ฐ์  ๋ฐฉ์์ ์ ์ํ์ธ์."""
         | 
| 1688 | 
            -
                    
         | 
| 1689 | 
            -
                    review_response = ""
         | 
| 1690 | 
            -
                    supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒํ  ๋ฐ ํผ๋๋ฐฑ] ๐ ์์ฑ ์ค...\n"
         | 
| 1691 | 
            -
                    
         | 
| 1692 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1693 | 
            -
                        [{"role": "user", "content": review_prompt}], 
         | 
| 1694 | 
            -
                        "supervisor"
         | 
| 1695 | 
            -
                    ):
         | 
| 1696 | 
            -
                        review_response += chunk
         | 
| 1697 | 
            -
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒํ  ๋ฐ ํผ๋๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
         | 
| 1698 | 
            -
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 1699 | 
            -
                        yield history, supervisor_text, researcher_text, executor_text, "", "๐ ๊ฐ๋
์ AI๊ฐ ๊ฒํ  ์ค..."
         | 
| 1700 | 
            -
                    
         | 
| 1701 | 
            -
                    all_responses["supervisor"].append(review_response)
         | 
| 1702 | 
            -
                    
         | 
| 1703 | 
            -
                    # 7๋จ๊ณ: ์คํ์ AI ์ต์ข
 ๋ณด๊ณ ์ (ํผ๋๋ฐฑ ๋ฐ์)
         | 
| 1704 | 
            -
                    final_executor_prompt = llm_system.create_executor_final_prompt(
         | 
| 1705 | 
            -
                        user_query, 
         | 
| 1706 | 
            -
                        executor_response, 
         | 
| 1707 | 
            -
                        review_response, 
         | 
| 1708 | 
            -
                        researcher_response
         | 
| 1709 | 
            -
                    )
         | 
| 1710 | 
            -
                    final_executor_response = ""
         | 
| 1711 | 
            -
                    
         | 
| 1712 | 
            -
                    executor_text += "\n\n---\n\n[์ต์ข
 ๋ณด๊ณ ์] ๐ ์์ฑ ์ค...\n"
         | 
| 1713 | 
            -
                    for chunk in llm_system.call_llm_streaming(
         | 
| 1714 | 
            -
                        [{"role": "user", "content": final_executor_prompt}], 
         | 
| 1715 | 
            -
                        "executor"
         | 
| 1716 | 
            -
                    ):
         | 
| 1717 | 
            -
                        final_executor_response += chunk
         | 
| 1718 | 
            -
                        temp_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ต์ข
 ๋ณด๊ณ ์] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
         | 
| 1719 | 
            -
                        executor_text = temp_text
         | 
| 1720 | 
            -
                        yield history, supervisor_text, researcher_text, executor_text, "", "๐ ์ต์ข
 ๋ณด๊ณ ์ ์์ฑ ์ค..."
         | 
| 1721 | 
            -
                    
         | 
| 1722 | 
            -
                    all_responses["executor"].append(final_executor_response)
         | 
| 1723 | 
            -
                    
         | 
| 1724 | 
            -
                    # ์ต์ข
 ๊ฒฐ๊ณผ ์์ฑ (์ต์ข
 ๋ณด๊ณ ์๋ฅผ ๋ฉ์ธ์ผ๋ก)
         | 
| 1725 | 
            -
                    final_summary = f"""## ๐ฏ ์ต์ข
 ์ข
ํฉ ๋ณด๊ณ ์
         | 
| 1726 | 
            -
             | 
| 1727 | 
            -
            ### ๐ ์ฌ์ฉ์ ์ง๋ฌธ
         | 
| 1728 | 
            -
            {user_query}
         | 
| 1729 | 
            -
             | 
| 1730 | 
            -
            ### ๐ ์ต์ข
 ๋ณด๊ณ ์ (์คํ์ AI - ํผ๋๋ฐฑ ๋ฐ์)
         | 
| 1731 | 
            -
            {final_executor_response}
         | 
| 1732 | 
            -
             | 
| 1733 | 
            -
            ---
         | 
| 1734 | 
            -
             | 
| 1735 | 
            -
            <details>
         | 
| 1736 | 
            -
            <summary>๐ ์ ์ฒด ํ๋ ฅ ๊ณผ์  ๋ณด๊ธฐ</summary>
         | 
| 1737 | 
            -
             | 
| 1738 | 
            -
            #### ๐ ๊ฑฐ์์  ๋ถ์ (๊ฐ๋
์ AI)
         | 
| 1739 | 
            -
            {all_responses['supervisor'][0]}
         | 
| 1740 | 
            -
             | 
| 1741 | 
            -
            #### ๐ ์กฐ์ฌ ๊ฒฐ๊ณผ (์กฐ์ฌ์ AI)
         | 
| 1742 | 
            -
            {researcher_response}
         | 
| 1743 | 
            -
             | 
| 1744 | 
            -
            #### ๐ฏ ์คํ ์ง์ (๊ฐ๋
์ AI)
         | 
| 1745 | 
            -
            {all_responses['supervisor'][1]}
         | 
| 1746 | 
            -
             | 
| 1747 | 
            -
            #### ๐ก ์ด๊ธฐ ๊ตฌํ (์คํ์ AI)
         | 
| 1748 | 
            -
            {executor_response}
         | 
| 1749 | 
            -
             | 
| 1750 | 
            -
            #### โจ ๊ฒํ  ๋ฐ ๊ฐ์ ์ฌํญ (๊ฐ๋
์ AI)
         | 
| 1751 | 
            -
            {review_response}
         | 
| 1752 | 
            -
             | 
| 1753 | 
            -
            </details>
         | 
| 1754 | 
            -
             | 
| 1755 | 
            -
            ---
         | 
| 1756 | 
            -
            *์ด ๋ณด๊ณ ์๋ ์น ๊ฒ์์ ํตํ ์ต์  ์ ๋ณด์ AI๋ค์ ํ๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํผ๋๋ฐฑ ๋ฐ์์ ํตํด ์์ฑ๋์์ต๋๋ค.*"""
         | 
| 1757 | 
            -
                    
         | 
| 1758 | 
            -
                    # ํ์คํ ๋ฆฌ ์
๋ฐ์ดํธ
         | 
| 1759 | 
            -
                    new_history = history + [(user_query, final_summary)]
         | 
| 1760 | 
            -
                    
         | 
| 1761 | 
            -
                    yield new_history, supervisor_text, researcher_text, executor_text, final_summary, "โ
 ์ต์ข
 ๋ณด๊ณ ์ ์์ฑ!"
         | 
| 1762 | 
            -
                    
         | 
| 1763 | 
            -
                except Exception as e:
         | 
| 1764 | 
            -
                    error_msg = f"โ ์ฒ๋ฆฌ ์ค ์ค๋ฅ: {str(e)}"
         | 
| 1765 | 
            -
                    yield history, "", "", "", error_msg, error_msg
         | 
| 1766 | 
            -
             | 
| 1767 | 
            -
            def clear_all():
         | 
| 1768 | 
            -
                """๋ชจ๋  ๋ด์ฉ ์ด๊ธฐํ"""
         | 
| 1769 | 
            -
                return [], "", "", "", "", "๐ ์ด๊ธฐํ๋์์ต๋๋ค."
         | 
| 1770 | 
            -
             | 
| 1771 | 
            -
            # Gradio ์ธํฐํ์ด์ค
         | 
| 1772 | 
            -
# Custom CSS for the Gradio UI: each AI panel's textarea gets a distinct
# coloured left border (supervisor = indigo, researcher = green,
# executor = purple) so the three streams are visually separable.
css = """
.gradio-container {
    font-family: 'Arial', sans-serif;
}
.supervisor-box textarea {
    border-left: 4px solid #667eea !important;
    padding-left: 10px !important;
}
.researcher-box textarea {
    border-left: 4px solid #10b981 !important;
    padding-left: 10px !important;
}
.executor-box textarea {
    border-left: 4px solid #764ba2 !important;
    padding-left: 10px !important;
}
"""
         | 
| 1789 | 
            -
             | 
| 1790 | 
            -
            with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) as app:
         | 
| 1791 | 
            -
                gr.Markdown(
         | 
| 1792 | 
            -
                    f"""
         | 
| 1793 | 
            -
                    # ๐ค ํ๋ ฅ์  LLM ์์คํ
 (์กฐ์ฌ์ ํฌํจ + ํผ๋๋ฐฑ ๋ฐ์)
         | 
| 1794 | 
            -
                    
         | 
| 1795 | 
            -
                    > ๊ฐ๋
์, ์กฐ์ฌ์, ์คํ์ AI๊ฐ ํ๋ ฅํ์ฌ ํผ๋๋ฐฑ์ ๋ฐ์ํ ์์ ํ ๋ณด๊ณ ์๋ฅผ ์์ฑํฉ๋๋ค.
         | 
| 1796 | 
            -
                    
         | 
| 1797 | 
            -
                    **์ํ**: 
         | 
| 1798 | 
            -
                    - LLM: {'๐ข ์ค์  ๋ชจ๋' if not llm_system.test_mode else '๐ก ํ
์คํธ ๋ชจ๋'}
         | 
| 1799 | 
            -
                    - Brave Search: {'๐ข ํ์ฑํ' if llm_system.bapi_token != "YOUR_BRAVE_API_TOKEN" else '๐ก ํ
์คํธ ๋ชจ๋'}
         | 
| 1800 | 
            -
                    
         | 
| 1801 | 
            -
                    **7๋จ๊ณ ํ๋ ฅ ํ๋ก์ธ์ค:**
         | 
| 1802 | 
            -
                    1. ๐ง  **๊ฐ๋
์**: ๊ฑฐ์์  ๋ถ์ ๋ฐ ๊ฒ์ ํค์๋ ์ถ์ถ
         | 
| 1803 | 
            -
                    2. ๐ **์กฐ์ฌ์**: ๋ธ๋ ์ด๋ธ ๊ฒ์์ผ๋ก ์ต์  ์ ๋ณด ์์ง
         | 
| 1804 | 
            -
                    3. ๐ง  **๊ฐ๋
์**: ์กฐ์ฌ ๋ด์ฉ ๊ธฐ๋ฐ ๊ตฌ์ฒด์  ์คํ ์ง์
         | 
| 1805 | 
            -
                    4. ๐๏ธ **์คํ์**: ์ด๊ธฐ ์คํ ๊ณํ ์์ฑ
         | 
| 1806 | 
            -
                    5. ๐ง  **๊ฐ๋
์**: ๊ฒํ  ๋ฐ ๊ฐ์ ์ฌํญ ํผ๋๋ฐฑ
         | 
| 1807 | 
            -
                    6. ๐๏ธ **์คํ์**: ํผ๋๋ฐฑ ๋ฐ์ํ ์ต์ข
 ๋ณด๊ณ ์ ์์ฑ
         | 
| 1808 | 
            -
                    7. ๐ **์ต์ข
 ์ฐ์ถ๋ฌผ**: ์์ ํ ์คํ ๋ณด๊ณ ์
         | 
| 1809 | 
            -
                    """
         | 
| 1810 | 
            -
                )
         | 
| 1811 | 
            -
                
         | 
| 1812 | 
            -
                with gr.Row():
         | 
| 1813 | 
            -
                    # ์ผ์ชฝ: ์
๋ ฅ ๋ฐ ์ฑํ
 ๊ธฐ๋ก
         | 
| 1814 | 
            -
                    with gr.Column(scale=1):
         | 
| 1815 | 
            -
                        chatbot = gr.Chatbot(
         | 
| 1816 | 
            -
                            label="๐ฌ ๋ํ ๊ธฐ๋ก",
         | 
| 1817 | 
            -
                            height=600,
         | 
| 1818 | 
            -
                            show_copy_button=True,
         | 
| 1819 | 
            -
                            bubble_full_width=False
         | 
| 1820 | 
            -
                        )
         | 
| 1821 | 
            -
                        
         | 
| 1822 | 
            -
                        user_input = gr.Textbox(
         | 
| 1823 | 
            -
                            label="์ง๋ฌธ ์
๋ ฅ",
         | 
| 1824 | 
            -
                            placeholder="์: ๊ธฐ๊ณํ์ต ๋ชจ๋ธ์ ์ฑ๋ฅ์ ํฅ์์ํค๋ ๋ฐฉ๋ฒ์?",
         | 
| 1825 | 
            -
                            lines=3
         | 
| 1826 | 
            -
                        )
         | 
| 1827 | 
            -
                        
         | 
| 1828 | 
            -
                        with gr.Row():
         | 
| 1829 | 
            -
                            submit_btn = gr.Button("๐ ๋ถ์ ์์", variant="primary", scale=2)
         | 
| 1830 | 
            -
                            clear_btn = gr.Button("๐๏ธ ์ด๊ธฐํ", scale=1)
         | 
| 1831 | 
            -
                        
         | 
| 1832 | 
            -
                        status_text = gr.Textbox(
         | 
| 1833 | 
            -
                            label="์ํ",
         | 
| 1834 | 
             
                            interactive=False,
         | 
| 1835 | 
            -
                             | 
| 1836 | 
            -
                            max_lines=1
         | 
| 1837 | 
             
                        )
         | 
| 1838 | 
            -
                    
         | 
| 1839 | 
            -
                    # ์ค๋ฅธ์ชฝ: AI ์ถ๋ ฅ
         | 
| 1840 | 
            -
                    with gr.Column(scale=2):
         | 
| 1841 | 
            -
                        # ์ต์ข
 ๊ฒฐ๊ณผ
         | 
| 1842 | 
            -
                        with gr.Accordion("๐ ์ต์ข
 ์ข
ํฉ ๊ฒฐ๊ณผ", open=True):
         | 
| 1843 | 
            -
                            final_output = gr.Markdown(
         | 
| 1844 | 
            -
                                value="*์ง๋ฌธ์ ์
๋ ฅํ๋ฉด ๊ฒฐ๊ณผ๊ฐ ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค.*"
         | 
| 1845 | 
            -
                            )
         | 
| 1846 | 
            -
                        
         | 
| 1847 | 
            -
                        # AI ์ถ๋ ฅ๋ค
         | 
| 1848 | 
            -
                        with gr.Row():
         | 
| 1849 | 
            -
                            # ๊ฐ๋
์ AI ์ถ๋ ฅ
         | 
| 1850 | 
            -
                            with gr.Column():
         | 
| 1851 | 
            -
                                gr.Markdown("### ๐ง  ๊ฐ๋
์ AI (๊ฑฐ์์  ๋ถ์)")
         | 
| 1852 | 
            -
                                supervisor_output = gr.Textbox(
         | 
| 1853 | 
            -
                                    label="",
         | 
| 1854 | 
            -
                                    lines=12,
         | 
| 1855 | 
            -
                                    max_lines=15,
         | 
| 1856 | 
            -
                                    interactive=False,
         | 
| 1857 | 
            -
                                    elem_classes=["supervisor-box"]
         | 
| 1858 | 
            -
                                )
         | 
| 1859 | 
            -
                        
         | 
| 1860 | 
            -
                        with gr.Row():
         | 
| 1861 | 
            -
                            # ์กฐ์ฌ์ AI ์ถ๋ ฅ
         | 
| 1862 | 
            -
                            with gr.Column():
         | 
| 1863 | 
            -
                                gr.Markdown("### ๐ ์กฐ์ฌ์ AI (์น ๊ฒ์ & ์ ๋ฆฌ)")
         | 
| 1864 | 
            -
                                researcher_output = gr.Textbox(
         | 
| 1865 | 
            -
                                    label="",
         | 
| 1866 | 
            -
                                    lines=12,
         | 
| 1867 | 
            -
                                    max_lines=15,
         | 
| 1868 | 
            -
                                    interactive=False,
         | 
| 1869 | 
            -
                                    elem_classes=["researcher-box"]
         | 
| 1870 | 
            -
                                )
         | 
| 1871 | 
            -
                            
         | 
| 1872 | 
            -
                            # ์คํ์ AI ์ถ๋ ฅ
         | 
| 1873 | 
            -
                            with gr.Column():
         | 
| 1874 | 
            -
                                gr.Markdown("### ๐๏ธ ์คํ์ AI (๋ฏธ์์  ๊ตฌํ)")
         | 
| 1875 | 
            -
                                executor_output = gr.Textbox(
         | 
| 1876 | 
            -
                                    label="",
         | 
| 1877 | 
            -
                                    lines=12,
         | 
| 1878 | 
            -
                                    max_lines=15,
         | 
| 1879 | 
            -
                                    interactive=False,
         | 
| 1880 | 
            -
                                    elem_classes=["executor-box"]
         | 
| 1881 | 
            -
                                )
         | 
| 1882 |  | 
| 1883 | 
             
                # ์์ 
         | 
| 1884 | 
             
                gr.Examples(
         | 
| @@ -1896,8 +919,8 @@ with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) | |
| 1896 | 
             
                # ์ด๋ฒคํธ ํธ๋ค๋ฌ
         | 
| 1897 | 
             
                submit_btn.click(
         | 
| 1898 | 
             
                    fn=process_query_streaming,
         | 
| 1899 | 
            -
                    inputs=[user_input | 
| 1900 | 
            -
                    outputs=[ | 
| 1901 | 
             
                ).then(
         | 
| 1902 | 
             
                    fn=lambda: "",
         | 
| 1903 | 
             
                    outputs=[user_input]
         | 
| @@ -1905,8 +928,8 @@ with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) | |
| 1905 |  | 
| 1906 | 
             
                user_input.submit(
         | 
| 1907 | 
             
                    fn=process_query_streaming,
         | 
| 1908 | 
            -
                    inputs=[user_input | 
| 1909 | 
            -
                    outputs=[ | 
| 1910 | 
             
                ).then(
         | 
| 1911 | 
             
                    fn=lambda: "",
         | 
| 1912 | 
             
                    outputs=[user_input]
         | 
| @@ -1914,7 +937,7 @@ with gr.Blocks(title="ํ๋ ฅ์  LLM ์์คํ
", theme=gr.themes.Soft(), css=css) | |
| 1914 |  | 
| 1915 | 
             
                clear_btn.click(
         | 
| 1916 | 
             
                    fn=clear_all,
         | 
| 1917 | 
            -
                    outputs=[ | 
| 1918 | 
             
                )
         | 
| 1919 |  | 
| 1920 | 
             
                gr.Markdown(
         | 
|  | |
| 614 | 
             
            # ์์คํ
 ์ธ์คํด์ค ์์ฑ
         | 
| 615 | 
             
            llm_system = LLMCollaborativeSystem()
         | 
| 616 |  | 
| 617 | 
            +
            # ๋ด๋ถ ํ์คํ ๋ฆฌ ๊ด๋ฆฌ (UI์๋ ํ์ํ์ง ์์)
         | 
| 618 | 
            +
            internal_history = []
         | 
| 619 | 
            +
             | 
| 620 | 
            +
            def process_query_streaming(user_query: str):
         | 
| 621 | 
             
                """์คํธ๋ฆฌ๋ฐ์ ์ง์ํ๋ ์ฟผ๋ฆฌ ์ฒ๋ฆฌ"""
         | 
| 622 | 
            +
                global internal_history
         | 
| 623 | 
            +
                
         | 
| 624 | 
             
                if not user_query:
         | 
| 625 | 
            +
                    return "", "", "", "", "โ ์ง๋ฌธ์ ์
๋ ฅํด์ฃผ์ธ์."
         | 
| 626 |  | 
| 627 | 
             
                conversation_log = []
         | 
| 628 | 
             
                all_responses = {"supervisor": [], "researcher": [], "executor": []}
         | 
|  | |
| 639 | 
             
                    ):
         | 
| 640 | 
             
                        supervisor_initial_response += chunk
         | 
| 641 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
         | 
| 642 | 
            +
                        yield supervisor_text, "", "", "", "๐ ๊ฐ๋
์ AI๊ฐ ๋ถ์ ์ค..."
         | 
| 643 |  | 
| 644 | 
             
                    all_responses["supervisor"].append(supervisor_initial_response)
         | 
| 645 |  | 
|  | |
| 649 |  | 
| 650 | 
             
                    # 2๋จ๊ณ: ๋ธ๋ ์ด๋ธ ๊ฒ์ ์ํ
         | 
| 651 | 
             
                    researcher_text = "[์น ๊ฒ์] ๐ ๊ฒ์ ์ค...\n"
         | 
| 652 | 
            +
                    yield supervisor_text, researcher_text, "", "", "๐ ์น ๊ฒ์ ์ํ ์ค..."
         | 
| 653 |  | 
| 654 | 
             
                    search_results = {}
         | 
| 655 | 
             
                    for keyword in keywords:
         | 
|  | |
| 657 | 
             
                        if results:
         | 
| 658 | 
             
                            search_results[keyword] = results
         | 
| 659 | 
             
                            researcher_text += f"โ '{keyword}' ๊ฒ์ ์๋ฃ\n"
         | 
| 660 | 
            +
                            yield supervisor_text, researcher_text, "", "", f"๐ '{keyword}' ๊ฒ์ ์ค..."
         | 
| 661 |  | 
| 662 | 
             
                    # 3๋จ๊ณ: ์กฐ์ฌ์ AI๊ฐ ๊ฒ์ ๊ฒฐ๊ณผ ์ ๋ฆฌ
         | 
| 663 | 
             
                    researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
         | 
|  | |
| 670 | 
             
                    ):
         | 
| 671 | 
             
                        researcher_response += chunk
         | 
| 672 | 
             
                        researcher_text = f"[์กฐ์ฌ ๊ฒฐ๊ณผ ์ ๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
         | 
| 673 | 
            +
                        yield supervisor_text, researcher_text, "", "", "๐ ์กฐ์ฌ์ AI๊ฐ ์ ๋ฆฌ ์ค..."
         | 
| 674 |  | 
| 675 | 
             
                    all_responses["researcher"].append(researcher_response)
         | 
| 676 |  | 
|  | |
| 686 | 
             
                        supervisor_execution_response += chunk
         | 
| 687 | 
             
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
         | 
| 688 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 689 | 
            +
                        yield supervisor_text, researcher_text, "", "", "๐ฏ ๊ฐ๋
์ AI๊ฐ ์ง์ ์ค..."
         | 
| 690 |  | 
| 691 | 
             
                    all_responses["supervisor"].append(supervisor_execution_response)
         | 
| 692 |  | 
|  | |
| 701 | 
             
                    ):
         | 
| 702 | 
             
                        executor_response += chunk
         | 
| 703 | 
             
                        executor_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
         | 
| 704 | 
            +
                        yield supervisor_text, researcher_text, executor_text, "", "๐ง ์คํ์ AI๊ฐ ๊ตฌํ ์ค..."
         | 
| 705 |  | 
| 706 | 
             
                    all_responses["executor"].append(executor_response)
         | 
| 707 |  | 
|  | |
| 725 | 
             
                        review_response += chunk
         | 
| 726 | 
             
                        temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์คํ ์ง์] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒํ  ๋ฐ ํผ๋๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
         | 
| 727 | 
             
                        supervisor_text = f"[์ด๊ธฐ ๋ถ์] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
         | 
| 728 | 
            +
                        yield supervisor_text, researcher_text, executor_text, "", "๐ ๊ฐ๋
์ AI๊ฐ ๊ฒํ  ์ค..."
         | 
| 729 |  | 
| 730 | 
             
                    all_responses["supervisor"].append(review_response)
         | 
| 731 |  | 
|  | |
| 746 | 
             
                        final_executor_response += chunk
         | 
| 747 | 
             
                        temp_text = f"[์ด๊ธฐ ๊ตฌํ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ต์ข
 ๋ณด๊ณ ์] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
         | 
| 748 | 
             
                        executor_text = temp_text
         | 
| 749 | 
            +
                        yield supervisor_text, researcher_text, executor_text, "", "๐ ์ต์ข
 ๋ณด๊ณ ์ ์์ฑ ์ค..."
         | 
| 750 |  | 
| 751 | 
             
                    all_responses["executor"].append(final_executor_response)
         | 
| 752 |  | 
|  | |
| 784 | 
             
            ---
         | 
| 785 | 
             
            *์ด ๋ณด๊ณ ์๋ ์น ๊ฒ์์ ํตํ ์ต์  ์ ๋ณด์ AI๋ค์ ํ๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํผ๋๋ฐฑ ๋ฐ์์ ํตํด ์์ฑ๋์์ต๋๋ค.*"""
         | 
| 786 |  | 
| 787 | 
            +
                    # ๋ด๋ถ ํ์คํ ๋ฆฌ ์
๋ฐ์ดํธ (UI์๋ ํ์ํ์ง ์์)
         | 
| 788 | 
            +
                    internal_history.append((user_query, final_summary))
         | 
| 789 |  | 
| 790 | 
            +
                    yield supervisor_text, researcher_text, executor_text, final_summary, "โ
 ์ต์ข
 ๋ณด๊ณ ์ ์์ฑ!"
         | 
| 791 |  | 
| 792 | 
             
                except Exception as e:
         | 
| 793 | 
             
                    error_msg = f"โ ์ฒ๋ฆฌ ์ค ์ค๋ฅ: {str(e)}"
         | 
| 794 | 
            +
                    yield "", "", "", error_msg, error_msg
         | 
| 795 |  | 
| 796 | 
             
            def clear_all():
         | 
| 797 | 
             
                """๋ชจ๋  ๋ด์ฉ ์ด๊ธฐํ"""
         | 
| 798 | 
            +
                global internal_history
         | 
| 799 | 
            +
                internal_history = []
         | 
| 800 | 
            +
                return "", "", "", "", "๐ ์ด๊ธฐํ๋์์ต๋๋ค."
         | 
| 801 |  | 
| 802 | 
             
            # Gradio ์ธํฐํ์ด์ค
         | 
| 803 | 
             
            css = """
         | 
|  | |
| 840 | 
             
                    """
         | 
| 841 | 
             
                )
         | 
| 842 |  | 
| 843 | 
            +
                # ์
๋ ฅ ์น์
         | 
| 844 | 
             
                with gr.Row():
         | 
| 845 | 
            +
                    with gr.Column():
         | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 846 | 
             
                        user_input = gr.Textbox(
         | 
| 847 | 
             
                            label="์ง๋ฌธ ์
๋ ฅ",
         | 
| 848 | 
             
                            placeholder="์: ๊ธฐ๊ณํ์ต ๋ชจ๋ธ์ ์ฑ๋ฅ์ ํฅ์์ํค๋ ๋ฐฉ๋ฒ์?",
         | 
|  | |
| 859 | 
             
                            value="๋๊ธฐ ์ค...",
         | 
| 860 | 
             
                            max_lines=1
         | 
| 861 | 
             
                        )
         | 
| 862 | 
            +
                
         | 
| 863 | 
            +
                # ์ต์ข
 ๊ฒฐ๊ณผ
         | 
| 864 | 
            +
                with gr.Row():
         | 
| 865 | 
            +
                    with gr.Column():
         | 
| 866 | 
             
                        with gr.Accordion("๐ ์ต์ข
 ์ข
ํฉ ๊ฒฐ๊ณผ", open=True):
         | 
| 867 | 
             
                            final_output = gr.Markdown(
         | 
| 868 | 
             
                                value="*์ง๋ฌธ์ ์
๋ ฅํ๋ฉด ๊ฒฐ๊ณผ๊ฐ ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค.*"
         | 
| 869 | 
             
                            )
         | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 870 |  | 
| 871 | 
            +
                # AI ์ถ๋ ฅ๋ค - ํ ์ค์ ๋๋ํ ๋ฐฐ์น
         | 
| 872 | 
            +
                with gr.Row():
         | 
| 873 | 
            +
                    # ๊ฐ๋
์ AI ์ถ๋ ฅ
         | 
| 874 | 
            +
                    with gr.Column():
         | 
| 875 | 
            +
                        gr.Markdown("### ๐ง  ๊ฐ๋
์ AI (๊ฑฐ์์  ๋ถ์)")
         | 
| 876 | 
            +
                        supervisor_output = gr.Textbox(
         | 
| 877 | 
            +
                            label="",
         | 
| 878 | 
            +
                            lines=20,
         | 
| 879 | 
            +
                            max_lines=25,
         | 
| 880 | 
            +
                            interactive=False,
         | 
| 881 | 
            +
                            elem_classes=["supervisor-box"]
         | 
| 882 | 
            +
                        )
         | 
| 883 |  | 
| 884 | 
            +
                    # ์กฐ์ฌ์ AI ์ถ๋ ฅ
         | 
| 885 | 
            +
                    with gr.Column():
         | 
| 886 | 
            +
                        gr.Markdown("### ๐ ์กฐ์ฌ์ AI (์น ๊ฒ์ & ์ ๋ฆฌ)")
         | 
| 887 | 
            +
                        researcher_output = gr.Textbox(
         | 
| 888 | 
            +
                            label="",
         | 
| 889 | 
            +
                            lines=20,
         | 
| 890 | 
            +
                            max_lines=25,
         | 
| 891 | 
            +
                            interactive=False,
         | 
| 892 | 
            +
                            elem_classes=["researcher-box"]
         | 
| 893 | 
            +
                        )
         | 
| 894 |  | 
| 895 | 
            +
                    # ์คํ์ AI ์ถ๋ ฅ
         | 
| 896 | 
            +
                    with gr.Column():
         | 
| 897 | 
            +
                        gr.Markdown("### ๐๏ธ ์คํ์ AI (๋ฏธ์์  ๊ตฌํ)")
         | 
| 898 | 
            +
                        executor_output = gr.Textbox(
         | 
| 899 | 
            +
                            label="",
         | 
| 900 | 
            +
                            lines=20,
         | 
| 901 | 
            +
                            max_lines=25,
         | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 902 | 
             
                            interactive=False,
         | 
| 903 | 
            +
                            elem_classes=["executor-box"]
         | 
|  | |
| 904 | 
             
                        )
         | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 905 |  | 
| 906 | 
             
                # ์์ 
         | 
| 907 | 
             
                gr.Examples(
         | 
|  | |
| 919 | 
             
                # ์ด๋ฒคํธ ํธ๋ค๋ฌ
         | 
| 920 | 
             
                submit_btn.click(
         | 
| 921 | 
             
                    fn=process_query_streaming,
         | 
| 922 | 
            +
                    inputs=[user_input],
         | 
| 923 | 
            +
                    outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 924 | 
             
                ).then(
         | 
| 925 | 
             
                    fn=lambda: "",
         | 
| 926 | 
             
                    outputs=[user_input]
         | 
|  | |
| 928 |  | 
| 929 | 
             
                user_input.submit(
         | 
| 930 | 
             
                    fn=process_query_streaming,
         | 
| 931 | 
            +
                    inputs=[user_input],
         | 
| 932 | 
            +
                    outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 933 | 
             
                ).then(
         | 
| 934 | 
             
                    fn=lambda: "",
         | 
| 935 | 
             
                    outputs=[user_input]
         | 
|  | |
| 937 |  | 
| 938 | 
             
                clear_btn.click(
         | 
| 939 | 
             
                    fn=clear_all,
         | 
| 940 | 
            +
                    outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
         | 
| 941 | 
             
                )
         | 
| 942 |  | 
| 943 | 
             
                gr.Markdown(
         | 
