	Update app.py
    	
app.py CHANGED
    
@@ -28,9 +28,9 @@ Always run the code at each step and repeat the steps if necessary until you rea
     NEVER ASSUME, ALWAYS VERIFY!"""
 
 
-def execute_jupyter_agent(sytem_prompt, user_input, max_new_tokens, message_history):
+def execute_jupyter_agent(sytem_prompt, user_input, max_new_tokens, model, message_history):
     client = InferenceClient(api_key=HF_TOKEN)
-    model = "meta-llama/Llama-3.1-8B-Instruct"
+    #model = "meta-llama/Llama-3.1-8B-Instruct"
 
     sbx = Sandbox(api_key=E2B_API_KEY)
 
@@ -86,10 +86,11 @@ with gr.Blocks(css=css) as demo:
             step=8,
             interactive=True
         )
+        model = gr.Dropdown(choices=["meta-llama/Llama-3.1-8B-Instruct", "meta-llama/Llama-3.1-70B-Instruct"])
 
     generate_btn.click(
         fn=execute_jupyter_agent,
-        inputs=[system_input, user_input, max_tokens, gr.State(value=[])],
+        inputs=[system_input, user_input, max_tokens, model, gr.State(value=[])],
         outputs=[html_output,  gr.State()]
     )
 
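Net effect of the commit: the model id is no longer hard-coded inside execute_jupyter_agent; a gr.Dropdown supplies it, and its current value is forwarded to the handler through the click() inputs list. Below is a minimal, self-contained sketch of that wiring. The handler body, component labels, and slider bounds are illustrative assumptions (the real Space calls InferenceClient and the E2B Sandbox, which are stubbed out here).

# Sketch of the dropdown-to-handler wiring introduced by this commit.
# run_agent and its body are stand-ins, not the Space's actual code.
import gradio as gr

MODEL_CHOICES = [
    "meta-llama/Llama-3.1-8B-Instruct",
    "meta-llama/Llama-3.1-70B-Instruct",
]

def run_agent(system_prompt, user_input, max_new_tokens, model, message_history):
    # In the real app this would call InferenceClient / the E2B Sandbox;
    # here we just echo the selected model to show how the value arrives.
    message_history = message_history or []
    message_history.append(user_input)
    return f"<p>model={model}, turns={len(message_history)}</p>", message_history

with gr.Blocks() as demo:
    system_input = gr.Textbox(label="System prompt")
    user_input = gr.Textbox(label="User input")
    max_tokens = gr.Slider(minimum=128, maximum=4096, value=512, step=8,
                           interactive=True, label="Max new tokens")
    # The dropdown's selected value is passed as the `model` argument of the handler.
    model = gr.Dropdown(choices=MODEL_CHOICES, value=MODEL_CHOICES[0], label="Model")
    generate_btn = gr.Button("Run")
    html_output = gr.HTML()
    state = gr.State(value=[])

    generate_btn.click(
        fn=run_agent,
        inputs=[system_input, user_input, max_tokens, model, state],
        outputs=[html_output, state],
    )

if __name__ == "__main__":
    demo.launch()

One design note: the sketch reuses a single gr.State component for both input and output, so the conversation history persists across clicks; the diff as committed creates separate gr.State() instances inline, which keeps the same call signature but does not carry state between them.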
