Spaces:
				
			
			
	
			
			
		Sleeping
		
	
	
	
			
			
	
	
	
	
		
		
		Sleeping
		
	Create app.py
Browse files
    	
        app.py
    ADDED
    
    | @@ -0,0 +1,51 @@ | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | 
|  | |
| 1 | 
            +
            from huggingface_hub import InferenceClient
         | 
| 2 | 
            +
            import gradio as gr
         | 
| 3 | 
            +
             | 
# Serverless Inference API client, pinned to the Mixtral 8x7B instruct model;
# shared by every call to generate() below.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
| 5 | 
            +
             | 
def format_prompt(message, history):
    """Build a Mixtral-instruct prompt from prior turns plus the new message.

    Each (user, bot) pair in *history* is rendered as
    ``[INST] user [/INST] bot</s> ``; the whole string is prefixed with the
    ``<s>`` BOS marker and ends with the new *message* in an open
    ``[INST] ... [/INST]`` block awaiting the model's reply.
    """
    past_turns = "".join(
        f"[INST] {user_turn} [/INST] {bot_turn}</s> "
        for user_turn, bot_turn in history
    )
    return f"<s>{past_turns}[INST] {message} [/INST]"
| 13 | 
            +
             | 
def generate(
    prompt, history, system_prompt=None,
    temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
):
    """Stream a Mixtral completion, yielding the accumulated reply after each token.

    Parameters
    ----------
    prompt : str
        The user's new message.
    history : list of (user, bot) tuples
        Prior chat turns, forwarded to ``format_prompt``.
    system_prompt : str or None
        System instruction prepended to the user message.  Defaults to the
        built-in persona string.  (Fix: the original declared this as a
        required parameter and then unconditionally overwrote it, so the
        parameter was dead and ``gr.ChatInterface`` — which calls the fn with
        only ``(message, history)`` — raised TypeError.)
    temperature, max_new_tokens, top_p, repetition_penalty
        Sampling controls passed through to the inference endpoint.

    Yields
    ------
    str
        The response text accumulated so far, suitable for streaming UIs.
    """
    if system_prompt is None:
        system_prompt = (
            "You are Mixtral, a gentle and smart AI assistant who is always "
            "ready to help and answer any questions truthfully"
        )

    # Clamp temperature away from zero — the endpoint rejects ~0 values.
    temperature = max(float(temperature), 1e-2)
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=int(max_new_tokens),  # sliders may deliver floats
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,  # fixed seed: reproducible sampling for a given prompt
    )

    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs,
        stream=True, details=True, return_full_text=False,
    )

    output = ""
    for response in stream:
        output += response.token.text
        yield output
| 41 | 
            +
             | 
# Chat display widget: panel layout with custom avatars; labels and the share
# button hidden, copy and like controls enabled.
mychatbot = gr.Chatbot(
    layout="panel",
    avatar_images=["./user.png", "./bot.png"],
    show_label=False,
    show_share_button=False,
    show_copy_button=True,
    likeable=True,
    bubble_full_width=False,
)
| 44 | 
            +
             | 
# Wire the streaming generator into a Gradio chat UI.  Retry/undo buttons are
# suppressed, and the app is launched without exposing the HTTP API.
chat_app = gr.ChatInterface(
    fn=generate,
    chatbot=mychatbot,
    title="Tomoniai's Mixtral Chat",
    retry_btn=None,
    undo_btn=None,
)
chat_app.launch(show_api=False)