import os

from huggingface_hub import InferenceClient

# Route the request to the Fireworks AI provider through the Hugging Face Inference Providers API.
# Read the API key from the environment instead of hardcoding a secret in source.
client = InferenceClient(
    provider="fireworks-ai",
    api_key=os.environ["FIREWORKS_API_KEY"],
)

# Stream the chat completion so tokens are printed as they arrive.
stream = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-R1",
    messages=[
        {
            "role": "user",
            "content": "What is the capital of France?"
        }
    ],
    stream=True,
)

for chunk in stream:
    # Some chunks (e.g. the final one) may carry no content, so guard against printing None.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")