Remove old QwenModel
e2bqwen.py  +1 -166

e2bqwen.py CHANGED
@@ -411,7 +411,7 @@ class E2BVisionAgent(CodeAgent):
             and previous_memory_step.step_number == current_step - 1
         ):
             if previous_memory_step.tool_calls[0].arguments == memory_step.tool_calls[0].arguments:
-                memory_step.observations += "\nWARNING: You've executed the same action several times in a row. MAKE SURE TO NOT
+                memory_step.observations += "\nWARNING: You've executed the same action several times in a row. MAKE SURE TO NOT UNNECESSARILY REPEAT ACTIONS."
 
         # Add to the current memory step
         memory_step.observations_images = [image.copy()]
@@ -471,168 +471,3 @@ class QwenVLAPIModel(Model):
             return message
         except Exception as e:
             raise Exception(f"Both endpoints failed. Last error: {e}")
-
-# class QwenVLAPIModel(Model):
-#     """Model wrapper for Qwen2.5VL API with fallback mechanism"""
-
-#     def __init__(
-#         self,
-#         model_path: str = "Qwen/Qwen2.5-VL-72B-Instruct",
-#         provider: str = "hyperbolic",
-#         hf_token: str = None,
-#         hf_base_url: str = "https://n5wr7lfx6wp94tvl.us-east-1.aws.endpoints.huggingface.cloud"
-#     ):
-#         super().__init__()
-#         self.model_path = model_path
-#         self.model_id = model_path
-#         self.provider = provider
-#         self.hf_token = hf_token
-#         self.hf_base_url = hf_base_url
-
-#         # Initialize hyperbolic client
-#         self.hyperbolic_client = InferenceClient(
-#             provider=self.provider,
-#         )
-
-#         assert not self.hf_base_url.endswith("/v1/"), "Enter your base url without '/v1/' suffix."
-
-#         # Initialize HF OpenAI-compatible client if token is provided
-#         self.hf_client = None
-#         from openai import OpenAI
-#         self.hf_client = OpenAI(
-#             base_url=self.hf_base_url + "/v1/",
-#             api_key=self.hf_token
-#         )
-
-#     def __call__(
-#         self,
-#         messages: List[Dict[str, Any]],
-#         stop_sequences: Optional[List[str]] = None,
-#         **kwargs
-#     ) -> ChatMessage:
-#         """Convert a list of messages to an API request with fallback mechanism"""
-
-#         # Format messages once for both APIs
-#         formatted_messages = self._format_messages(messages)
-
-#         # First try the HF endpoint if available - THIS ALWAYS FAILS SO SKIPPING
-#         try:
-#             completion = self._call_hf_endpoint(
-#                 formatted_messages,
-#                 stop_sequences,
-#                 **kwargs
-#             )
-#             print("SUCCESSFUL call of inference endpoint")
-#             return ChatMessage(role=MessageRole.ASSISTANT, content=completion)
-#         except Exception as e:
-#             print(f"HF endpoint failed with error: {e}. Falling back to hyperbolic.")
-#             # Continue to fallback
-
-#         # Fallback to hyperbolic
-#         try:
-#             return self._call_hyperbolic(formatted_messages, stop_sequences, **kwargs)
-#         except Exception as e:
-#             raise Exception(f"Both endpoints failed. Last error: {e}")
-
-#     def _format_messages(self, messages: List[Dict[str, Any]]):
-#         """Format messages for API requests - works for both endpoints"""
-
-#         formatted_messages = []
-
-#         for msg in messages:
-#             role = msg["role"]
-#             content = []
-
-#             if isinstance(msg["content"], list):
-#                 for item in msg["content"]:
-#                     if item["type"] == "text":
-#                         content.append({"type": "text", "text": item["text"]})
-#                     elif item["type"] == "image":
-#                         # Handle image path or direct image object
-#                         if isinstance(item["image"], str):
-#                             # Image is a path
-#                             with open(item["image"], "rb") as image_file:
-#                                 base64_image = base64.b64encode(image_file.read()).decode("utf-8")
-#                         else:
-#                             # Image is a PIL image or similar object
-#                             img_byte_arr = BytesIO()
-#                             base64_image = base64.b64encode(img_byte_arr.getvalue()).decode("utf-8")
-
-#                         content.append({
-#                             "type": "image_url",
-#                             "image_url": {
-#                                 "url": f"data:image/png;base64,{base64_image}"
-#                             }
-#                         })
-#             else:
-#                 # Plain text message
-#                 content = [{"type": "text", "text": msg["content"]}]
-
-#             formatted_messages.append({"role": role, "content": content})
-
-#         return formatted_messages
-
-#     def _call_hf_endpoint(self, formatted_messages, stop_sequences=None, **kwargs):
-#         """Call the Hugging Face OpenAI-compatible endpoint"""
-
-#         # Extract parameters with defaults
-#         max_tokens = kwargs.get("max_new_tokens", 4096)
-#         temperature = kwargs.get("temperature", 0.7)
-#         top_p = kwargs.get("top_p", 0.9)
-#         stream = kwargs.get("stream", False)
-
-#         completion = self.hf_client.chat.completions.create(
-#             model="tgi",  # Model name for the endpoint
-#             messages=formatted_messages,
-#             max_tokens=max_tokens,
-#             temperature=temperature,
-#             top_p=top_p,
-#             stream=stream,
-#             stop=stop_sequences
-#         )
-
-#         if stream:
-#             # For streaming responses, return a generator
-#             def stream_generator():
-#                 for chunk in completion:
-#                     yield chunk.choices[0].delta.content or ""
-#             return stream_generator()
-#         else:
-#             # For non-streaming, return the full text
-#             return completion.choices[0].message.content
-
-#     def _call_hyperbolic(self, formatted_messages, stop_sequences=None, **kwargs):
-#         """Call the hyperbolic API"""
-
-#         completion = self.hyperbolic_client.chat.completions.create(
-#             model=self.model_path,
-#             messages=formatted_messages,
-#             max_tokens=kwargs.get("max_new_tokens", 4096),
-#             temperature=kwargs.get("temperature", 0.7),
-#             top_p=kwargs.get("top_p", 0.9),
-#             stop=stop_sequences
-#         )
-
-#         # Extract the response text
-#         output_text = completion.choices[0].message.content
-
-#         return ChatMessage(role=MessageRole.ASSISTANT, content=output_text)
-
-#     def to_dict(self) -> Dict[str, Any]:
-#         """Convert the model to a dictionary"""
-#         return {
-#             "class": self.__class__.__name__,
-#             "model_path": self.model_path,
-#             "provider": self.provider,
-#             "hf_base_url": self.hf_base_url,
-#             # We don't save the API keys for security reasons
-#         }
-
-#     @classmethod
-#     def from_dict(cls, data: Dict[str, Any]) -> "QwenVLAPIModel":
-#         """Create a model from a dictionary"""
-#         return cls(
-#             model_path=data.get("model_path", "Qwen/Qwen2.5-VL-72B-Instruct"),
-#             provider=data.get("provider", "hyperbolic"),
-#             hf_base_url=data.get("hf_base_url", "https://s41ydkv0iyjeokyj.us-east-1.aws.endpoints.huggingface.cloud"),
-#         )