Merge branch 'main' of github.com:xlang-ai/OSWorld

This commit is contained in:
Timothyxxx
2025-08-22 23:29:21 +08:00
12 changed files with 899 additions and 62 deletions

View File

@@ -199,26 +199,62 @@ class SetupController:
path: str = f["path"]
if not os.path.exists(local_path):
logger.error(f"Setup Upload - Invalid local path ({local_path}).")
return
raise Exception(f"Setup Upload - Invalid local path ({local_path}).")
form = MultipartEncoder({
"file_path": path,
"file_data": (os.path.basename(path), open(local_path, "rb"))
})
headers = {"Content-Type": form.content_type}
logger.debug(form.content_type)
# send request to server to upload file
file_size = None
try:
logger.debug("REQUEST ADDRESS: %s", self.http_server + "/setup" + "/upload")
response = requests.post(self.http_server + "/setup" + "/upload", headers=headers, data=form)
if response.status_code == 200:
logger.info("Command executed successfully: %s", response.text)
else:
logger.error("Failed to upload file. Status code: %s", response.text)
except requests.exceptions.RequestException as e:
logger.error("An error occurred while trying to send the request: %s", e)
file_size = os.path.getsize(local_path)
except Exception:
pass
max_retries = 3
last_error: Optional[Exception] = None
for attempt in range(max_retries):
try:
logger.info(
f"Uploading {os.path.basename(local_path)}{f' ({file_size} bytes)' if file_size is not None else ''} "
f"to VM at {path} (attempt {attempt + 1}/{max_retries})"
)
logger.debug("REQUEST ADDRESS: %s", self.http_server + "/setup" + "/upload")
# Open the file inside each attempt to ensure fresh stream position
with open(local_path, "rb") as fp:
form = MultipartEncoder({
"file_path": path,
"file_data": (os.path.basename(path), fp)
})
headers = {"Content-Type": form.content_type}
logger.debug(form.content_type)
# Explicit connect/read timeout to avoid hanging forever
response = requests.post(
self.http_server + "/setup" + "/upload",
headers=headers,
data=form,
timeout=(10, 600)
)
if response.status_code == 200:
logger.info(f"File uploaded successfully: {path}")
logger.debug("Upload response: %s", response.text)
last_error = None
break
else:
msg = f"Failed to upload file {path}. Status code: {response.status_code}, Response: {response.text}"
logger.error(msg)
last_error = requests.RequestException(msg)
except requests.exceptions.RequestException as e:
last_error = e
logger.error(f"Upload attempt {attempt + 1} failed for {path}: {e}")
# Exponential backoff between retries
if attempt < max_retries - 1:
time.sleep(2 ** attempt)
if last_error is not None:
raise last_error
def _change_wallpaper_setup(self, path: str):
if not path:

View File

@@ -172,54 +172,52 @@ class DesktopEnv(gym.Env):
else:
self.path_to_vm = self.manager.get_vm_path(os_type=self.os_type, region=region, screen_size=(self.screen_width, self.screen_height))
try:
self.snapshot_name = snapshot_name
self.cache_dir_base: str = cache_dir
# todo: add the logic to get the screen size from the VM
self.headless = headless
self.require_a11y_tree = require_a11y_tree
self.require_terminal = require_terminal
self.snapshot_name = snapshot_name
self.cache_dir_base: str = cache_dir
# todo: add the logic to get the screen size from the VM
self.headless = headless
self.require_a11y_tree = require_a11y_tree
self.require_terminal = require_terminal
# Initialize emulator and controller
logger.info("Initializing...")
self._start_emulator()
# Initialize emulator and controller
logger.info("Initializing...")
self._start_emulator()
# mode: human or machine
self.instruction = None
assert action_space in ["computer_13", "pyautogui", "claude_computer_use", "autoglm_computer_use"]
self.action_space = action_space # todo: refactor it to the ActType
# mode: human or machine
self.instruction = None
assert action_space in ["computer_13", "pyautogui", "claude_computer_use", "autoglm_computer_use"]
self.action_space = action_space # todo: refactor it to the ActType
# episodic stuffs, like counters, will be updated or reset
# when calling self.reset()
self._traj_no: int = -1
self._step_no: int = 0
self.action_history: List[Dict[str, any]] = []
# episodic stuffs, like counters, will be updated or reset
# when calling self.reset()
self._traj_no: int = -1
self._step_no: int = 0
self.action_history: List[Dict[str, any]] = []
except Exception as e:
logger.error(f"Failed to initialize DesktopEnv: {e}")
# If initialization fails, we should clean up the VM
try:
self.close()
self.manager.delete_vm(self.path_to_vm, self.region)
logger.info(f"Cleaned up VM {self.path_to_vm}.")
except Exception as cleanup_error:
logger.error(f"Failed to clean up VM {self.path_to_vm}: {cleanup_error}")
raise
def _start_emulator(self):
# Power on the virtual machine
self.provider.start_emulator(self.path_to_vm, self.headless, self.os_type)
try:
# Power on the virtual machine
self.provider.start_emulator(self.path_to_vm, self.headless, self.os_type)
# Get the ip from the virtual machine, and setup the controller
vm_ip_ports = self.provider.get_ip_address(self.path_to_vm).split(':')
self.vm_ip = vm_ip_ports[0]
# Get the ports from the virtual machine (for Docker provider only)
if len(vm_ip_ports) > 1:
self.server_port = int(vm_ip_ports[1])
self.chromium_port = int(vm_ip_ports[2])
self.vnc_port = int(vm_ip_ports[3])
self.vlc_port = int(vm_ip_ports[4])
self.controller = PythonController(vm_ip=self.vm_ip, server_port=self.server_port)
self.setup_controller = SetupController(vm_ip=self.vm_ip, server_port=self.server_port, chromium_port=self.chromium_port, vlc_port=self.vlc_port, cache_dir=self.cache_dir_base, client_password=self.client_password, screen_width=self.screen_width, screen_height=self.screen_height)
# Get the ip from the virtual machine, and setup the controller
vm_ip_ports = self.provider.get_ip_address(self.path_to_vm).split(':')
self.vm_ip = vm_ip_ports[0]
# Get the ports from the virtual machine (for Docker provider only)
if len(vm_ip_ports) > 1:
self.server_port = int(vm_ip_ports[1])
self.chromium_port = int(vm_ip_ports[2])
self.vnc_port = int(vm_ip_ports[3])
self.vlc_port = int(vm_ip_ports[4])
self.controller = PythonController(vm_ip=self.vm_ip, server_port=self.server_port)
self.setup_controller = SetupController(vm_ip=self.vm_ip, server_port=self.server_port, chromium_port=self.chromium_port, vlc_port=self.vlc_port, cache_dir=self.cache_dir_base, client_password=self.client_password, screen_width=self.screen_width, screen_height=self.screen_height)
except Exception as e:
try:
self.provider.stop_emulator(self.path_to_vm)
except Exception as stop_err:
logger.warning(f"Cleanup after interrupt failed: {stop_err}")
raise
def _revert_to_snapshot(self):
# Revert to certain snapshot of the virtual machine, and refresh the path to vm and ip of vm

View File

@@ -0,0 +1,10 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
from .abstract_cache_base import AbstractCache
from .cache import Cache
__all__ = ["AbstractCache", "Cache"]

View File

@@ -0,0 +1,75 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import sys
from types import TracebackType
from typing import Any, Optional, Protocol
from ..doc_utils import export_module
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
@export_module("autogen.cache")
class AbstractCache(Protocol):
"""This protocol defines the basic interface for cache operations.
Implementing classes should provide concrete implementations for
these methods to handle caching mechanisms.
"""
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
"""Retrieve an item from the cache.
Args:
key (str): The key identifying the item in the cache.
default (optional): The default value to return if the key is not found.
Defaults to None.
Returns:
The value associated with the key if found, else the default value.
"""
...
def set(self, key: str, value: Any) -> None:
"""Set an item in the cache.
Args:
key (str): The key under which the item is to be stored.
value: The value to be stored in the cache.
"""
...
def close(self) -> None:
"""Close the cache. Perform any necessary cleanup, such as closing network connections or
releasing resources.
"""
...
def __enter__(self) -> Self:
"""Enter the runtime context related to this object.
The with statement will bind this method's return value to the target(s)
specified in the as clause of the statement, if any.
"""
...
def __exit__(
self,
exc_type: Optional[type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
"""Exit the runtime context and close the cache.
Args:
exc_type: The exception type if an exception was raised in the context.
exc_value: The exception value if an exception was raised in the context.
traceback: The traceback if an exception was raised in the context.
"""
...

203
mm_agents/coact/autogen/cache/cache.py vendored Normal file
View File

@@ -0,0 +1,203 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
from __future__ import annotations
from contextvars import ContextVar
from types import TracebackType
from typing import Any, Optional, Union
from ..doc_utils import export_module
from .abstract_cache_base import AbstractCache
from .cache_factory import CacheFactory
@export_module("autogen")
class Cache(AbstractCache):
"""A wrapper class for managing cache configuration and instances.
This class provides a unified interface for creating and interacting with
different types of cache (e.g., Redis, Disk). It abstracts the underlying
cache implementation details, providing methods for cache operations.
Attributes:
config (Dict[str, Any]): A dictionary containing cache configuration.
cache: The cache instance created based on the provided configuration.
"""
_current_cache: ContextVar[Cache] = ContextVar("current_cache", default=None)
ALLOWED_CONFIG_KEYS = [
"cache_seed",
"redis_url",
"cache_path_root",
"cosmos_db_config",
]
@staticmethod
def redis(cache_seed: Union[str, int] = 42, redis_url: str = "redis://localhost:6379/0") -> Cache:
"""Create a Redis cache instance.
Args:
cache_seed (Union[str, int], optional): A seed for the cache. Defaults to 42.
redis_url (str, optional): The URL for the Redis server. Defaults to "redis://localhost:6379/0".
Returns:
Cache: A Cache instance configured for Redis.
"""
return Cache({"cache_seed": cache_seed, "redis_url": redis_url})
@staticmethod
def disk(cache_seed: Union[str, int] = 42, cache_path_root: str = ".cache") -> Cache:
"""Create a Disk cache instance.
Args:
cache_seed (Union[str, int], optional): A seed for the cache. Defaults to 42.
cache_path_root (str, optional): The root path for the disk cache. Defaults to ".cache".
Returns:
Cache: A Cache instance configured for Disk caching.
"""
return Cache({"cache_seed": cache_seed, "cache_path_root": cache_path_root})
@staticmethod
def cosmos_db(
connection_string: Optional[str] = None,
container_id: Optional[str] = None,
cache_seed: Union[str, int] = 42,
client: Optional[Any] = None,
) -> Cache:
"""Create a Cosmos DB cache instance with 'autogen_cache' as database ID.
Args:
connection_string (str, optional): Connection string to the Cosmos DB account.
container_id (str, optional): The container ID for the Cosmos DB account.
cache_seed (Union[str, int], optional): A seed for the cache.
client: Optional[CosmosClient]: Pass an existing Cosmos DB client.
Returns:
Cache: A Cache instance configured for Cosmos DB.
"""
cosmos_db_config = {
"connection_string": connection_string,
"database_id": "autogen_cache",
"container_id": container_id,
"client": client,
}
return Cache({"cache_seed": str(cache_seed), "cosmos_db_config": cosmos_db_config})
def __init__(self, config: dict[str, Any]):
"""Initialize the Cache with the given configuration.
Validates the configuration keys and creates the cache instance.
Args:
config (Dict[str, Any]): A dictionary containing the cache configuration.
Raises:
ValueError: If an invalid configuration key is provided.
"""
self.config = config
# Ensure that the seed is always treated as a string before being passed to any cache factory or stored.
self.config["cache_seed"] = str(self.config.get("cache_seed", 42))
# validate config
for key in self.config:
if key not in self.ALLOWED_CONFIG_KEYS:
raise ValueError(f"Invalid config key: {key}")
# create cache instance
self.cache = CacheFactory.cache_factory(
seed=self.config["cache_seed"],
redis_url=self.config.get("redis_url"),
cache_path_root=self.config.get("cache_path_root"),
cosmosdb_config=self.config.get("cosmos_db_config"),
)
def __enter__(self) -> Cache:
"""Enter the runtime context related to the cache object.
Returns:
The cache instance for use within a context block.
"""
# Store the previous cache so we can restore it
self._previous_cache = self.__class__._current_cache.get(None)
# Set the current cache to this instance
self._token = self.__class__._current_cache.set(self)
# Call the underlying cache's __enter__ method
return self.cache.__enter__()
def __exit__(
self,
exc_type: Optional[type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
"""Exit the runtime context related to the cache object.
Cleans up the cache instance and handles any exceptions that occurred
within the context.
Args:
exc_type: The exception type if an exception was raised in the context.
exc_value: The exception value if an exception was raised in the context.
traceback: The traceback if an exception was raised in the context.
"""
# First exit the underlying cache context
result = self.cache.__exit__(exc_type, exc_value, traceback)
try:
# Then reset the context variable to previous value
self.__class__._current_cache.reset(self._token)
except RuntimeError:
# Token might have been reset by a nested context manager
# In this case, we just set it back to the previous value
if self._previous_cache is not None:
self.__class__._current_cache.set(self._previous_cache)
return result
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
"""Retrieve an item from the cache.
Args:
key (str): The key identifying the item in the cache.
default (optional): The default value to return if the key is not found.
Defaults to None.
Returns:
The value associated with the key if found, else the default value.
"""
return self.cache.get(key, default)
def set(self, key: str, value: Any) -> None:
"""Set an item in the cache.
Args:
key (str): The key under which the item is to be stored.
value: The value to be stored in the cache.
"""
self.cache.set(key, value)
def close(self) -> None:
"""Close the cache.
Perform any necessary cleanup, such as closing connections or releasing resources.
"""
self.cache.close()
@classmethod
def get_current_cache(cls, cache: "Optional[Cache]" = None) -> "Optional[Cache]":
"""Get the current cache instance.
Returns:
Cache: The current cache instance.
"""
if cache is not None:
return cache
try:
return cls._current_cache.get()
except LookupError:
return None

View File

@@ -0,0 +1,88 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import logging
import os
from typing import Any, Optional, Union
from ..import_utils import optional_import_block
from .abstract_cache_base import AbstractCache
from .disk_cache import DiskCache
class CacheFactory:
    """Builds a concrete ``AbstractCache`` implementation from the supplied configuration."""

    @staticmethod
    def cache_factory(
        seed: Union[str, int],
        redis_url: Optional[str] = None,
        cache_path_root: str = ".cache",
        cosmosdb_config: Optional[dict[str, Any]] = None,
    ) -> AbstractCache:
        """Factory function for creating cache instances.

        This function decides whether to create a RedisCache, DiskCache, or CosmosDBCache instance
        based on the provided parameters. If RedisCache is available and a redis_url is provided,
        a RedisCache instance is created. If connection_string, database_id, and container_id
        are provided, a CosmosDBCache is created. Otherwise, a DiskCache instance is used.

        Args:
            seed (Union[str, int]): Used as a seed or namespace for the cache.
            redis_url (Optional[str]): URL for the Redis server.
            cache_path_root (str): Root path for the disk cache.
            cosmosdb_config (Optional[Dict[str, str]]): Dictionary containing 'connection_string',
                'database_id', and 'container_id' for Cosmos DB cache.

        Returns:
            An instance of RedisCache, DiskCache, or CosmosDBCache.

        Examples:
            Creating a Redis cache

            ```python
            redis_cache = cache_factory("myseed", "redis://localhost:6379/0")
            ```

            Creating a Disk cache

            ```python
            disk_cache = cache_factory("myseed", None)
            ```

            Creating a Cosmos DB cache:

            ```python
            cosmos_cache = cache_factory(
                "myseed",
                cosmosdb_config={
                    "connection_string": "your_connection_string",
                    "database_id": "your_database_id",
                    "container_id": "your_container_id",
                },
            )
            ```
        """
        # First preference: Redis, if a URL was given and the optional dependency imports.
        if redis_url:
            with optional_import_block() as result:
                from .redis_cache import RedisCache

            if result.is_successful:
                return RedisCache(seed, redis_url)
            else:
                logging.warning(
                    "RedisCache is not available. Checking other cache options. The last fallback is DiskCache."
                )

        # Second preference: Cosmos DB, again only if its optional SDK is importable.
        if cosmosdb_config:
            with optional_import_block() as result:
                from .cosmos_db_cache import CosmosDBCache

            if result.is_successful:
                return CosmosDBCache.create_cache(seed, cosmosdb_config)
            else:
                logging.warning("CosmosDBCache is not available. Fallback to DiskCache.")

        # Default to DiskCache if neither Redis nor Cosmos DB configurations are provided
        path = os.path.join(cache_path_root, str(seed))
        return DiskCache(os.path.join(".", path))

View File

@@ -0,0 +1,144 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
# Install Azure Cosmos DB SDK if not already
import pickle
from typing import Any, Optional, TypedDict, Union
from ..import_utils import optional_import_block, require_optional_import
from .abstract_cache_base import AbstractCache
with optional_import_block():
from azure.cosmos import CosmosClient, PartitionKey
from azure.cosmos.exceptions import CosmosResourceNotFoundError
@require_optional_import("azure", "cosmosdb")
class CosmosDBConfig(TypedDict, total=False):
connection_string: str
database_id: str
container_id: str
cache_seed: Optional[Union[str, int]]
client: Optional["CosmosClient"]
@require_optional_import("azure", "cosmosdb")
class CosmosDBCache(AbstractCache):
"""Synchronous implementation of AbstractCache using Azure Cosmos DB NoSQL API.
This class provides a concrete implementation of the AbstractCache
interface using Azure Cosmos DB for caching data, with synchronous operations.
Attributes:
seed (Union[str, int]): A seed or namespace used as a partition key.
client (CosmosClient): The Cosmos DB client used for caching.
container: The container instance used for caching.
"""
def __init__(self, seed: Union[str, int], cosmosdb_config: CosmosDBConfig):
"""Initialize the CosmosDBCache instance.
Args:
seed: A seed or namespace for the cache, used as a partition key.
cosmosdb_config: The configuration for the Cosmos DB cache.
"""
self.seed = str(seed)
self.client = cosmosdb_config.get("client") or CosmosClient.from_connection_string(
cosmosdb_config["connection_string"]
)
database_id = cosmosdb_config.get("database_id", "autogen_cache")
self.database = self.client.get_database_client(database_id)
container_id = cosmosdb_config.get("container_id")
self.container = self.database.create_container_if_not_exists(
id=container_id, partition_key=PartitionKey(path="/partitionKey")
)
@classmethod
def create_cache(cls, seed: Union[str, int], cosmosdb_config: CosmosDBConfig):
"""Factory method to create a CosmosDBCache instance based on the provided configuration.
This method decides whether to use an existing CosmosClient or create a new one.
"""
if "client" in cosmosdb_config and isinstance(cosmosdb_config["client"], CosmosClient):
return cls.from_existing_client(seed, **cosmosdb_config)
else:
return cls.from_config(seed, cosmosdb_config)
@classmethod
def from_config(cls, seed: Union[str, int], cosmosdb_config: CosmosDBConfig):
return cls(str(seed), cosmosdb_config)
@classmethod
def from_connection_string(cls, seed: Union[str, int], connection_string: str, database_id: str, container_id: str):
config = {"connection_string": connection_string, "database_id": database_id, "container_id": container_id}
return cls(str(seed), config)
@classmethod
def from_existing_client(cls, seed: Union[str, int], client: "CosmosClient", database_id: str, container_id: str):
config = {"client": client, "database_id": database_id, "container_id": container_id}
return cls(str(seed), config)
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
"""Retrieve an item from the Cosmos DB cache.
Args:
key (str): The key identifying the item in the cache.
default (optional): The default value to return if the key is not found.
Returns:
The deserialized value associated with the key if found, else the default value.
"""
try:
response = self.container.read_item(item=key, partition_key=str(self.seed))
return pickle.loads(response["data"])
except CosmosResourceNotFoundError:
return default
except Exception as e:
# Log the exception or rethrow after logging if needed
# Consider logging or handling the error appropriately here
raise e
def set(self, key: str, value: Any) -> None:
"""Set an item in the Cosmos DB cache.
Args:
key (str): The key under which the item is to be stored.
value: The value to be stored in the cache.
Notes:
The value is serialized using pickle before being stored.
"""
try:
serialized_value = pickle.dumps(value)
item = {"id": key, "partitionKey": str(self.seed), "data": serialized_value}
self.container.upsert_item(item)
except Exception as e:
# Log or handle exception
raise e
def close(self) -> None:
"""Close the Cosmos DB client.
Perform any necessary cleanup, such as closing network connections.
"""
# CosmosClient doesn"t require explicit close in the current SDK
# If you created the client inside this class, you should close it if necessary
pass
def __enter__(self):
"""Context management entry.
Returns:
self: The instance itself.
"""
return self
def __exit__(self, exc_type: Optional[type], exc_value: Optional[Exception], traceback: Optional[Any]) -> None:
"""Context management exit.
Perform cleanup actions such as closing the Cosmos DB client.
"""
self.close()

View File

@@ -0,0 +1,102 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import sys
from types import TracebackType
from typing import Any, Optional, Union
import diskcache
from .abstract_cache_base import AbstractCache
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
class DiskCache(AbstractCache):
    """Disk-backed cache built on the ``diskcache`` library.

    A thin adapter satisfying the AbstractCache interface: every operation
    is delegated to an underlying ``diskcache.Cache`` stored on disk.

    Attributes:
        cache (diskcache.Cache): The DiskCache instance used for caching.
    """

    def __init__(self, seed: Union[str, int]):
        """Create the backing ``diskcache.Cache``.

        Args:
            seed (Union[str, int]): A seed or namespace for the cache; it
                determines the unique on-disk location of the cache data.
        """
        self.cache = diskcache.Cache(seed)

    def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
        """Look up *key* in the cache.

        Args:
            key (str): The key identifying the item in the cache.
            default (optional): Value returned when the key is not found.
                Defaults to None.

        Returns:
            The cached value for *key*, or *default* when absent.
        """
        return self.cache.get(key, default)

    def set(self, key: str, value: Any) -> None:
        """Store *value* under *key* on disk.

        Args:
            key (str): The key under which the item is to be stored.
            value: The value to be stored in the cache.
        """
        self.cache.set(key, value)

    def close(self) -> None:
        """Flush and close the underlying disk cache, releasing file handles."""
        self.cache.close()

    def __enter__(self) -> Self:
        """Enter the runtime context; the cache itself is the context object."""
        return self

    def __exit__(
        self,
        exc_type: Optional[type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        """Close the cache when the ``with`` block exits.

        Args:
            exc_type: The exception type if an exception was raised in the context.
            exc_value: The exception value if an exception was raised in the context.
            traceback: The traceback if an exception was raised in the context.
        """
        self.close()

View File

@@ -0,0 +1,58 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import sys
from types import TracebackType
from typing import Any, Optional, Union
from .abstract_cache_base import AbstractCache
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
class InMemoryCache(AbstractCache):
    """Process-local cache that keeps entries in a plain dict.

    Keys are namespaced with an optional seed prefix; entries live only for
    the lifetime of this object, and ``close`` is a no-op.
    """

    def __init__(self, seed: Union[str, int] = ""):
        self._seed = str(seed)
        self._cache: dict[str, Any] = {}

    def _prefixed_key(self, key: str) -> str:
        # Empty seed means no prefix at all, so the key is stored unchanged.
        if not self._seed:
            return key
        return f"{self._seed}_{key}"

    def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
        stored = self._cache.get(self._prefixed_key(key))
        # A stored value of None is indistinguishable from a miss and yields the default.
        return default if stored is None else stored

    def set(self, key: str, value: Any) -> None:
        self._cache[self._prefixed_key(key)] = value

    def close(self) -> None:
        # Nothing to release for an in-memory dict.
        pass

    def __enter__(self) -> Self:
        """Enter the runtime context related to the object.

        Returns:
            self: The instance itself.
        """
        return self

    def __exit__(
        self, exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
    ) -> None:
        """Exit the runtime context related to the object.

        Args:
            exc_type: The exception type if an exception was raised in the context.
            exc_val: The exception value if an exception was raised in the context.
            exc_tb: The traceback if an exception was raised in the context.
        """
        self.close()

View File

@@ -0,0 +1,123 @@
# Copyright (c) 2023 - 2025, AG2ai, Inc., AG2ai open-source projects maintainers and core contributors
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import pickle
import sys
from types import TracebackType
from typing import Any, Optional, Union
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
from ..import_utils import optional_import_block, require_optional_import
from .abstract_cache_base import AbstractCache
with optional_import_block():
import redis
@require_optional_import("redis", "redis")
class RedisCache(AbstractCache):
"""Implementation of AbstractCache using the Redis database.
This class provides a concrete implementation of the AbstractCache
interface using the Redis database for caching data.
Attributes:
seed (Union[str, int]): A seed or namespace used as a prefix for cache keys.
cache (redis.Redis): The Redis client used for caching.
Methods:
__init__(self, seed, redis_url): Initializes the RedisCache with the given seed and Redis URL.
_prefixed_key(self, key): Internal method to get a namespaced cache key.
get(self, key, default=None): Retrieves an item from the cache.
set(self, key, value): Sets an item in the cache.
close(self): Closes the Redis client.
__enter__(self): Context management entry.
__exit__(self, exc_type, exc_value, traceback): Context management exit.
"""
def __init__(self, seed: Union[str, int], redis_url: str):
"""Initialize the RedisCache instance.
Args:
seed (Union[str, int]): A seed or namespace for the cache. This is used as a prefix for all cache keys.
redis_url (str): The URL for the Redis server.
"""
self.seed = seed
self.cache = redis.Redis.from_url(redis_url)
def _prefixed_key(self, key: str) -> str:
"""Get a namespaced key for the cache.
Args:
key (str): The original key.
Returns:
str: The namespaced key.
"""
return f"autogen:{self.seed}:{key}"
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
"""Retrieve an item from the Redis cache.
Args:
key (str): The key identifying the item in the cache.
default (optional): The default value to return if the key is not found.
Defaults to None.
Returns:
The deserialized value associated with the key if found, else the default value.
"""
result = self.cache.get(self._prefixed_key(key))
if result is None:
return default
return pickle.loads(result)
def set(self, key: str, value: Any) -> None:
"""Set an item in the Redis cache.
Args:
key (str): The key under which the item is to be stored.
value: The value to be stored in the cache.
Notes:
The value is serialized using pickle before being stored in Redis.
"""
serialized_value = pickle.dumps(value)
self.cache.set(self._prefixed_key(key), serialized_value)
def close(self) -> None:
"""Close the Redis client.
Perform any necessary cleanup, such as closing network connections.
"""
self.cache.close()
def __enter__(self) -> Self:
"""Enter the runtime context related to the object.
Returns:
self: The instance itself.
"""
return self
def __exit__(
self, exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
) -> None:
"""Exit the runtime context related to the object.
Perform cleanup actions such as closing the Redis client.
Args:
exc_type: The exception type if an exception was raised in the context.
exc_val: The exception value if an exception was raised in the context.
exc_tb: The traceback if an exception was raised in the context.
"""
self.close()

View File

@@ -2,7 +2,7 @@ numpy~=1.24.4
Pillow~=11.0.0
fabric
gymnasium~=0.28.1
requests~=2.31.0
requests
pytz~=2024.1
transformers~=4.35.2
torch~=2.5.0

View File

@@ -42,7 +42,7 @@ setup(
"Pillow~=11.0.0",
"fabric",
"gymnasium~=0.28.1",
"requests~=2.31.0",
"requests",
"pytz~=2024.1",
"transformers~=4.35.2",
"torch~=2.5.0",