Add AutoGLM-OS agent (#309)
* autoglm-os initialize
* clean code
* chore: use proxy for download setup
* feat(autoglm-os): add parameter to toggle images
* fix: use temporary directory for files pulled from the vm to prevent potential collision when running multiple instances of the same task in parallel
* update
* add client_password
* update multienv
* fix
* fix prompt
* fix prompt
* fix prompt
* fix sys prompt
* feat: use proxy in file evaluator
* fix client_password
* fix note_prompt
* fix autoglm agent cmd type
* fix
* revert: fix: use temporary directory for files pulled from the vm to prevent potential collision when running multiple instances of the same task in parallel (reverts commit bab5473eea1de0e61b0e1d68b23ce324a5b0ee57)
* feat(autoglm): setup tools
* fix(autoglm): remove second fetch of the a11y tree
* add osworld server restart
* Revert "add osworld server restart" (this reverts commit 7bd9d84122e246ce2a26de0e49c25494244c2b3d)
* fix _launch_setup
* fix autoglm agent tools & xml tree
* fix desktop_env
* fix bug for tool name capitalization
* fix: always use proxy for setup download
* add fail after exceeding max turns (see the sketch below)
* fix(autoglm): avoid adding image to message when screenshot is empty
* fix maximize_window
* fix maximize_window
* fix maximize_window
* fix import browsertools module bug
* fix task proxy config bug
* restore setup
* refactor desktop env
* restore image in provider
* restore file.py
* refactor desktop_env
* quick fix
* refactor desktop_env.step
* fix our env reset
* add max turns constraint
* clean run script
* clean lib_run_single.py

---------

Co-authored-by: hanyullai <hanyullai@outlook.com>
Co-authored-by: JingBh <jingbohao@yeah.net>
committed by GitHub
parent c833d03a4b
commit aa05f6cc26
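Among the items above, "add fail after exceeding max turns" caps how many agent turns a task may consume before the run is recorded as a failure. A minimal sketch of that pattern follows; the agent/env interface and the max_steps name are assumptions for illustration, not the actual lib_run_single.py code.

# Sketch of a max-turns guard for an agent run loop (assumed interfaces,
# not the actual lib_run_single.py implementation).
def run_single_task(agent, env, instruction, max_steps=15):
    obs = env.reset()
    for _ in range(max_steps):
        action, done = agent.predict(instruction, obs)    # assumed agent interface
        obs, reward, terminated, info = env.step(action)  # assumed env interface
        if done or terminated:
            return env.evaluate()  # finished within the turn budget
    return 0.0  # exceeded max turns: count the run as failed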
@@ -779,27 +779,27 @@ class SetupController:
        cache_path = os.path.join(self.cache_dir, "history_new.sqlite")
        db_url = "https://drive.usercontent.google.com/u/0/uc?id=1Lv74QkJYDWVX0RIgg0Co-DUcoYpVL0oX&export=download"  # google drive
        if not os.path.exists(cache_path):
            max_retries = 3
            downloaded = False
            e = None
            for i in range(max_retries):
                try:
                    response = requests.get(db_url, stream=True)
                    response.raise_for_status()

                    with open(cache_path, 'wb') as f:
                        for chunk in response.iter_content(chunk_size=8192):
                            if chunk:
                                f.write(chunk)
                    logger.info("File downloaded successfully")
                    downloaded = True
                    break

                except requests.RequestException as e:
                    logger.error(
                        f"Failed to download {db_url} caused by {e}. Retrying... ({max_retries - i - 1} attempts left)")
            if not downloaded:
                raise requests.RequestException(f"Failed to download {db_url}. No retries left. Error: {e}")
        else:
            logger.info("File already exists in cache directory")
        # copy a new history file in the tmp folder
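The "always use proxy for setup download" item in the commit message suggests routing downloads like the one above through a proxy. A minimal sketch of that pattern with requests is below, assuming a hypothetical SETUP_DOWNLOAD_PROXY environment variable and helper name rather than whatever configuration the commit actually uses.

# Sketch only: forcing a setup download through a proxy with requests.
# The environment-variable name and helper are assumptions, not the commit's change.
import os
import requests

def get_with_proxy(url: str, **kwargs) -> requests.Response:
    proxy = os.environ.get("SETUP_DOWNLOAD_PROXY")  # hypothetical variable name
    proxies = {"http": proxy, "https": proxy} if proxy else None
    return requests.get(url, proxies=proxies, **kwargs)

# usage: response = get_with_proxy(db_url, stream=True)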