Check and fix Chrome tasks
- Added `pytz` dependency to `requirements.txt` for timezone handling.
- Introduced `get_macys_product_url_parse` to replace the overly generic `get_url_path_parse`, keeping the old name as a backward-compatible alias.
- Enhanced logging throughout `get_active_tab_html_parse` and `get_rule_relativeTime` for easier debugging and traceability.
- Updated JSON examples to reflect changes in expected keys and added new fields for better evaluation context.
- Removed deprecated execution commands from JSON examples to streamline the evaluation process.
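Below is a minimal sketch of how the new `timezone` field is meant to flow into the relative-time rules; the config fragment mirrors the task JSON touched in this commit, while the standalone wiring is an illustrative assumption:

import pytz
from datetime import datetime

# Hypothetical rules fragment, modeled on the task configs in this commit
rules = {"timezone": "Europe/Zurich", "relativeTime": {"from": "next Monday"}}

# get_rule_relativeTime resolves the timezone the same way: config value first, UTC on failure
try:
    tz = pytz.timezone(rules.get("timezone", "UTC"))
except pytz.exceptions.UnknownTimeZoneError:
    tz = pytz.UTC

now = datetime.now(tz)  # all relative-date arithmetic then happens in this zone
print(now.strftime("%Y-%m-%d %H:%M:%S %Z"))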
@@ -26,7 +26,8 @@ from .chrome import (
    get_active_url_from_accessTree,
    get_find_installed_extension_name,
    get_info_from_website,
    get_url_path_parse
    get_macys_product_url_parse,
    get_url_path_parse  # Alias for backward compatibility
)
from .file import get_cloud_file, get_vm_file, get_cache_file, get_content_from_vm_file
from .general import get_vm_command_line, get_vm_terminal_output, get_vm_command_error

@@ -1153,14 +1153,19 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
}
"""
active_tab_url = get_active_url_from_accessTree(env, config)
logger.info(f"[DEBUG] get_active_url_from_accessTree returned: {active_tab_url} (type: {type(active_tab_url)})")
if not isinstance(active_tab_url, str):
logger.error("active_tab_url is not a string")
logger.error(f"[DEBUG] active_tab_url is not a string, got {type(active_tab_url)}: {active_tab_url}")
return None
host = env.vm_ip
port = env.chromium_port  # fixme: this port is hard-coded and should come from the config file
server_port = env.server_port

remote_debugging_url = f"http://{host}:{port}"

# DEBUG: Add logging for configuration
logger.info(f"[DEBUG] get_active_tab_html_parse called with config: {config}")

with sync_playwright() as p:
# connect to remote Chrome instance
try:
@@ -1189,13 +1194,21 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
for page in context.pages:
page.wait_for_load_state("networkidle")
# the accessibility tree and Playwright can return percent-encoded characters, so decode them before comparing
if unquote(page.url) == unquote(active_tab_url):
# Normalize URLs by removing trailing slashes and decoding percent-encoding
def normalize_url(url):
return unquote(url).rstrip('/')

if normalize_url(page.url) == normalize_url(active_tab_url):
target_page = page
print("\33[32mtarget page url: ", target_page.url, "\33[0m")
print("\33[32mtarget page title: ", target_page.title(), "\33[0m")
break
if target_page is None:
logger.error("Your tab is not the target tab.")
logger.error("[DEBUG] Could not find target tab matching URL. Available tabs:")
for context in browser.contexts:
for page in context.pages:
logger.error(f"[DEBUG] - Tab URL: {page.url}")
logger.error(f"[DEBUG] Expected URL: {active_tab_url}")
return {}

return_json = {}
@@ -1220,7 +1233,8 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
.filter(Boolean)
|
||||
''')
|
||||
results.append(texts)
|
||||
return results[0]
|
||||
# Safety check: return empty list if no elements found
|
||||
return results[0] if results else []
|
||||
|
||||
def safely_get_direct_li_playwright(selector):
|
||||
elements = target_page.query_selector_all(selector + " li.catAllProducts")
|
||||
@@ -1238,6 +1252,9 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
index = int(order_key)
|
||||
if len(elements_texts) > index:
|
||||
return_json[key] = elements_texts[index]
|
||||
else:
|
||||
logger.warning(f"[DEBUG] Element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
|
||||
return_json[key] = "" # Return empty string instead of None
|
||||
|
||||
class_multiObject_child = config.get("class_multiObject_child", {})
|
||||
for class_name, object_dict in class_multiObject_child.items():
|
||||
@@ -1246,6 +1263,9 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
index = int(order_key)
|
||||
if len(elements_texts) > index:
|
||||
return_json[key] = elements_texts[index]
|
||||
else:
|
||||
logger.warning(f"[DEBUG] Child element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
|
||||
return_json[key] = "" # Return empty string instead of None
|
||||
|
||||
class_multiObject_only_child = config.get("class_multiObject_only_child", {})
|
||||
for class_name, object_dict in class_multiObject_only_child.items():
|
||||
@@ -1254,10 +1274,16 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
index = int(order_key)
|
||||
if len(elements_texts) > index:
|
||||
return_json[key] = elements_texts[index]
|
||||
else:
|
||||
logger.warning(f"[DEBUG] Only child element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
|
||||
return_json[key] = "" # Return empty string instead of None
|
||||
|
||||
class_multiObject_search_exist = config.get("class_multiObject_search_exist", {})
|
||||
for class_name, object_list in class_multiObject_search_exist.items():
|
||||
elements_texts = safely_get_text_content("." + class_name)
|
||||
logger.info(f"[DEBUG] Found elements with class '{class_name}': {elements_texts}")
|
||||
logger.info(f"[DEBUG] Expected elements: {[obj for obj in object_list if obj != 'is_other_exist']}")
|
||||
|
||||
for each_object in object_list:
|
||||
if each_object == "is_other_exist":
|
||||
continue
|
||||
@@ -1266,10 +1292,15 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
else:
|
||||
return_json[each_object] = False
|
||||
if "is_other_exist" in object_list:
|
||||
extra_elements = []
|
||||
for each_element in elements_texts:
|
||||
if each_element not in object_list:
|
||||
extra_elements.append(each_element)
|
||||
return_json["is_other_exist"] = True
|
||||
break
|
||||
if extra_elements:
|
||||
logger.warning(f"[DEBUG] Found unexpected elements not in expected list: {extra_elements}")
|
||||
else:
|
||||
logger.info(f"[DEBUG] No unexpected elements found")
|
||||
if "is_other_exist" not in return_json.keys():
|
||||
return_json["is_other_exist"] = False
|
||||
|
||||
@@ -1277,8 +1308,13 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
class_singleObject = config.get("class_singleObject", {})
|
||||
for class_name, key in class_singleObject.items():
|
||||
element_text = safely_get_text_content("." + class_name)
|
||||
logger.info(f"[DEBUG] Class '{class_name}' found {len(element_text)} elements")
|
||||
if element_text:
|
||||
return_json[key] = element_text[0]
|
||||
logger.info(f"[DEBUG] Class extraction for key '{key}': '{element_text[0]}'")
|
||||
else:
|
||||
logger.warning(f"[DEBUG] No elements found for class: {class_name}")
|
||||
return_json[key] = "" # Return empty string instead of None
|
||||
|
||||
elif config['category'] == "label":
|
||||
# Assuming get_by_label is a custom function or part of the framework being used
|
||||
@@ -1290,17 +1326,75 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
|
||||
elif config["category"] == "xpath":
|
||||
xpathObject = config.get("xpathObject", {})
|
||||
logger.info(f"[DEBUG] Processing xpath category with xpathObject: {xpathObject}")
|
||||
|
||||
for xpath, key in xpathObject.items():
|
||||
logger.info(f"[DEBUG] Processing xpath: {xpath} -> key: {key}")
|
||||
elements = target_page.locator(f"xpath={xpath}")
|
||||
if elements.count() > 0:
|
||||
return_json[key] = elements.first.text_content().strip()
|
||||
element_count = elements.count()
|
||||
logger.info(f"[DEBUG] Found {element_count} elements for xpath: {xpath}")
|
||||
|
||||
if element_count > 0:
|
||||
try:
|
||||
text_content = elements.first.text_content()
|
||||
if text_content is not None:
|
||||
text_content = text_content.strip()
|
||||
logger.info(f"[DEBUG] Raw text content for key '{key}': '{text_content}' (type: {type(text_content)})")
|
||||
|
||||
# Handle the case where the text content is empty
if text_content is None or text_content == "":
logger.warning(f"[DEBUG] Element found but text content is empty for key '{key}' xpath: {xpath}")
# Try to gather more information about the element
element_html = elements.first.inner_html()
|
||||
element_text = elements.first.inner_text()
|
||||
logger.info(f"[DEBUG] Element innerHTML: '{element_html[:100]}...' innerText: '{element_text}'")
|
||||
|
||||
return_json[key] = text_content if text_content else ""
|
||||
logger.info(f"[DEBUG] Final value for key '{key}': '{return_json[key]}'")
|
||||
except Exception as e:
|
||||
logger.error(f"[DEBUG] Error extracting text from element for key '{key}': {e}")
|
||||
return_json[key] = ""
|
||||
else:
|
||||
logger.warning(f"[DEBUG] No elements found for xpath: {xpath}")
|
||||
# Try some fallback XPath lookup strategies
try:
# Try locating without the "xpath=" prefix
fallback_elements = target_page.locator(xpath)
|
||||
fallback_count = fallback_elements.count()
|
||||
logger.info(f"[DEBUG] Fallback search (without xpath prefix) found {fallback_count} elements")
|
||||
if fallback_count > 0:
|
||||
text_content = fallback_elements.first.text_content()
|
||||
if text_content:
|
||||
text_content = text_content.strip()
|
||||
return_json[key] = text_content if text_content else ""
|
||||
logger.info(f"[DEBUG] Fallback extraction successful for key '{key}': '{return_json[key]}'")
|
||||
else:
|
||||
return_json[key] = ""
|
||||
except Exception as e:
|
||||
logger.info(f"[DEBUG] Fallback xpath search also failed: {e}")
|
||||
return_json[key] = ""
|
||||
|
||||
elif config["category"] == "input":
|
||||
inputObjects = config.get("inputObject", {})
|
||||
logger.info(f"[DEBUG] Processing input category with inputObjects: {inputObjects}")
|
||||
for xpath, key in inputObjects.items():
|
||||
logger.info(f"[DEBUG] Processing input xpath: {xpath} -> key: {key}")
|
||||
inputs = target_page.locator(f"xpath={xpath}")
|
||||
if inputs.count() > 0:
|
||||
return_json[key] = inputs.first.input_value().strip()
|
||||
input_count = inputs.count()
|
||||
logger.info(f"[DEBUG] Found {input_count} input elements for xpath: {xpath}")
|
||||
if input_count > 0:
|
||||
try:
|
||||
input_value = inputs.first.input_value()
|
||||
if input_value:
|
||||
input_value = input_value.strip()
|
||||
return_json[key] = input_value if input_value else ""
|
||||
logger.info(f"[DEBUG] Input value for key '{key}': '{return_json[key]}'")
|
||||
except Exception as e:
|
||||
logger.error(f"[DEBUG] Error getting input value for key '{key}': {e}")
|
||||
return_json[key] = ""
|
||||
else:
|
||||
logger.warning(f"[DEBUG] No input elements found for xpath: {xpath}")
|
||||
return_json[key] = ""
|
||||
|
||||
elif config["category"] == "class&url":
|
||||
class_multiObject = config.get("class_multiObject", {})
|
||||
@@ -1352,6 +1446,23 @@ def get_active_tab_html_parse(env, config: Dict[str, Any]):
|
||||
return_json[value.lower()] = False
|
||||
|
||||
browser.close()
|
||||
|
||||
# DEBUG: Add logging for final result and check for None values
|
||||
logger.info(f"[DEBUG] get_active_tab_html_parse final result: {return_json}")
|
||||
|
||||
# Check whether any values are None
none_keys = [key for key, value in return_json.items() if value is None]
if none_keys:
logger.warning(f"[DEBUG] Found None values for keys: {none_keys}")

# Check whether all expected keys are present
if config["category"] == "xpath":
|
||||
expected_keys = set(config.get("xpathObject", {}).values())
|
||||
actual_keys = set(return_json.keys())
|
||||
missing_keys = expected_keys - actual_keys
|
||||
if missing_keys:
|
||||
logger.warning(f"[DEBUG] Missing expected keys: {missing_keys}")
|
||||
|
||||
return return_json
|
||||
|
||||
|
||||
@@ -1402,8 +1513,24 @@ def get_gotoRecreationPage_and_get_html_content(env, config: Dict[str, Any]):
|
||||
print("go to newpage: ")
|
||||
print(newpage.title())
|
||||
time.sleep(2)
|
||||
newpage.click("button.next-available")
|
||||
print("after third click")
|
||||
|
||||
# Try to click the button with better error handling and longer timeout
|
||||
try:
|
||||
# Wait for the button to be available with a longer timeout
|
||||
newpage.wait_for_selector("button.next-available", timeout=60000)
|
||||
newpage.click("button.next-available", timeout=60000)
|
||||
print("after third click")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to click 'next-available' button: {e}")
|
||||
# Try alternative selectors if the main one fails
|
||||
try:
|
||||
newpage.wait_for_selector("button[class*='next']", timeout=30000)
|
||||
newpage.click("button[class*='next']", timeout=30000)
|
||||
print("after third click (alternative selector)")
|
||||
except Exception as e2:
|
||||
logger.error(f"Alternative selector also failed: {e2}")
|
||||
# Continue execution even if button click fails
|
||||
print("Continuing without clicking next-available button")
|
||||
|
||||
return_json = {}
|
||||
return_json["expected"] = {}
|
||||
@@ -1411,11 +1538,31 @@ def get_gotoRecreationPage_and_get_html_content(env, config: Dict[str, Any]):
|
||||
if config["selector"] == "class":
|
||||
if "order" in config.keys():
|
||||
className = config["class"]
|
||||
return_json["expected"][className] = newpage.query_selector_all("." + className)[
|
||||
int(config["order"])].text_content().strip()
|
||||
try:
|
||||
elements = newpage.query_selector_all("." + className)
|
||||
order_index = int(config["order"])
|
||||
if len(elements) > order_index:
|
||||
return_json["expected"][className] = elements[order_index].text_content().strip()
|
||||
else:
|
||||
logger.warning(f"Element with class '{className}' at index {order_index} not found. Found {len(elements)} elements.")
|
||||
# For expected values, if we can't find the element, the evaluation cannot proceed
|
||||
# Return a structure that indicates failure to get expected value
|
||||
return_json["expected"][className] = "__EVALUATION_FAILED__"
|
||||
except Exception as e:
|
||||
logger.error(f"Error accessing element with class '{className}': {e}")
|
||||
return_json["expected"][className] = "__EVALUATION_FAILED__"
|
||||
else:
|
||||
className = config["class"]
|
||||
return_json["expected"][className] = newpage.query_selector("." + className).text_content().strip()
|
||||
try:
|
||||
element = newpage.query_selector("." + className)
|
||||
if element:
|
||||
return_json["expected"][className] = element.text_content().strip()
|
||||
else:
|
||||
logger.warning(f"Element with class '{className}' not found.")
|
||||
return_json["expected"][className] = "__EVALUATION_FAILED__"
|
||||
except Exception as e:
|
||||
logger.error(f"Error accessing element with class '{className}': {e}")
|
||||
return_json["expected"][className] = "__EVALUATION_FAILED__"
|
||||
browser.close()
|
||||
return return_json
|
||||
|
||||
@@ -1481,11 +1628,11 @@ def get_url_dashPart(env, config: Dict[str, str]):
return {config["key"]: dash_part}


def get_url_path_parse(env, config: Dict[str, str]):
def get_macys_product_url_parse(env, config: Dict[str, str]):
"""
Parse a Macy's product URL path and extract:
- mens_clothing: true if 'mens-clothing' is in the path, else None
- t_shirts: true if any 'Top_style' or 'Product_department' value is 'T-shirts', else None
- shirts: true if any 'Top_style' or 'Product_department' value is 'shirts', else None
- Men_regular_size_t, Price_discount_range (as list), Sleeve_length: as before, None if not found
All fields are None if not found, for robustness.
"""
@@ -1503,9 +1650,12 @@ def get_url_path_parse(env, config: Dict[str, str]):
|
||||
# key-value
|
||||
path_parts = path.strip('/').split('/')
|
||||
key_value_json = {}
|
||||
tshirts_flag = False
|
||||
if "mens-t-shirts" in path:
|
||||
tshirts_flag = True
|
||||
shirts_flag = False
|
||||
short_sleeve_flag = False # Initialize short_sleeve_flag to avoid UnboundLocalError
|
||||
if "shirts" in path:
|
||||
shirts_flag = True
|
||||
if "short-sleeve" in path:
|
||||
short_sleeve_flag = True
|
||||
for i in range(len(path_parts)-1):
|
||||
if ',' in path_parts[i] and ',' in path_parts[i+1]:
|
||||
keys = [k.strip() for k in path_parts[i].split(',')]
|
||||
@@ -1515,21 +1665,34 @@ def get_url_path_parse(env, config: Dict[str, str]):
|
||||
key_value_json[k] = [item.strip() for item in v.split('|')] if v else None
|
||||
else:
|
||||
key_value_json[k] = v if v else None
|
||||
if (k == 'Top_style' or k == 'Product_department') and (v == 'T-shirts' or v == 'T-Shirts' or v == 'T-Shirt'):
|
||||
tshirts_flag = True
|
||||
if k == 'Product_department' and (v == 'shirts' or v == 'Shirts' or v == 'Shirt'):
|
||||
shirts_flag = True
|
||||
if k == 'Sleeve_length' and (v == 'short-sleeve' or v == 'Short Sleeve'):
|
||||
short_sleeve_flag = True
|
||||
break
|
||||
for field in ['Men_regular_size_t', 'Price_discount_range', 'Sleeve_length']:
|
||||
for field in ['Men_regular_size_t', 'Price_discount_range']:
|
||||
if field not in key_value_json:
|
||||
key_value_json[field] = None
|
||||
result['t_shirts'] = tshirts_flag if tshirts_flag else None
|
||||
result['shirts'] = shirts_flag if shirts_flag else None
|
||||
result['short_sleeve'] = short_sleeve_flag if short_sleeve_flag else None
|
||||
# parse_keys
|
||||
for key in config["parse_keys"]:
|
||||
if key in key_value_json:
|
||||
if key == "Price_discount_range":
|
||||
if '50_PERCENT_ off & more' in key_value_json[key] and not '30_PERCENT_ off & more' in key_value_json[key] and not '20_PERCENT_ off & more' in key_value_json[key]:
|
||||
# Check if key_value_json[key] is not None before using 'in' operator
|
||||
if key_value_json[key] is not None and '50_PERCENT_ off & more' in key_value_json[key] and not '30_PERCENT_ off & more' in key_value_json[key] and not '20_PERCENT_ off & more' in key_value_json[key]:
|
||||
result[key] = '50_PERCENT_ off & more'
|
||||
else:
|
||||
result[key] = 'not_50_PERCENT_ off & more'
|
||||
else:
|
||||
result[key] = key_value_json[key]
|
||||
return result
|
||||
|
||||
|
||||
# Alias for backward compatibility - the old function name was too generic
|
||||
def get_url_path_parse(env, config: Dict[str, str]):
|
||||
"""
|
||||
Alias for get_macys_product_url_parse to maintain backward compatibility.
|
||||
This function name is kept for existing configurations that still use "url_path_parse" type.
|
||||
"""
|
||||
return get_macys_product_url_parse(env, config)
|
||||
|
||||
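For orientation, a hedged sketch of the dictionary `get_macys_product_url_parse` is expected to produce for a matching listing URL; the keys and values below are copied from the updated task JSON later in this diff, and the exact URL that would yield them is an assumption:

# Illustrative only: keys mirror the "parse_keys"/"expected" blocks in the updated task JSON
expected_result = {
    "mens_clothing": True,
    "shirts": True,
    "short_sleeve": True,
    "Men_regular_size_t": "L",
    "Price_discount_range": "50_PERCENT_ off & more",
}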
@@ -1,6 +1,8 @@
import logging
from typing import TypeVar, Dict
from datetime import datetime, timedelta
import pytz
import requests

logger = logging.getLogger("desktopenv.getters.misc")

@@ -71,6 +73,11 @@ relativeTime_to_IntDay = {
|
||||
"this Sunday": "special",
|
||||
"next Monday": "special",
|
||||
"next Friday": "special",
|
||||
"next Saturday": "special",
|
||||
"next Sunday": "special",
|
||||
"next week Friday": "special",
|
||||
"next week Saturday": "special",
|
||||
"next week Sunday": "special",
|
||||
"first monday four months later": "special",
|
||||
"first monday eight months later": "special",
|
||||
"next Monday split": "special",
|
||||
@@ -93,68 +100,146 @@ def get_rule_relativeTime(env, config: Dict[str, R]) -> R:
|
||||
}
|
||||
If relativeTime only has key "from", then the key of time in "expected" dict must be "time".
|
||||
If relativeTime has key "to", then the key of time in "expected" dict must be "from" and "to".
|
||||
|
||||
Optional 'timezone': timezone string like 'Europe/Zurich', 'America/New_York', etc.
|
||||
If not specified, will try to get timezone from IP geolocation.
|
||||
"""
|
||||
logger.info(f"[DEBUG] get_rule_relativeTime called with config: {config}")
|
||||
|
||||
relativeRules = config["rules"]
|
||||
relativeTime = relativeRules["relativeTime"] # int, "+" means future, "-" means past
|
||||
# get the date now
|
||||
now = datetime.now()
|
||||
|
||||
logger.info(f"[DEBUG] relativeTime: {relativeTime}")
|
||||
|
||||
# Get timezone configuration
|
||||
timezone_str = get_timezone_from_config(config)
|
||||
try:
|
||||
timezone = pytz.timezone(timezone_str)
|
||||
logger.info(f"Successfully loaded timezone: {timezone_str}")
|
||||
except pytz.exceptions.UnknownTimeZoneError:
|
||||
logger.error(f"Unknown timezone: {timezone_str}, falling back to UTC")
|
||||
timezone = pytz.UTC
|
||||
|
||||
# Get current time in the specified timezone
|
||||
now = datetime.now(timezone)
|
||||
logger.info(f"Current time in {timezone_str}: {now.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
|
||||
# calculate the relative time
|
||||
if "to" not in relativeTime.keys():
|
||||
start_relative_time = relativeTime["from"]
|
||||
logger.info(f"Processing single time: '{start_relative_time}'")
|
||||
|
||||
if relativeTime_to_IntDay[start_relative_time] != "special":
|
||||
# relativeTime can be represented by actual int days
|
||||
start_relative_time_IntDat = relativeTime_to_IntDay[start_relative_time]
|
||||
timediff = timedelta(days=start_relative_time_IntDat)
|
||||
absoluteDay = now + timediff
|
||||
logger.info(f"Simple calculation: {start_relative_time} = {start_relative_time_IntDat} days → {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
# special case, you can add more special cases here
|
||||
if start_relative_time == "5th next month":
|
||||
next_year = now.year + 1 if now.month == 12 else now.year
|
||||
next_month = now.month + 1 if now.month < 12 else 1
|
||||
next_day = 5
|
||||
absoluteDay = datetime(next_year, next_month, next_day)
|
||||
absoluteDay = timezone.localize(datetime(next_year, next_month, next_day))
|
||||
logger.info(f"5th next month: {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif start_relative_time == "10th next month":
|
||||
next_year = now.year + 1 if now.month == 12 else now.year
|
||||
next_month = now.month + 1 if now.month < 12 else 1
|
||||
next_day = 10
|
||||
absoluteDay = datetime(next_year, next_month, next_day)
|
||||
absoluteDay = timezone.localize(datetime(next_year, next_month, next_day))
|
||||
logger.info(f"10th next month: {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif start_relative_time == "this month":
|
||||
absoluteDay = now
|
||||
logger.info(f"This month: {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif start_relative_time == "next Monday":
|
||||
absoluteDay = now + timedelta(days=((6-now.weekday())+1))
|
||||
days_until_monday = (6-now.weekday()) + 1
|
||||
absoluteDay = now + timedelta(days=days_until_monday)
|
||||
logger.info(f"Next Monday: current weekday={now.weekday()}, days to add={days_until_monday} → {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif start_relative_time == "first monday four months later":
|
||||
next_year = now.year + 1 if now.month >=9 else now.year
|
||||
next_month = (now.month + 4)%12
|
||||
# get the first monday of the next_month
|
||||
temp_date = datetime(next_year, next_month, 1)
|
||||
absoluteDay = temp_date + timedelta(days=((6-temp_date.weekday())+1)%7)
|
||||
temp_date = timezone.localize(datetime(next_year, next_month, 1))
|
||||
days_to_monday = ((6-temp_date.weekday())+1)%7
|
||||
absoluteDay = temp_date + timedelta(days=days_to_monday)
|
||||
logger.info(f"First Monday 4 months later: {next_year}-{next_month:02d} → {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif start_relative_time == "first monday eight months later":
|
||||
next_year = now.year + 1 if now.month >= 5 else now.year
|
||||
next_month = (now.month + 8)%12
|
||||
# get the first monday of the next_month
|
||||
temp_date = datetime(next_year, next_month, 1)
|
||||
absoluteDay = temp_date + timedelta(days=((6-temp_date.weekday())+1)%7)
|
||||
temp_date = timezone.localize(datetime(next_year, next_month, 1))
|
||||
days_to_monday = ((6-temp_date.weekday())+1)%7
|
||||
absoluteDay = temp_date + timedelta(days=days_to_monday)
|
||||
logger.info(f"First Monday 8 months later: {next_year}-{next_month:02d} → {absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
regular_time = apply_rules_to_timeFormat(relativeRules["expected"]["time"], absoluteDay)
|
||||
logger.info(f"Final formatted time: {regular_time}")
|
||||
config["rules"]["expected"]["time"] = regular_time
|
||||
|
||||
else:
|
||||
from_time = relativeTime["from"]
|
||||
to_time = relativeTime["to"]
|
||||
logger.info(f"Processing time range: from '{from_time}' to '{to_time}'")
|
||||
|
||||
# deal with from_time first
|
||||
if relativeTime_to_IntDay[from_time] != "special":
|
||||
from_time_IntDat = relativeTime_to_IntDay[from_time]
|
||||
from_timediff = timedelta(days=from_time_IntDat)
|
||||
from_absoluteDay = now + from_timediff
|
||||
logger.info(f"From time calculation: {from_time} = {from_time_IntDat} days → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
if from_time == "this Saturday":
|
||||
from_absoluteDay = now + timedelta(days=(5-now.weekday()))
|
||||
days_until_saturday = (5-now.weekday())
|
||||
from_absoluteDay = now + timedelta(days=days_until_saturday)
|
||||
logger.info(f"This Saturday: current weekday={now.weekday()}, days to add={days_until_saturday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "10th next month":
|
||||
next_year = now.year + 1 if now.month == 12 else now.year
|
||||
next_month = now.month + 1 if now.month < 12 else 1
|
||||
next_day = 10
|
||||
from_absoluteDay = datetime(next_year, next_month, next_day)
|
||||
from_absoluteDay = timezone.localize(datetime(next_year, next_month, next_day))
|
||||
logger.info(f"10th next month (from): {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next Monday" or from_time == "next Monday split":
|
||||
from_absoluteDay = now + timedelta(days=((6-now.weekday())+1))
|
||||
days_until_monday = (6-now.weekday()) + 1
|
||||
from_absoluteDay = now + timedelta(days=days_until_monday)
|
||||
logger.info(f"Next Monday (from): current weekday={now.weekday()}, days to add={days_until_monday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next Friday":
|
||||
# Next weekend Friday calculation
|
||||
if now.weekday() < 4: # Monday to Thursday - use this weekend
|
||||
days_until_friday = 4 - now.weekday()
|
||||
elif now.weekday() == 4: # Today is Friday - use next weekend
|
||||
days_until_friday = 7
|
||||
else: # Saturday to Sunday - use next weekend
|
||||
days_until_friday = (7 - now.weekday()) + 4 # Days to next Monday + 4 to get to Friday
|
||||
from_absoluteDay = now + timedelta(days=days_until_friday)
|
||||
logger.info(f"Next Friday (from): current weekday={now.weekday()}, days to add={days_until_friday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next Saturday":
|
||||
# Next weekend Saturday calculation
|
||||
if now.weekday() < 5: # Monday to Friday - use this weekend
|
||||
days_until_saturday = 5 - now.weekday()
|
||||
elif now.weekday() == 5: # Today is Saturday - use next weekend
|
||||
days_until_saturday = 7
|
||||
else: # Sunday - use next weekend
|
||||
days_until_saturday = 6 # 6 days to next Saturday
|
||||
from_absoluteDay = now + timedelta(days=days_until_saturday)
|
||||
logger.info(f"Next Saturday (from): current weekday={now.weekday()}, days to add={days_until_saturday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next week Friday":
|
||||
# Next week Friday - simple: go to next Monday, then +4 days
|
||||
days_to_next_monday = 7 - now.weekday()
|
||||
days_until_friday = days_to_next_monday + 4 # Monday + 4 = Friday
|
||||
from_absoluteDay = now + timedelta(days=days_until_friday)
|
||||
logger.info(f"Next week Friday (from): current weekday={now.weekday()}, days to add={days_until_friday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next week Saturday":
|
||||
# Next week Saturday - simple: go to next Monday, then +5 days
|
||||
days_to_next_monday = 7 - now.weekday()
|
||||
days_until_saturday = days_to_next_monday + 5 # Monday + 5 = Saturday
|
||||
from_absoluteDay = now + timedelta(days=days_until_saturday)
|
||||
logger.info(f"Next week Saturday (from): current weekday={now.weekday()}, days to add={days_until_saturday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif from_time == "next week Sunday":
|
||||
# Next week Sunday - simple: go to next Monday, then +6 days
|
||||
days_to_next_monday = 7 - now.weekday()
|
||||
days_until_sunday = days_to_next_monday + 6 # Monday + 6 = Sunday
|
||||
from_absoluteDay = now + timedelta(days=days_until_sunday)
|
||||
logger.info(f"Next week Sunday (from): current weekday={now.weekday()}, days to add={days_until_sunday} → {from_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
pass # more rules here
|
||||
if from_time == "next Monday split":
|
||||
@@ -164,28 +249,75 @@ def get_rule_relativeTime(env, config: Dict[str, R]) -> R:
|
||||
config["rules"]["expected"]["puMonth"] = pumonth
|
||||
puyear = apply_rules_to_timeFormat(relativeRules["expected"]["puYear"], from_absoluteDay)
|
||||
config["rules"]["expected"]["puYear"] = puyear
|
||||
logger.info(f"Monday split formatting: puDay={puday}, puMonth={pumonth}, puYear={puyear}")
|
||||
else:
|
||||
regular_from_time = apply_rules_to_timeFormat(relativeRules["expected"]["from"], from_absoluteDay)
|
||||
config["rules"]["expected"]["from"] = regular_from_time
|
||||
logger.info(f"From time formatted: {regular_from_time}")
|
||||
|
||||
# deal with to_time
|
||||
if relativeTime_to_IntDay[to_time] != "special":
|
||||
to_time_IntDat = relativeTime_to_IntDay[to_time]
|
||||
to_timediff = timedelta(days=to_time_IntDat)
|
||||
to_absoluteDay = now + to_timediff
|
||||
logger.info(f"To time calculation: {to_time} = {to_time_IntDat} days → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
if to_time == "this Sunday":
|
||||
to_absoluteDay = now + timedelta(days=(6-now.weekday()))
|
||||
days_until_sunday = (6-now.weekday())
|
||||
to_absoluteDay = now + timedelta(days=days_until_sunday)
|
||||
logger.info(f"This Sunday: current weekday={now.weekday()}, days to add={days_until_sunday} → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif to_time == "11th next month":
|
||||
next_year = now.year + 1 if now.month == 12 else now.year
|
||||
next_month = now.month + 1 if now.month < 12 else 1
|
||||
next_day = 11
|
||||
to_absoluteDay = datetime(next_year, next_month, next_day)
|
||||
to_absoluteDay = timezone.localize(datetime(next_year, next_month, next_day))
|
||||
logger.info(f"11th next month (to): {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif to_time == "next Friday" or to_time == "next Friday split":
|
||||
if now.weekday() < 4 and from_time in ["next Monday"]:
|
||||
to_absoluteDay = now + timedelta(days=((4-now.weekday())+7))
|
||||
# Check if from_time is any variant of "next Monday"
|
||||
if from_time in ["next Monday", "next Monday split"]:
|
||||
# Calculate Friday of the same week as the Monday
|
||||
# from_absoluteDay is already calculated as next Monday
|
||||
# Friday is 4 days after Monday (Monday=0, Friday=4)
|
||||
to_absoluteDay = from_absoluteDay + timedelta(days=4)
|
||||
logger.info(f"Next Friday (same week as Monday): from Monday {from_absoluteDay.strftime('%Y-%m-%d')} + 4 days → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
to_absoluteDay = now + timedelta(days=((4-now.weekday()) if now.weekday() < 4 else (6-now.weekday()) + 5))
|
||||
# Standalone "next Friday" calculation
|
||||
if now.weekday() < 4: # Monday to Thursday
|
||||
days_to_friday = 4 - now.weekday()
|
||||
else: # Friday to Sunday
|
||||
days_to_friday = (6 - now.weekday()) + 5
|
||||
to_absoluteDay = now + timedelta(days=days_to_friday)
|
||||
logger.info(f"Next Friday (standalone): current weekday={now.weekday()}, days to add={days_to_friday} → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif to_time == "next Sunday":
|
||||
# Next weekend Sunday calculation - should be the same weekend as the from_time
|
||||
if from_time in ["next Friday", "next Saturday"]:
|
||||
# Calculate Sunday of the same weekend as from_time
|
||||
# from_absoluteDay is already calculated, get the Sunday of that week
|
||||
days_to_add_for_sunday = 6 - from_absoluteDay.weekday() # Days from Friday/Saturday to Sunday
|
||||
to_absoluteDay = from_absoluteDay + timedelta(days=days_to_add_for_sunday)
|
||||
logger.info(f"Next Sunday (to, same weekend as {from_time}): from {from_absoluteDay.strftime('%Y-%m-%d %A')} + {days_to_add_for_sunday} days → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
# Standalone next Sunday calculation
|
||||
if now.weekday() < 6: # Monday to Saturday - use this weekend
|
||||
days_until_sunday = 6 - now.weekday()
|
||||
else: # Sunday - use next weekend
|
||||
days_until_sunday = 7
|
||||
to_absoluteDay = now + timedelta(days=days_until_sunday)
|
||||
logger.info(f"Next Sunday (to, standalone): current weekday={now.weekday()}, days to add={days_until_sunday} → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
elif to_time == "next week Sunday":
|
||||
# Next week Sunday calculation - should be the same week as from_time if from_time is also "next week"
|
||||
if from_time in ["next week Friday", "next week Saturday"]:
|
||||
# Calculate Sunday of the same week as from_time
|
||||
# from_absoluteDay is already calculated, get the Sunday of that week
|
||||
days_to_add_for_sunday = 6 - from_absoluteDay.weekday() # Days from Friday/Saturday to Sunday
|
||||
to_absoluteDay = from_absoluteDay + timedelta(days=days_to_add_for_sunday)
|
||||
logger.info(f"Next week Sunday (to, same week as {from_time}): from {from_absoluteDay.strftime('%Y-%m-%d %A')} + {days_to_add_for_sunday} days → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
# Standalone next week Sunday calculation - simple: go to next Monday, then +6 days
|
||||
days_to_next_monday = 7 - now.weekday()
|
||||
days_until_sunday = days_to_next_monday + 6 # Monday + 6 = Sunday
|
||||
to_absoluteDay = now + timedelta(days=days_until_sunday)
|
||||
logger.info(f"Next week Sunday (to, standalone): current weekday={now.weekday()}, days to add={days_until_sunday} → {to_absoluteDay.strftime('%Y-%m-%d %H:%M:%S %Z')}")
|
||||
else:
|
||||
pass # more rules here
|
||||
if to_time == "next Friday split":
|
||||
@@ -195,10 +327,14 @@ def get_rule_relativeTime(env, config: Dict[str, R]) -> R:
|
||||
config["rules"]["expected"]["doMonth"] = to_month
|
||||
to_year = apply_rules_to_timeFormat(relativeRules["expected"]["doYear"], to_absoluteDay)
|
||||
config["rules"]["expected"]["doYear"] = to_year
|
||||
logger.info(f"Friday split formatting: doDay={to_day}, doMonth={to_month}, doYear={to_year}")
|
||||
else:
|
||||
regular_to_time = apply_rules_to_timeFormat(relativeRules["expected"]["to"], to_absoluteDay)
|
||||
config["rules"]["expected"]["to"] = regular_to_time
|
||||
|
||||
logger.info(f"To time formatted: {regular_to_time}")
|
||||
|
||||
logger.info(f"[DEBUG] Final config rules: {config['rules']}")
|
||||
print(config["rules"])
|
||||
return config["rules"]
|
||||
|
||||
|
||||
@@ -227,4 +363,44 @@ def get_time_diff_range(env, config) -> str:
|
||||
return config["diff_range_in_minutes"]
|
||||
except:
|
||||
logger.error("diff_range_in_minutes not found in config.")
|
||||
return None
|
||||
return None
|
||||
|
||||
def get_timezone_from_ip() -> str:
|
||||
"""
|
||||
Get timezone from IP address using IP geolocation API
|
||||
Returns timezone string like 'Europe/Zurich' or 'UTC' as fallback
|
||||
"""
|
||||
try:
|
||||
# Try ipapi.co first
|
||||
response = requests.get('https://ipapi.co/json/', timeout=5)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
timezone = data.get('timezone')
|
||||
if timezone:
|
||||
logger.info(f"Timezone from IP: {timezone}")
|
||||
return timezone
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to get timezone from IP: {e}")
|
||||
|
||||
# Fallback to UTC
|
||||
logger.info("Using UTC as fallback timezone")
|
||||
return 'UTC'
|
||||
|
||||
def get_timezone_from_config(config: Dict, default_timezone: str = None) -> str:
|
||||
"""
|
||||
Get timezone from config, with fallback options
|
||||
Priority: config timezone > default_timezone > IP-based timezone > UTC
|
||||
"""
|
||||
# Check if timezone is specified in config
|
||||
if "timezone" in config.get("rules", {}):
|
||||
timezone = config["rules"]["timezone"]
|
||||
logger.info(f"Using timezone from config: {timezone}")
|
||||
return timezone
|
||||
|
||||
# Use provided default
|
||||
if default_timezone:
|
||||
logger.info(f"Using provided default timezone: {default_timezone}")
|
||||
return default_timezone
|
||||
|
||||
# Get from IP
|
||||
return get_timezone_from_ip()
|
||||
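A short sketch of the resolution priority implemented above (config timezone > provided default > IP-based lookup > UTC); the import path is an assumption inferred from the logger name in this file:

from desktopenv.getters.misc import get_timezone_from_config  # assumed module path

# 1) An explicit timezone in the rules wins
assert get_timezone_from_config({"rules": {"timezone": "Europe/Zurich"}}) == "Europe/Zurich"
# 2) Otherwise an explicit default is used
assert get_timezone_from_config({"rules": {}}, default_timezone="America/New_York") == "America/New_York"
# 3) With neither, the IP-based lookup runs and falls back to "UTC" on failure
print(get_timezone_from_config({"rules": {}}))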
@@ -307,6 +307,9 @@ def check_direct_json_object(result, rules) -> float:
|
||||
One of the most commonly used function to evalute.
|
||||
Compare two json objects directly.
|
||||
"""
|
||||
logger.info(f"[DEBUG] check_direct_json_object called with result: {result}")
|
||||
logger.info(f"[DEBUG] check_direct_json_object called with rules: {rules}")
|
||||
|
||||
if isinstance(result, str):
|
||||
# remove blanks before and after result
|
||||
result = result.strip()
|
||||
@@ -314,45 +317,84 @@ def check_direct_json_object(result, rules) -> float:
|
||||
result = result.replace("'", '"')
|
||||
# load json object
|
||||
result = json.loads(result)
|
||||
|
||||
logger.info(f"[DEBUG] Processed result: {result}")
|
||||
|
||||
if result is None:
|
||||
logger.info("[DEBUG] Result is None, returning 0.0")
|
||||
return 0.
|
||||
|
||||
# Check if expected value contains evaluation failure indicator
|
||||
try:
|
||||
expected_json = rules.get("expected", {})
|
||||
if expected_json:
|
||||
for key, value in expected_json.items():
|
||||
if value == "__EVALUATION_FAILED__":
|
||||
logger.error(f"[DEBUG] Expected value for key '{key}' indicates evaluation failure, returning 0.0")
|
||||
return 0.
|
||||
except Exception as e:
|
||||
logger.error(f"[DEBUG] Error checking for evaluation failure indicator: {e}")
|
||||
return 0.
|
||||
try:
|
||||
expect_in_result = rules.get("expect_in_result", False)
|
||||
logger.info(f"[DEBUG] expect_in_result: {expect_in_result}")
|
||||
|
||||
if not expect_in_result:
|
||||
expected_json = rules["expected"]
|
||||
logger.info(f"[DEBUG] Expected JSON: {expected_json}")
|
||||
|
||||
for key in expected_json.keys():
|
||||
expected_value = expected_json.get(key)
|
||||
actual_value = result.get(key)
|
||||
logger.info(f"[DEBUG] Checking key '{key}': expected='{expected_value}', actual='{actual_value}'")
|
||||
|
||||
if expected_json.get("ignore_list_order", False):
|
||||
expected_value = sorted(expected_value)
|
||||
result_value = sorted(result.get(key))
|
||||
logger.info(f"[DEBUG] Comparing lists (sorted): expected={expected_value}, actual={result_value}")
|
||||
if expected_value != result_value:
|
||||
logger.info(f"[DEBUG] List comparison failed for key '{key}', returning 0.0")
|
||||
return 0.
|
||||
else:
|
||||
if expected_value != result.get(key):
|
||||
if expected_value != actual_value:
|
||||
logger.info(f"[DEBUG] Value comparison failed for key '{key}': expected='{expected_value}', actual='{actual_value}', returning 0.0")
|
||||
return 0.
|
||||
else:
|
||||
logger.info(f"[DEBUG] Value comparison passed for key '{key}'")
|
||||
|
||||
logger.info("[DEBUG] All comparisons passed, returning 1.0")
|
||||
return 1.0
|
||||
else:
|
||||
expected_json = rules["expected"]
|
||||
logger.info(f"[DEBUG] Expected JSON (expect_in_result mode): {expected_json}")
|
||||
|
||||
for key in expected_json.keys():
|
||||
if isinstance(expected_json.get(key), list):
|
||||
flag = 0
|
||||
expected_value_list = expected_json.get(key)
|
||||
logger.info(f"[DEBUG] Checking list key '{key}': expected_list={expected_value_list}, actual='{result.get(key)}'")
|
||||
for each_expected_value in expected_value_list:
|
||||
if isinstance(result.get(key), list) and each_expected_value in result.get(key):
|
||||
flag = 1
|
||||
logger.info(f"[DEBUG] Found expected value '{each_expected_value}' in result list for key '{key}'")
|
||||
break
|
||||
if flag == 0:
|
||||
logger.info(f"[DEBUG] No expected values found in result list for key '{key}', returning 0.0")
|
||||
return 0.
|
||||
elif isinstance(expected_json.get(key), str):
|
||||
if expected_json.get(key) not in result.get(key):
|
||||
expected_str = expected_json.get(key)
|
||||
actual_str = result.get(key)
|
||||
logger.info(f"[DEBUG] Checking string key '{key}': expected='{expected_str}', actual='{actual_str}'")
|
||||
if expected_str not in actual_str:
|
||||
logger.info(f"[DEBUG] Expected string '{expected_str}' not found in actual string '{actual_str}' for key '{key}', returning 0.0")
|
||||
return 0.
|
||||
else:
|
||||
logger.debug("check_direct_json_object: expected value type not supported")
|
||||
return 0.
|
||||
logger.info("[DEBUG] All expect_in_result comparisons passed, returning 1.0")
|
||||
return 1.0
|
||||
except:
|
||||
logger.debug("check_direct_json_object: result is not a valid json object")
|
||||
except Exception as e:
|
||||
logger.debug(f"check_direct_json_object: result is not a valid json object, error: {e}")
|
||||
return 0.
|
||||
|
||||
|
||||
|
||||
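For reference, a hedged example of how `check_direct_json_object` scores a parsed page against the rules; the values are taken from the hotel-booking task JSON below, and the standalone call itself is illustrative:

rules = {"expected": {"city": "New York City Hotels", "adult": "2 guests", "rank": "Price (low to high)"}}
result = {"city": "New York City Hotels", "adult": "2 guests", "rank": "Price (low to high)"}
# Exact-match mode: every expected key must equal the corresponding result value, so this returns 1.0
score = check_direct_json_object(result, rules)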
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -37,16 +37,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -91,6 +91,7 @@
|
||||
"from": "next Monday split",
|
||||
"to": "next Friday split"
|
||||
},
|
||||
"timezone": "Europe/Zurich",
|
||||
"expected": {
|
||||
"puDay": "{DayD}",
|
||||
"puMonth": "{MonthD}",
|
||||
@@ -103,5 +104,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": true,
|
||||
"possibility_of_env_change": "medium"
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"id": "2888b4e6-5b47-4b57-8bf5-c73827890774",
|
||||
"snapshot": "chrome",
|
||||
"instruction": "Show me all men's large-size short-sleeve T-shirts with a discount of 50% or more.",
|
||||
"instruction": "Show me all men's large-size short-sleeve shirts with a discount of 50% or more.",
|
||||
"source": "test_task_1",
|
||||
"config": [
|
||||
{
|
||||
@@ -49,10 +49,10 @@
|
||||
"goto_prefix": "https://www.",
|
||||
"parse_keys": [
|
||||
"mens_clothing",
|
||||
"t_shirts",
|
||||
"shirts",
|
||||
"Men_regular_size_t",
|
||||
"Price_discount_range",
|
||||
"Sleeve_length"
|
||||
"short_sleeve"
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
@@ -60,13 +60,14 @@
|
||||
"rules": {
|
||||
"expected": {
|
||||
"mens_clothing": true,
|
||||
"t_shirts": true,
|
||||
"shirts": true,
|
||||
"Men_regular_size_t": "L",
|
||||
"Price_discount_range": "50_PERCENT_ off & more",
|
||||
"Sleeve_length": "Short Sleeve"
|
||||
"short_sleeve": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": false,
|
||||
"possibility_of_env_change": "medium"
|
||||
}
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -63,6 +63,7 @@
|
||||
"fT28tf":[
|
||||
"Black",
|
||||
"$25 - $60",
|
||||
"On sale",
|
||||
"is_other_exist"
|
||||
]
|
||||
}
|
||||
@@ -84,6 +85,7 @@
|
||||
"expected": {
|
||||
"Black": true,
|
||||
"$25 - $60": true,
|
||||
"On sale": true,
|
||||
"is_other_exist": false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -62,5 +62,6 @@
|
||||
"order": "2"
|
||||
}
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": false,
|
||||
"possibility_of_env_change": "high"
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"id": "b7895e80-f4d1-4648-bee0-4eb45a6f1fa8",
|
||||
"snapshot": "chrome",
|
||||
"instruction": "Find a Hotel in New York City with lowest price possible for 2 adults this weekend.",
|
||||
"instruction": "Find a Hotel in New York City with lowest price possible for 2 adults next weekend.",
|
||||
"source": "test_task_0",
|
||||
"config": [
|
||||
{
|
||||
@@ -43,35 +43,74 @@
|
||||
"chrome"
|
||||
],
|
||||
"evaluator": {
|
||||
"func": "check_direct_json_object",
|
||||
"result": {
|
||||
"type": "active_tab_html_parse",
|
||||
"goto_prefix": "https://www.",
|
||||
"category": "xpath",
|
||||
"xpathObject": {
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[1]/div/button/div[3]": "from",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[2]/button/div[3]": "to",
|
||||
"/html/body/div[1]/main/div[3]/div[2]/div/div[1]/div/h2": "city",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[3]/button/div[3]/span/span[2]": "adult",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[3]/div/div[2]/div/div/div[2]/div/button/div/div": "rank"
|
||||
}
|
||||
},
|
||||
"expected": {
|
||||
"type": "rule_relativeTime",
|
||||
"rules": {
|
||||
"relativeTime": {
|
||||
"from": "this Saturday",
|
||||
"to": "this Sunday"
|
||||
},
|
||||
"expected": {
|
||||
"from": "{DoW}, {Month} {Day0D}",
|
||||
"to": "{DoW}, {Month} {Day0D}",
|
||||
"city": "New York City Hotels",
|
||||
"adult": "2 guests",
|
||||
"rank": "Price (low to high)"
|
||||
"func": [
|
||||
"check_direct_json_object",
|
||||
"check_direct_json_object"
|
||||
],
|
||||
"conj": "or",
|
||||
"result": [
|
||||
{
|
||||
"type": "active_tab_html_parse",
|
||||
"goto_prefix": "https://www.",
|
||||
"category": "xpath",
|
||||
"xpathObject": {
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[1]/div/button/div[3]": "from",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[2]/button/div[3]": "to",
|
||||
"/html/body/div[1]/main/div[3]/div[2]/div/div[1]/div/h2": "city",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[3]/button/div[3]/span/span[2]": "adult",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[3]/div/div[2]/div/div/div[2]/div/button/div/div": "rank"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "active_tab_html_parse",
|
||||
"goto_prefix": "https://www.",
|
||||
"category": "xpath",
|
||||
"xpathObject": {
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[1]/div/button/div[3]": "from",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[2]/button/div[3]": "to",
|
||||
"/html/body/div[1]/main/div[3]/div[2]/div/div[1]/div/h2": "city",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[1]/div/div/div/div[3]/button/div[3]/span/span[2]": "adult",
|
||||
"/html/body/div[1]/main/div[3]/div[5]/div[2]/div/div[3]/div/div[2]/div/div/div[2]/div/button/div/div": "rank"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": [
|
||||
{
|
||||
"type": "rule_relativeTime",
|
||||
"rules": {
|
||||
"relativeTime": {
|
||||
"from": "next week Saturday",
|
||||
"to": "next week Sunday"
|
||||
},
|
||||
"timezone": "America/New_York",
|
||||
"expected": {
|
||||
"from": "{DoW}, {Month} {Day0D}",
|
||||
"to": "{DoW}, {Month} {Day0D}",
|
||||
"city": "New York City Hotels",
|
||||
"adult": "2 guests",
|
||||
"rank": "Price (low to high)"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "rule_relativeTime",
|
||||
"rules": {
|
||||
"relativeTime": {
|
||||
"from": "next week Friday",
|
||||
"to": "next week Sunday"
|
||||
},
|
||||
"timezone": "America/New_York",
|
||||
"expected": {
|
||||
"from": "{DoW}, {Month} {Day0D}",
|
||||
"to": "{DoW}, {Month} {Day0D}",
|
||||
"city": "New York City Hotels",
|
||||
"adult": "2 guests",
|
||||
"rank": "Price (low to high)"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": true,
|
||||
"possibility_of_env_change": "medium"
|
||||
}
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
@@ -67,5 +57,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": true,
|
||||
"possibility_of_env_change": "medium"
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"id": "da46d875-6b82-4681-9284-653b0c7ae241",
|
||||
"snapshot": "chrome",
|
||||
"instruction": "Book an appointment to apply for a transportation access pass at the Charlie Card store on the first Monday eight months later, 10:15 am, fill in my details (James Smith, james.smith@gmail.com). And don not click \"book\" directly. Let me review it.",
|
||||
"instruction": "Book an appointment to apply for a transportation access pass at the Charlie Card store on the first Monday eight months later, 10:15 am, fill in my details (James Smith, james.smith@gmail.com). And do not click \"book\" directly. Let me review it.",
|
||||
"source": "test_task_2",
|
||||
"config": [
|
||||
{
|
||||
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
|
||||
@@ -36,16 +36,6 @@
|
||||
"parameters": {
|
||||
"window_name": "Google Chrome"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "execute",
|
||||
"parameters": {
|
||||
"command": [
|
||||
"python",
|
||||
"-c",
|
||||
"import pyautogui; import time; pyautogui.hotkey('alt', 'f10'); time.sleep(0.5);"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"trajectory": "trajectories/",
|
||||
@@ -76,5 +66,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"proxy": true
|
||||
"proxy": true,
|
||||
"possibility_of_env_change": "medium"
|
||||
}
|
||||
@@ -2,12 +2,12 @@
|
||||
{
|
||||
"host": "gw.dataimpulse.com",
|
||||
"port": 823,
|
||||
"username": "your_username",
|
||||
"password": "your_password",
|
||||
"username": "fba5ac061fe18be70c6c",
|
||||
"password": "e225c50bf56bdd6c",
|
||||
"protocol": "http",
|
||||
"provider": "dataimpulse",
|
||||
"type": "residential",
|
||||
"country": "US",
|
||||
"note": "Dataimpulse Residential Proxy"
|
||||
}
|
||||
]
|
||||
]
|
||||
@@ -3,6 +3,7 @@ Pillow~=11.0.0
fabric
gymnasium~=0.28.1
requests~=2.31.0
pytz~=2024.1
transformers~=4.35.2
torch~=2.5.0
accelerate