Merge branch 'zdy'
@@ -1,50 +1,92 @@
 import os
-from typing import Dict
-from typing import Optional
+from typing import Dict, List, Set
+from typing import Optional, Any, Union

 import requests


-def get_cloud_file(env, config: Dict[str, str]) -> str:
+def get_cloud_file(env, config: Dict[str, Any]) -> Union[str, List[str]]:
     """
     Config:
-        path (str): the url to download from
-        dest (str): file name of the downloaded file
+        path (str|List[str]): the url to download from
+        dest (str|List[str]): file name of the downloaded file
+        multi (bool): optional. if path and dest are lists providing
+          information of multiple files. defaults to False
+        gives (List[int]): optional. defaults to [0]. which files are directly
+          returned to the metric. if len==1, str is returned; else, list is
+          returned.
     """

-    _path = os.path.join(env.cache_dir, config["dest"])
-    if os.path.exists(_path):
-        return _path
+    if not config.get("multi", False):
+        paths: List[str] = [config["path"]]
+        dests: List[str] = [config["dest"]]
+    else:
+        paths: List[str] = config["path"]
+        dests: List[str] = config["dest"]
+    cache_paths: List[str] = []

-    url = config["path"]
-    response = requests.get(url, stream=True)
-    response.raise_for_status()
+    gives: Set[int] = set(config.get("gives", [0]))

-    with open(_path, 'wb') as f:
-        for chunk in response.iter_content(chunk_size=8192):
-            if chunk:
-                f.write(chunk)
+    for i, (p, d) in enumerate(zip(paths, dests)):
+        _path = os.path.join(env.cache_dir, d)
+        if i in gives:
+            cache_paths.append(_path)

-    return _path
+        if os.path.exists(_path):
+            #return _path
+            continue
+
+        url = p
+        response = requests.get(url, stream=True)
+        response.raise_for_status()
+
+        with open(_path, 'wb') as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                if chunk:
+                    f.write(chunk)
+
+    return cache_paths[0] if len(cache_paths)==1 else cache_paths


-def get_vm_file(env, config: Dict[str, str]) -> Optional[str]:
+def get_vm_file(env, config: Dict[str, Any]) -> Union[Optional[str], List[Optional[str]]]:
     """
     Config:
         path (str): absolute path on the VM to fetch
         dest (str): file name of the downloaded file
+        multi (bool): optional. if path and dest are lists providing
+          information of multiple files. defaults to False
+        gives (List[int]): optional. defaults to [0]. which files are directly
+          returned to the metric. if len==1, str is returned; else, list is
+          returned.
     """

-    _path = os.path.join(env.cache_dir, config["dest"])
+    if not config.get("multi", False):
+        paths: List[str] = [config["path"]]
+        dests: List[str] = [config["dest"]]
+    else:
+        paths: List[str] = config["path"]
+        dests: List[str] = config["dest"]
+    cache_paths: List[str] = []

-    file = env.controller.get_file(config["path"])
-    if file is None:
-        return None
-        # raise FileNotFoundError("File not found on VM: {:}".format(config["path"]))
-    with open(_path, "wb") as f:
-        f.write(file)
+    gives: Set[int] = set(config.get("gives", [0]))

-    return _path
+    for i, (p, d) in enumerate(zip(paths, dests)):
+        _path = os.path.join(env.cache_dir, d)
+
+        file = env.controller.get_file(p)
+        if file is None:
+            #return None
+            # raise FileNotFoundError("File not found on VM: {:}".format(config["path"]))
+            if i in gives:
+                cache_paths.append(None)
+            continue
+
+        if i in gives:
+            cache_paths.append(_path)
+        with open(_path, "wb") as f:
+            f.write(file)
+
+    return cache_paths[0] if len(cache_paths)==1 else cache_paths


 def get_cache_file(env, config: Dict[str, str]) -> str:
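A minimal sketch of how the new multi-file mode could be driven. The URLs, file names, and the env object below are hypothetical placeholders, not part of this commit:

# Download two files; "gives": [0] hands only the first cached path to the metric.
config = {
    "multi": True,
    "path": ["https://example.com/a.xlsx", "https://example.com/b.xlsx"],
    "dest": ["a.xlsx", "b.xlsx"],
    "gives": [0],
}
cached = get_cloud_file(env, config)  # one entry in gives, so a single str path comes back

get_vm_file follows the same multi/gives convention, with None standing in for files that are missing on the VM.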
@@ -3,6 +3,8 @@ import operator
 from numbers import Number
 from typing import Any, Union
 from typing import Dict, List
+import os.path
+import itertools

 import openpyxl
 import pandas as pd
@@ -26,6 +28,7 @@ def compare_table(actual: str, expected: str, **options) -> float:
             * chart
             * number_format
         "chart_props": list of str, giving the concerned chart properties
+        "as_shown": bool, TODO
     }

     Return:
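As a rough usage note (the file names here are placeholders): as_shown compares the CSV text exported next to each workbook rather than the parsed DataFrames, so a .csv sibling of each .xlsx is expected to exist.

# Hypothetical call; requires result.csv / result_gold.csv exported alongside the workbooks.
score = compare_table("result.xlsx", "result_gold.xlsx", as_shown=True, ignore_case=True)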
@@ -35,10 +38,35 @@ def compare_table(actual: str, expected: str, **options) -> float:
     if actual is None:
         return 0.

-    df1 = pd.read_excel(expected)
-    df2 = pd.read_excel(actual)
-    metric: bool = df1.equals(df2)
-    logger.debug("Normal Contents Metric: {:}".format(metric))
+    if options.get("as_shown", False):
+        expected_csv: str = os.path.splitext(expected)[0] + ".csv"
+        actual_csv: str = os.path.splitext(actual)[0] + ".csv"
+
+        with open(expected_csv) as f:
+            expected_lines: List[str] = list( itertools.dropwhile( lambda l: len(l)==0
+                                                                 , map( lambda l: l.strip()
+                                                                      , reversed(f.read().splitlines())
+                                                                      )
+                                                                 )
+                                            )
+        if options.get("ignore_case", False):
+            expected_lines = [l.lower() for l in expected_lines]
+        with open(actual_csv) as f:
+            actual_lines: List[str] = list( itertools.dropwhile( lambda l: len(l)==0
+                                                               , map( lambda l: l.strip()
+                                                                    , reversed(f.read().splitlines())
+                                                                    )
+                                                               )
+                                          )
+        if options.get("ignore_case", False):
+            actual_lines = [l.lower() for l in actual_lines]
+        metric: bool = expected_lines==actual_lines
+        logger.debug("Content Metric just as shown: %s", metric)
+    else:
+        df1 = pd.read_excel(expected)
+        df2 = pd.read_excel(actual)
+        metric: bool = df1.equals(df2)
+        logger.debug("Normal Content Metric: {:}".format(metric))

     features: List[str] = options.get("features", [])
     for ftr in features:
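The as_shown branch trims trailing blank lines by reversing each file's lines and dropping the leading empty ones; both sides get the identical treatment, so comparing the (still reversed) lists is equivalent to comparing the trimmed originals. A standalone sketch of that idiom on made-up data:

import itertools

lines = ["a,b", "1,2", "", ""]
trimmed = list(itertools.dropwhile(lambda l: len(l) == 0,
                                   map(lambda l: l.strip(), reversed(lines))))
# trimmed == ["1,2", "a,b"]: trailing blanks removed, order reversed on both sides alike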
@@ -219,6 +247,8 @@ if __name__ == '__main__':
     # print(check_zoom(path1, {"relation": "lt", "ref_value": 100}))
     # print(check_zoom(path2, {"relation": "lt", "ref_value": 100}))

-    path1 = "../../任务数据/LibreOffice Calc/Padding_Decimals_In_Formular_gold.xlsx"
-    data_frame: pd.DataFrame = pd.read_excel(path1)
-    print(data_frame)
+    path1 = "../../任务数据/LibreOffice Calc/Customers_New_7digit_Id.xlsx"
+    path2 = "../../任务数据/LibreOffice Calc/Customers_New_7digit_Id_gold.xlsx"
+    #data_frame: pd.DataFrame = pd.read_excel(path1)
+    #print(data_frame)
+    print(compare_table(path1, path2, as_shown=True))