Merge branch 'main' into zdy

David Chang
2024-01-13 17:15:32 +08:00
61 changed files with 4616 additions and 273 deletions

View File

@@ -11,8 +11,9 @@ logger = logging.getLogger("desktopenv.pycontroller")
class PythonController:
def __init__(self, http_server: str, pkgs_prefix: str = "python -c \"import pyautogui; {command}\""):
self.http_server = http_server
def __init__(self, vm_ip: str, pkgs_prefix: str = "import pyautogui; {command}"):
self.vm_ip = vm_ip
self.http_server = f"http://{vm_ip}:5000"
self.pkgs_prefix = pkgs_prefix # fixme: this is a hacky way to execute python commands. fix it and combine it with installation of packages
def get_screenshot(self):
@@ -26,6 +27,16 @@ class PythonController:
logger.error("Failed to get screenshot. Status code: %d", response.status_code)
return None
def get_terminal_output(self):
""" Gets the terminal output from the server. None -> no terminal output or unexpected error.
"""
response = requests.get(self.http_server + "/terminal")
if response.status_code == 200:
return response.json()["output"]
else:
logger.error("Failed to get terminal output. Status code: %d", response.status_code)
return None
def get_accessibility_tree(self) -> Optional[str]:
response: requests.Response = requests.get(self.http_server + "/accessibility")
@@ -52,8 +63,8 @@ class PythonController:
Executes a python command on the server.
It can be used to execute the pyautogui commands, or... any other python command. who knows?
"""
command = self.pkgs_prefix.format(command=command)
payload = json.dumps({"command": command, "shell": True})
command_list = ["python", "-c", self.pkgs_prefix.format(command=command)]
payload = json.dumps({"command": command_list, "shell": False})
headers = {
'Content-Type': 'application/json'
}
@@ -185,7 +196,8 @@ class PythonController:
elif action_type == "TYPING":
if "text" not in parameters:
raise Exception(f"Unknown parameters: {parameters}")
text = parameters["text"]
# deal with special ' and \ characters
text = parameters["text"].replace("\\", "\\\\").replace("'", "\\'")
self.execute_python_command(f"pyautogui.typewrite('{text}')")
elif action_type == "PRESS":
@@ -268,15 +280,3 @@ class PythonController:
else:
logger.error("Failed to get wallpaper. Status code: %d", response.status_code)
return None
# VLC
def get_vlc_status(self, host='localhost', port=8080, password='password'):
url = f'http://{host}:{port}/requests/status.xml'
response = requests.get(url, auth=('', password))
if response.status_code == 200:
return response.content
else:
logger.error("Failed to get vlc status. Status code: %d", response.status_code)
return None
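The hunks above replace the pre-built `http_server` argument with a bare `vm_ip`, send pyautogui code as an argv-style list with `shell=False`, and escape backslashes and single quotes before `pyautogui.typewrite`. A minimal client-side sketch of the same payload shape, assuming a command server on port 5000; the `/execute` route name is an assumption, since this hunk does not show it:

```python
import json
import requests

VM_IP = "192.168.7.128"  # assumption: any reachable VM address
HTTP_SERVER = f"http://{VM_IP}:5000"

def execute_python_command(command: str) -> dict:
    # mirror the new payload shape: argv list instead of a shell string, shell=False
    command_list = ["python", "-c", f"import pyautogui; {command}"]
    payload = json.dumps({"command": command_list, "shell": False})
    headers = {"Content-Type": "application/json"}
    # assumption: the execute route is /execute; only the payload shape appears in this diff
    response = requests.post(HTTP_SERVER + "/execute", headers=headers, data=payload)
    return response.json()

# escaping mirrors the TYPING branch: backslashes first, then single quotes
text = r"it's a C:\path"
escaped = text.replace("\\", "\\\\").replace("'", "\\'")
execute_python_command(f"pyautogui.typewrite('{escaped}')")
```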

View File

@@ -1,23 +1,25 @@
import requests
import json
from requests_toolbelt.multipart.encoder import MultipartEncoder
import uuid
import time
import os.path
import traceback
import uuid
from typing import Dict, List
from typing import Any, Union, Optional
import requests
from playwright.sync_api import sync_playwright
from requests_toolbelt.multipart.encoder import MultipartEncoder
from desktop_env.evaluators.metrics.utils import compare_urls
import logging
logger = logging.getLogger("desktopenv.setup")
import traceback
import time
class SetupController:
def __init__(self, http_server: str, cache_dir: str):
self.http_server: str = http_server
self.http_server_setup_root = http_server + "/setup"
def __init__(self, vm_ip: str, cache_dir: str):
self.vm_ip: str = vm_ip
self.http_server: str = f"http://{vm_ip}:5000"
self.cache_dir: str = cache_dir
def reset_cache_dir(self, cache_dir: str):
@@ -141,8 +143,8 @@ class SetupController:
# send request to server to upload file
try:
logger.debug("REQUEST ADDRESS: %s", self.http_server_setup_root + "/upload")
response = requests.post(self.http_server_setup_root + "/upload", headers=headers, data=form)
logger.debug("REQUEST ADDRESS: %s", self.http_server + "/setup" + "/upload")
response = requests.post(self.http_server + "/setup" + "/upload", headers=headers, data=form)
if response.status_code == 200:
logger.info("Command executed successfully: %s", response.text)
else:
@@ -167,7 +169,7 @@ class SetupController:
# send request to server to change wallpaper
try:
response = requests.post(self.http_server_setup_root + "/change_wallpaper", headers=headers, data=payload)
response = requests.post(self.http_server + "/setup" + "/change_wallpaper", headers=headers, data=payload)
if response.status_code == 200:
logger.info("Command executed successfully: %s", response.text)
else:
@@ -194,7 +196,7 @@ class SetupController:
# send request to server to open file
try:
response = requests.post(self.http_server_setup_root + "/open_file", headers=headers, data=payload)
response = requests.post(self.http_server + "/setup" + "/open_file", headers=headers, data=payload)
if response.status_code == 200:
logger.info("Command executed successfully: %s", response.text)
else:
@@ -214,7 +216,7 @@ class SetupController:
headers = {"Content-Type": "application/json"}
try:
response = requests.post(self.http_server_setup_root + "/launch", headers=headers, data=payload)
response = requests.post(self.http_server + "/setup" + "/launch", headers=headers, data=payload)
if response.status_code == 200:
logger.info("Command executed successfully: %s", response.text)
else:
@@ -280,6 +282,7 @@ class SetupController:
def _act_setup(self, action_seq: List[Union[Dict[str, Any], str]]):
# TODO
raise NotImplementedError()
def _replay_setup(self, trajectory: str):
"""
Args:
@@ -288,3 +291,84 @@ class SetupController:
# TODO
raise NotImplementedError()
# Chrome setup
def _chrome_open_tabs_setup(self, urls_to_open: List[str]):
host = self.vm_ip
port = 9222  # fixme: this port is hard-coded; it should come from the config file
remote_debugging_url = f"http://{host}:{port}"
with sync_playwright() as p:
browser = None
for attempt in range(15):
try:
browser = p.chromium.connect_over_cdp(remote_debugging_url)
break
except Exception as e:
if attempt < 14:
logger.error(f"Attempt {attempt + 1}: Failed to connect, retrying. Error: {e}")
time.sleep(1)
else:
logger.error(f"Failed to connect after multiple attempts: {e}")
raise e
if not browser:
return
for i, url in enumerate(urls_to_open):
# Use the first context (which should be the only one if using default profile)
if i == 0:
context = browser.contexts[0]
page = context.new_page() # Create a new page (tab) within the existing context
page.goto(url)
logger.info(f"Opened tab {i + 1}: {url}")
if i == 0:
# clear the default tab
default_page = context.pages[0]
default_page.close()
# Do not close the context or browser; they will remain open after script ends
return browser, context
def _chrome_close_tabs_setup(self, urls_to_close: List[str]):
time.sleep(5) # Wait for Chrome to finish launching
host = self.vm_ip
port = 9222  # fixme: this port is hard-coded; it should come from the config file
remote_debugging_url = f"http://{host}:{port}"
with sync_playwright() as p:
browser = None
for attempt in range(15):
try:
browser = p.chromium.connect_over_cdp(remote_debugging_url)
break
except Exception as e:
if attempt < 14:
logger.error(f"Attempt {attempt + 1}: Failed to connect, retrying. Error: {e}")
time.sleep(1)
else:
logger.error(f"Failed to connect after multiple attempts: {e}")
raise e
if not browser:
return
for i, url in enumerate(urls_to_close):
# Use the first context (which should be the only one if using default profile)
if i == 0:
context = browser.contexts[0]
for page in context.pages:
# if two urls are the same, close the tab
if compare_urls(page.url, url):
context.pages.pop(context.pages.index(page))
page.close()
logger.info(f"Closed tab {i + 1}: {url}")
break
# Do not close the context or browser; they will remain open after script ends
return browser, context
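Both `_chrome_open_tabs_setup` and `_chrome_close_tabs_setup` above repeat the same 15-attempt CDP connection loop. A hedged sketch of that pattern factored into a helper, assuming Chrome is reachable on the hard-coded port 9222; the helper name and the example URL are illustrative, not part of the diff:

```python
import logging
import time

from playwright.sync_api import sync_playwright

logger = logging.getLogger("desktopenv.setup")

def connect_with_retry(p, remote_debugging_url: str, attempts: int = 15, delay: float = 1.0):
    """Retry connect_over_cdp until Chrome's remote-debugging endpoint answers."""
    for attempt in range(attempts):
        try:
            return p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            if attempt < attempts - 1:
                logger.error("Attempt %d: failed to connect, retrying. Error: %s", attempt + 1, e)
                time.sleep(delay)
            else:
                raise

# usage sketch: open one tab in the default-profile context, as the setup methods do
with sync_playwright() as p:
    browser = connect_with_retry(p, "http://127.0.0.1:9222")
    context = browser.contexts[0]
    page = context.new_page()
    page.goto("https://www.example.com")
```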

View File

@@ -80,13 +80,12 @@ class DesktopEnv(gym.Env):
logger.info("Initializing...")
self._start_emulator()
self.vm_ip = self._get_vm_ip()
self.host = f"http://{self.vm_ip}:5000"
self.controller = PythonController(http_server=self.host)
self.setup_controller = SetupController(http_server=self.host, cache_dir=self.cache_dir)
self.controller = PythonController(vm_ip=self.vm_ip)
self.setup_controller = SetupController(vm_ip=self.vm_ip, cache_dir=self.cache_dir)
# Meta info of the VM
self.vm_platform = self.controller.get_vm_platform()
self.vm_screen_size = self.controller.get_vm_screen_size()
# Meta info of the VM; moved to the reset() function
self.vm_platform: str = "" # self.controller.get_vm_platform()
self.vm_screen_size = None # self.controller.get_vm_screen_size()
# mode: human or machine
assert action_space in ["computer_13", "pyautogui"]
@@ -193,6 +192,10 @@ class DesktopEnv(gym.Env):
self._start_emulator()
logger.info("Emulator started.")
logger.info("Get meta info of the VM...")
self.vm_platform = self.controller.get_vm_platform()
self.vm_screen_size = self.controller.get_vm_screen_size()
logger.info("Setting up environment...")
self.setup_controller.setup(self.config)
@@ -218,6 +221,7 @@ class DesktopEnv(gym.Env):
time.sleep(pause)
observation = {
"screenshot": self._get_obs(),
"terminal": self.controller.get_terminal_output(),
"instruction": self.instruction
}
reward = 0 # todo: Define reward calculation for each example
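The observation dict assembled above now carries the screenshot, the optional terminal text, and the instruction. A small sketch of consuming that dict on the agent side; the helper is illustrative, not part of the diff, and the screenshot is assumed to be raw bytes from `_get_obs()`:

```python
def summarize_observation(observation: dict) -> str:
    # assumption: "screenshot" holds raw image bytes and "terminal" is either text or None
    terminal = observation.get("terminal")
    screenshot = observation.get("screenshot") or b""
    return "\n".join([
        f"instruction: {observation['instruction']}",
        f"terminal: {terminal if terminal is not None else '<no active terminal>'}",
        f"screenshot: {len(screenshot)} bytes",
    ])
```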

View File

@@ -2,4 +2,4 @@ from .file import get_cloud_file, get_vm_file, get_cache_file
from .info import get_vm_screen_size, get_vm_window_size, get_vm_wallpaper
from .misc import get_rule, get_accessibility_tree
from .vlc import get_vlc_playing_info, get_vlc_config
from .chrome import get_default_search_engine, get_bookmarks
from .chrome import get_default_search_engine, get_bookmarks, get_open_tabs_info

View File

@@ -4,6 +4,8 @@ import os
import sqlite3
from typing import Dict
from playwright.sync_api import sync_playwright
logger = logging.getLogger("desktopenv.getters.chrome")
"""
@@ -13,15 +15,20 @@ WARNING:
"""
# The following ones just need to load info from the files of software, no need to connect to the software
def get_default_search_engine(env, config: Dict[str, str]):
os_type = env.vm_platform
if os_type == 'Windows':
preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
elif os_type == 'Darwin':
preference_file_path = env.controller.execute_python_command("import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")['output'].strip()
preference_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
'output'].strip()
elif os_type == 'Linux':
preference_file_path = env.controller.execute_python_command("import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")['output'].strip()
preference_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
'output'].strip()
else:
raise Exception('Unsupported operating system')
@@ -41,12 +48,16 @@ def get_default_search_engine(env, config: Dict[str, str]):
def get_cookie_data(env, config: Dict[str, str]):
os_type = env.vm_platform
if os_type == 'Windows':
chrome_cookie_file_path = os.path.join(os.getenv('LOCALAPPDATA'), 'Google\\Chrome\\User Data\\Default\\Cookies')
chrome_cookie_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
'Google\\Chrome\\User Data\\Default\\Cookies'))""")['output'].strip()
elif os_type == 'Darwin':
chrome_cookie_file_path = os.path.join(os.getenv('HOME'),
'Library/Application Support/Google/Chrome/Default/Cookies')
chrome_cookie_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Cookies'))")[
'output'].strip()
elif os_type == 'Linux':
chrome_cookie_file_path = os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Cookies')
chrome_cookie_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Cookies'))")[
'output'].strip()
else:
raise Exception('Unsupported operating system')
@@ -70,13 +81,16 @@ def get_cookie_data(env, config: Dict[str, str]):
def get_bookmarks(env, config: Dict[str, str]):
os_type = env.vm_platform
if os_type == 'Windows':
preference_file_path = os.path.join(os.getenv('LOCALAPPDATA'),
'Google\\Chrome\\User Data\\Default\\Bookmarks')
preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
'Google\\Chrome\\User Data\\Default\\Bookmarks'))""")['output'].strip()
elif os_type == 'Darwin':
preference_file_path = os.path.join(os.getenv('HOME'),
'Library/Application Support/Google/Chrome/Default/Bookmarks')
preference_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Bookmarks'))")[
'output'].strip()
elif os_type == 'Linux':
preference_file_path = os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Bookmarks')
preference_file_path = env.controller.execute_python_command(
"import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Bookmarks'))")[
'output'].strip()
else:
raise Exception('Unsupported operating system')
@@ -98,13 +112,16 @@ def get_extensions_installed_from_shop(env, config: Dict[str, str]):
"""Find the Chrome extensions directory based on the operating system."""
os_type = env.vm_platform
if os_type == 'Windows':
chrome_extension_dir = os.path.expanduser(
'~') + '\\AppData\\Local\\Google\\Chrome\\User Data\\Default\\Extensions\\'
chrome_extension_dir = env.controller.execute_python_command(
"""os.path.expanduser('~') + '\\AppData\\Local\\Google\\Chrome\\User Data\\Default\\Extensions\\'""")[
'output'].strip()
elif os_type == 'Darwin': # macOS
chrome_extension_dir = os.path.expanduser(
'~') + '/Library/Application Support/Google/Chrome/Default/Extensions/'
chrome_extension_dir = env.controller.execute_python_command(
"""os.path.expanduser('~') + '/Library/Application Support/Google/Chrome/Default/Extensions/'""")[
'output'].strip()
elif os_type == 'Linux':
chrome_extension_dir = os.path.expanduser('~') + '/.config/google-chrome/Default/Extensions/'
chrome_extension_dir = env.controller.execute_python_command(
"""os.path.expanduser('~') + '/.config/google-chrome/Default/Extensions/'""")['output'].strip()
else:
raise Exception('Unsupported operating system')
@@ -124,3 +141,52 @@ def get_extensions_installed_from_shop(env, config: Dict[str, str]):
except json.JSONDecodeError:
logger.error(f"Error reading {manifest_path}")
return manifests
# The following ones require Playwright to be installed on the target machine, and Chrome needs to be pre-configured
# with a remote-debugging port; see README.md for details
def get_open_tabs_info(env, config: Dict[str, str]):
host = env.vm_ip
port = 9222  # fixme: this port is hard-coded; it should come from the config file
remote_debugging_url = f"http://{host}:{port}"
with sync_playwright() as p:
# connect to remote Chrome instance
browser = p.chromium.connect_over_cdp(remote_debugging_url)
tabs_info = []
for context in browser.contexts:
for page in context.pages:
title = page.title()
url = page.url
tabs_info.append({'title': title, 'url': url})
browser.close()
return tabs_info
def get_active_tab_info(env, config: Dict[str, str]):
host = env.vm_ip
port = 9222  # fixme: this port is hard-coded; it should come from the config file
remote_debugging_url = f"http://{host}:{port}"
with sync_playwright() as p:
# connect to remote Chrome instance
browser = p.chromium.connect_over_cdp(remote_debugging_url)
active_tab_info = {}
for context in browser.contexts:
for page in context.pages:
if page.is_visible("body"): # check the visibility of the page body to determine the active status
active_tab_info = {
'title': page.title(),
'url': page.url,
'content': page.content() # get the HTML content of the page
}
break
if active_tab_info:
break
browser.close()
return active_tab_info
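A hedged sketch of exercising `get_open_tabs_info` outside the evaluator loop. The stand-in `env` only provides the `vm_ip` attribute the getter reads; the import path (inferred from the package layout) and the IP address are assumptions for illustration:

```python
from types import SimpleNamespace

# assumption: module path of the getter shown above
from desktop_env.evaluators.getters.chrome import get_open_tabs_info

env = SimpleNamespace(vm_ip="192.168.7.128")  # assumption: VM with Chrome CDP exposed on port 9222

for tab in get_open_tabs_info(env, config={}):
    print(tab["title"], "->", tab["url"])
```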

View File

@@ -2,6 +2,8 @@ import logging
import os
from typing import Dict
import requests
logger = logging.getLogger("desktopenv.getters.vlc")
@@ -15,7 +17,14 @@ def get_vlc_playing_info(env, config: Dict[str, str]):
password = 'password'
_path = os.path.join(env.cache_dir, config["dest"])
content = env.controller.get_vlc_status(host, port, password)
url = f'http://{host}:{port}/requests/status.xml'
response = requests.get(url, auth=('', password))
if response.status_code == 200:
content = response.content
else:
logger.error("Failed to get vlc status. Status code: %d", response.status_code)
return None
with open(_path, "wb") as f:
f.write(content)
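The getter now fetches VLC's `status.xml` directly over the HTTP interface, authenticating with a blank username and the configured password. A short sketch of parsing a couple of standard fields out of that response; host, port, and password mirror the defaults in the hunk:

```python
import xml.etree.ElementTree as ET

import requests

def fetch_vlc_state(host: str = "localhost", port: int = 8080, password: str = "password") -> dict:
    # VLC's HTTP interface authenticates with an empty username and the configured password
    url = f"http://{host}:{port}/requests/status.xml"
    response = requests.get(url, auth=("", password))
    response.raise_for_status()
    root = ET.fromstring(response.content)
    # <state> (playing/paused/stopped) and <fullscreen> are standard status.xml elements
    return {"state": root.findtext("state"), "fullscreen": root.findtext("fullscreen")}
```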

View File

@@ -1,3 +1,4 @@
from .chrome import is_expected_tabs, is_expected_bookmarks
from .docs import compare_font_names, compare_subscript_contains, has_page_numbers_in_footers
from .docs import find_default_font, contains_page_break, compare_docx_files, compare_docx_tables, compare_line_spacing, \
compare_insert_equation
@@ -9,5 +10,7 @@ from .table import check_sheet_list, check_xlsx_freeze, check_xlsx_zoom
from .table import compare_table
from .vlc import is_vlc_playing, is_vlc_recordings_folder, is_vlc_fullscreen, compare_images, compare_audios, \
compare_videos
from .gimp import increase_saturation, decrease_brightness, check_file_exists, compare_triangle_positions
from .general import check_csv, check_accessibility_tree, check_list, run_sqlite3
from .thunderbird import check_thunderbird_prefs, check_thunderbird_filter

View File

@@ -1,52 +1,39 @@
import logging
from playwright.sync_api import sync_playwright
from typing import Any, Dict, List
from desktop_env.evaluators.metrics.utils import are_lists_equal, compare_urls
logger = logging.getLogger("desktopenv.metrics.chrome")
def is_expected_tabs(open_tabs: List[Dict[str, str]], rule: Dict[str, Any]) -> float:
"""
Checks if the expected tabs are open in Chrome.
"""
print(open_tabs, rule)
match_type = rule['type']
if match_type == "url":
expected_urls = rule['urls']
actual_urls = [tab['url'] for tab in open_tabs]
return 1 if are_lists_equal(expected_urls, actual_urls, compare_urls) else 0
else:
logger.error(f"Unknown type: {match_type}")
return 0
# todo: move to getter module
def is_expected_bookmarks(bookmarks: List[Dict[str, Any]], rule: Dict[str, Any]) -> float:
"""
Checks if the expected bookmarks are in Chrome.
"""
# The following ones just need to load info from the files of software, no need to connect to the software
# todo
match_type = rule['type']
# The following ones require Playwright to be installed on the target machine, and Chrome needs to be pre-configured with a remote-debugging port; see README.md for details
def get_open_tabs_info(remote_debugging_url):
with sync_playwright() as p:
# connect to remote Chrome instance
browser = p.chromium.connect_over_cdp(remote_debugging_url)
tabs_info = []
for context in browser.contexts:
for page in context.pages:
title = page.title()
url = page.url
tabs_info.append({'title': title, 'url': url})
browser.close()
return tabs_info
def get_active_tab_info(remote_debugging_url):
with sync_playwright() as p:
# connect to remote Chrome instance
browser = p.chromium.connect_over_cdp(remote_debugging_url)
active_tab_info = {}
for context in browser.contexts:
for page in context.pages():
if page.is_visible("body"): # check the visibility of the page body to determine the active status
active_tab_info = {
'title': page.title(),
'url': page.url,
'content': page.content() # get the HTML content of the page
}
break
if active_tab_info:
break
browser.close()
return active_tab_info
if match_type == "url":
expected_urls = rule['urls']
actual_urls = [bookmark['url'] for bookmark in bookmarks]
return 1 if are_lists_equal(expected_urls, actual_urls, compare_urls) else 0
else:
logger.error(f"Unknown type: {match_type}")
return 0
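A small usage sketch of the rule-based tab check, with tab and rule shapes matching the `open_tabs_info` getter and the example config further below; the import path is assumed from the package layout:

```python
# assumption: module path of the metric shown above
from desktop_env.evaluators.metrics.chrome import is_expected_tabs

open_tabs = [
    {"title": "Lonely Planet", "url": "https://www.lonelyplanet.com/"},
    {"title": "Airbnb", "url": "https://airbnb.com"},
]
rule = {"type": "url", "urls": ["https://www.lonelyplanet.com", "https://www.airbnb.com"]}

# compare_urls ignores host case, a leading "www.", and a bare trailing slash
print(is_expected_tabs(open_tabs, rule))  # -> 1
```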

View File

@@ -1,5 +1,5 @@
import os
from PIL import Image, ImageChops, ImageStat
def get_gimp_export_path():
# Path to GIMP's configuration file. This example assumes GIMP version 2.10.
@@ -20,3 +20,99 @@ def get_gimp_export_path():
# Handle the case where the configuration file is not found
print("GIMP configuration file not found")
return False
def check_file_exists(directory, filename):
file_path = os.path.join(directory, filename)
return 1 if os.path.isfile(file_path) else 0
def increase_saturation(image1_path: str, image2_path: str) -> float:
def calculate_saturation(image):
# convert the image to HSV mode
hsv_image = image.convert("HSV")
saturation_channel = hsv_image.split()[1]
# calculate the mean saturation level
stat = ImageStat.Stat(saturation_channel)
mean_saturation = stat.mean[0]
return mean_saturation
image1 = Image.open(image1_path)
image2 = Image.open(image2_path)
# calculate the saturation level of each image
saturation1 = calculate_saturation(image1)
saturation2 = calculate_saturation(image2)
return 1 if saturation1 < saturation2 else 0
def decrease_brightness(image1_path: str, image2_path: str) -> float:
def calculate_brightness(image):
# Convert the image to grayscale mode
grayscale_image = image.convert("L")
# Get the image data
pixels = list(grayscale_image.getdata())
brightness = sum(pixels) / len(pixels)
return brightness
image1 = Image.open(image1_path)
image2 = Image.open(image2_path)
brightness1 = calculate_brightness(image1)
brightness2 = calculate_brightness(image2)
return 1 if brightness1 > brightness2 else 0
import cv2
import numpy as np
def find_yellow_triangle(image):
# Convert the image to RGBA
rgba = cv2.cvtColor(image, cv2.COLOR_BGR2RGBA)
# define range of yellow color in HSV
lower_yellow = np.array([0, 0, 0], dtype=np.uint8)
upper_yellow = np.array([255, 255, 255], dtype=np.uint8)
# expand the dimensions of lower and upper yellow to match the image dimensions
lower_yellow = np.reshape(lower_yellow, (1, 1, 3))
upper_yellow = np.reshape(upper_yellow, (1, 1, 3))
# build a mask for the yellow color
mask = cv2.inRange(rgba, lower_yellow, upper_yellow)
# search for contours in the mask
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# choose the largest contour
max_contour = max(contours, key=cv2.contourArea)
# calculate the center of the contour
M = cv2.moments(max_contour)
cx = int(M['m10'] / M['m00'])
cy = int(M['m01'] / M['m00'])
return cx, cy
def compare_triangle_positions(image1, image2):
image1 = cv2.imread(image1, cv2.IMREAD_COLOR)
image2 = cv2.imread(image2, cv2.IMREAD_COLOR)
# find the center of the yellow triangle in each image
cx1, cy1 = find_yellow_triangle(image1)
cx2, cy2 = find_yellow_triangle(image2)
# calculate the distance between the center of the triangle and the center of the image
center_distance1 = np.sqrt((cx1 - image1.shape[1] // 2)**2 + (cy1 - image1.shape[0] // 2)**2)
center_distance2 = np.sqrt((cx2 - image2.shape[1] // 2)**2 + (cy2 - image2.shape[0] // 2)**2)
return 1 if center_distance1 > center_distance2 else 0
if __name__ == "__main__":
image1_path = "../Downloads/1.png"
image2_path = "../Downloads/edited_darker.png"
decrease_brightness(image1_path, image2_path)
increase_saturation(image1_path, image2_path)
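A self-contained sketch of the saturation check using synthetic Pillow images, so it runs without the downloaded task files; the solid-colour images and the import path are assumptions for illustration:

```python
from PIL import Image

# assumption: module path of the metrics shown above
from desktop_env.evaluators.metrics.gimp import increase_saturation

# a neutral grey has zero HSV saturation; pure red is fully saturated
Image.new("RGB", (64, 64), (128, 128, 128)).save("/tmp/original.png")
Image.new("RGB", (64, 64), (255, 0, 0)).save("/tmp/edited_colorful.png")

print(increase_saturation("/tmp/original.png", "/tmp/edited_colorful.png"))  # -> 1
```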

View File

@@ -1,26 +1,29 @@
import zipfile
import lxml.etree
import lxml.cssselect
from lxml.etree import _Element
import xmltodict
import openpyxl
from openpyxl import Workbook
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.chart._chart import ChartBase
from typing import Dict, List, Set
from typing import Any
import logging
import zipfile
from typing import Any
from typing import Dict, List, Set
from urllib.parse import urlparse, urlunparse
import lxml.cssselect
import lxml.etree
import openpyxl
import xmltodict
from lxml.etree import _Element
from openpyxl import Workbook
from openpyxl.chart._chart import ChartBase
from openpyxl.worksheet.worksheet import Worksheet
logger = logging.getLogger("desktopenv.metrics.utils")
_xlsx_namespaces = [ ("x14", "http://schemas.microsoft.com/office/spreadsheetml/2009/9/main")
, ("xm", "http://schemas.microsoft.com/office/excel/2006/main")
]
_xlsx_namespaces = [("x14", "http://schemas.microsoft.com/office/spreadsheetml/2009/9/main")
, ("xm", "http://schemas.microsoft.com/office/excel/2006/main")
]
_xlsx_ns_mapping = dict(_xlsx_namespaces)
_xlsx_ns_imapping = dict(map(lambda itm: (itm[1], itm[0]), _xlsx_namespaces))
_sparklines_selector = lxml.cssselect.CSSSelector("x14|sparkline", namespaces=_xlsx_ns_mapping)
#print(_sparklines_selector.css)
# print(_sparklines_selector.css)
def load_sparklines(xlsx_file: str) -> Dict[str, str]:
"""
This function modifies data_frame in-place
@@ -44,13 +47,14 @@ def load_sparklines(xlsx_file: str) -> Dict[str, str]:
sparklines_dict: Dict[str, str] = {}
for sp_l in sparklines:
sparkline_xml: str = lxml.etree.tostring(sp_l, encoding="unicode")
sparkline: Dict[str, Dict[str, str]] = xmltodict.parse( sparkline_xml
, process_namespaces=True
, namespaces=_xlsx_ns_imapping
)
sparkline: Dict[str, Dict[str, str]] = xmltodict.parse(sparkline_xml
, process_namespaces=True
, namespaces=_xlsx_ns_imapping
)
sparklines_dict[sparkline["x14:sparkline"]["xm:sqref"]] = sparkline["x14:sparkline"]["xm:f"]
return sparklines_dict
# Available Chart Properties:
# title: str
# anchor: ["oneCell" | "twoCell" | "absolute", col0, row0, col1, row1]
@@ -70,7 +74,7 @@ def load_charts(xlsx_file: Workbook, **options) -> Dict[str, Any]:
Dict[str, Any]: information of charts
"""
#workbook: Workbook = openpyxl.load_workbook(filename=xlsx_file)
# workbook: Workbook = openpyxl.load_workbook(filename=xlsx_file)
worksheet: Worksheet = xlsx_file.active
charts: List[ChartBase] = worksheet._charts
@@ -79,22 +83,22 @@ def load_charts(xlsx_file: Workbook, **options) -> Dict[str, Any]:
for ch in charts:
series: List[str] = []
for ser in ch.series:
value_num = ser.val.numRef.f\
if hasattr(ser.val, "numRef") and hasattr(ser.val.numRef, "f")\
else ""
value_str = ser.val.strRef.f\
if hasattr(ser.val, "strRef") and hasattr(ser.val.strRef, "f")\
else ""
categ_num = ser.cat.numRef.f\
if hasattr(ser.cat, "numRef") and hasattr(ser.cat.numRef, "f")\
else ""
categ_str = ser.cat.strRef.f\
if hasattr(ser.cat, "strRef") and hasattr(ser.cat.strRef, "f")\
else ""
series.append( "{:},{:},{:},{:}".format( value_num, value_str
value_num = ser.val.numRef.f \
if hasattr(ser.val, "numRef") and hasattr(ser.val.numRef, "f") \
else ""
value_str = ser.val.strRef.f \
if hasattr(ser.val, "strRef") and hasattr(ser.val.strRef, "f") \
else ""
categ_num = ser.cat.numRef.f \
if hasattr(ser.cat, "numRef") and hasattr(ser.cat.numRef, "f") \
else ""
categ_str = ser.cat.strRef.f \
if hasattr(ser.cat, "strRef") and hasattr(ser.cat.strRef, "f") \
else ""
series.append("{:},{:},{:},{:}".format(value_num, value_str
, categ_num, categ_str
)
)
)
series: str = ";".join(series)
# TODO: maybe more aspects, like chart type
@@ -103,10 +107,10 @@ def load_charts(xlsx_file: Workbook, **options) -> Dict[str, Any]:
if "title" in chart_props:
info["title"] = ch.title.tx.rich.p[0].r[0].t
if "anchor" in chart_props:
info["anchor"] = [ ch.anchor.editAs
, ch.anchor._from.col, ch.anchor.to.row
, ch.anchor.to.col, ch.anchor.to.row
]
info["anchor"] = [ch.anchor.editAs
, ch.anchor._from.col, ch.anchor.to.row
, ch.anchor.to.col, ch.anchor.to.row
]
if "width" in chart_props:
info["width"] = ch.width
if "height" in chart_props:
@@ -125,43 +129,87 @@ def load_charts(xlsx_file: Workbook, **options) -> Dict[str, Any]:
chart_set[series] = info
return chart_set
def _match_record(pattern: Dict[str, Any], item: Dict[str, Any]) -> bool:
return all(k in item and item[k]==val for k, val in pattern.items())
return all(k in item and item[k] == val for k, val in pattern.items())
def are_lists_equal(list1, list2, comparison_func):
# First check if both lists have the same length
if len(list1) != len(list2):
return False
# Now make sure each element in one list has an equal element in the other list
for item1 in list1:
# Use the supplied function to test for an equal item
if not any(comparison_func(item1, item2) for item2 in list2):
return False
# If all items match, the lists are equal
return True
def compare_urls(url1, url2):
def normalize_url(url):
# Parse the URL
parsed_url = urlparse(url)
# If no scheme is present, assume 'http'
scheme = parsed_url.scheme if parsed_url.scheme else 'http'
# Lowercase the scheme and netloc, remove 'www.', and handle trailing slash
normalized_netloc = parsed_url.netloc.lower().replace("www.", "")
normalized_path = parsed_url.path if parsed_url.path != '/' else ''
# Reassemble the URL with normalized components
normalized_parsed_url = parsed_url._replace(scheme=scheme.lower(), netloc=normalized_netloc,
path=normalized_path)
normalized_url = urlunparse(normalized_parsed_url)
return normalized_url
# Normalize both URLs for comparison
norm_url1 = normalize_url(url1)
norm_url2 = normalize_url(url2)
# Compare the normalized URLs
return norm_url1 == norm_url2
if __name__ == "__main__":
path1 = "../../../../../任务数据/LibreOffice Calc/Create_column_charts_using_statistics_gold_line_scatter.xlsx"
workbook1: Workbook = openpyxl.load_workbook(filename=path1)
worksheet1: Worksheet = workbook1.active
charts: List[ChartBase] = worksheet1._charts
#print(len(charts))
#print(type(charts[0]))
#
#print(len(charts[0].series))
#print(type(charts[0].series[0]))
#print(type(charts[0].series[0].val))
# print(len(charts))
# print(type(charts[0]))
#
# print(len(charts[0].series))
# print(type(charts[0].series[0]))
# print(type(charts[0].series[0].val))
##print(charts[0].series[0].val)
#print(charts[0].series[0].val.numRef.f)
#
#print(type(charts[0].series[0].cat))
# print(charts[0].series[0].val.numRef.f)
#
# print(type(charts[0].series[0].cat))
##print(charts[0].series[0].cat)
#print(charts[0].series[0].cat.numRef)
#print(charts[0].series[0].cat.strRef)
#print(charts[0].series[0].cat.strRef.f)
# print(charts[0].series[0].cat.numRef)
# print(charts[0].series[0].cat.strRef)
# print(charts[0].series[0].cat.strRef.f)
#print(type(charts[0].title.tx.strRef))
#print(type(charts[0].title.tx.rich))
#print(type(charts[0].title.txPr))
#print(len(charts[0].title.tx.rich.p))
#print(len(charts[0].title.tx.rich.p[0].r))
#print(type(charts[0].title.tx.rich.p[0].r[0]))
#print(type(charts[0].title.tx.rich.p[0].r[0].t))
#print(charts[0].title.tx.rich.p[0].r[0].t)
# print(type(charts[0].title.tx.strRef))
# print(type(charts[0].title.tx.rich))
# print(type(charts[0].title.txPr))
# print(len(charts[0].title.tx.rich.p))
# print(len(charts[0].title.tx.rich.p[0].r))
# print(type(charts[0].title.tx.rich.p[0].r[0]))
# print(type(charts[0].title.tx.rich.p[0].r[0].t))
# print(charts[0].title.tx.rich.p[0].r[0].t)
#print(type(charts[0].anchor))
#print(charts[0].anchor.editAs)
#print(charts[0].anchor._from.col, charts[0].anchor.to.row)
#print(charts[0].anchor.to.col, charts[0].anchor.to.row)
# print(type(charts[0].anchor))
# print(charts[0].anchor.editAs)
# print(charts[0].anchor._from.col, charts[0].anchor.to.row)
# print(charts[0].anchor.to.col, charts[0].anchor.to.row)
#df1 = pd.read_excel(path1)
#print(df1)
# df1 = pd.read_excel(path1)
# print(df1)
print(load_charts(path1, chart_props=["title", "xtitle", "ytitle", "type"]))
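A quick sketch of how `compare_urls` normalizes before comparing: the scheme and host are lower-cased, a leading `www.` is stripped, and a bare trailing slash is ignored, but differing schemes still fail. The import path matches the one used in the SetupController diff above:

```python
from desktop_env.evaluators.metrics.utils import compare_urls

print(compare_urls("http://www.Example.com/", "http://example.com"))  # True after normalization
print(compare_urls("https://example.com", "http://example.com"))      # False: schemes differ
```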

View File

@@ -14,7 +14,7 @@ from pyatspi import Value as ATValue
from pyatspi import Action as ATAction
from typing import List, Dict
from typing import Any
from typing import Any, Optional
import Xlib
import pyautogui
@@ -114,6 +114,39 @@ def capture_screen_with_cursor():
return send_file(file_path, mimetype='image/png')
def _has_active_terminal(desktop: Accessible) -> bool:
""" A quick check whether the terminal window is open and active.
"""
for app in desktop:
if app.getRoleName() == "application" and app.name == "gnome-terminal-server":
for frame in app:
if frame.getRoleName() == "frame" and frame.getState().contains(pyatspi.STATE_ACTIVE):
return True
return False
@app.route('/terminal', methods=['GET'])
def get_terminal_output():
user_platform = platform.system()
output: Optional[str] = None
try:
if user_platform == "Linux":
desktop: Accessible = pyatspi.Registry.getDesktop(0)
if _has_active_terminal(desktop):
desktop_xml: _Element = _create_node(desktop)
# 1. the terminal window (frame of application is st:active) is open and active
# 2. the terminal tab (terminal status is st:focused) is focused
xpath = '//application[@name="gnome-terminal-server"]/frame[@st:active="true"]//terminal[@st:focused="true"]'
terminals: List[_Element] = desktop_xml.xpath(xpath, namespaces=_accessibility_ns_map)
output = terminals[0].text.rstrip() if len(terminals) == 1 else None
else: # Windows and macOS are not implemented currently
raise NotImplementedError
return jsonify({"output": output, "status": "success"})
except:
return jsonify({"output": None, "status": "error"})
_accessibility_ns_map = { "st": "uri:deskat:state.at-spi.gnome.org"
, "attr": "uri:deskat:attributes.at-spi.gnome.org"
, "cp": "uri:deskat:component.at-spi.gnome.org"

View File

@@ -3,16 +3,64 @@
"snapshot": "chrome",
"instruction": "Can you make my computer bring back the last tab I shut down?",
"source": "https://www.wikihow.com/Switch-Tabs-in-Chrome",
"config": [],
"config": [
{
"type": "launch",
"parameters": {
"command": [
"google-chrome",
"--remote-debugging-port=1337"
]
}
},
{
"type": "launch",
"parameters": {
"command": [
"socat",
"tcp-listen:9222,fork",
"tcp:localhost:1337"
]
}
},
{
"type": "chrome_open_tabs",
"parameters": {
"urls_to_open": [
"https://www.lonelyplanet.com",
"https://www.airbnb.com",
"https://www.tripadvisor.com"
]
}
},
{
"type": "chrome_close_tabs",
"parameters": {
"urls_to_close": [
"https://www.tripadvisor.com"
]
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"func": "is_expected_tabs",
"result": {
"type": "open_tabs_info"
},
"expected": {
"type": "rule",
"rules": {
"type": "url",
"urls": [
"https://www.lonelyplanet.com",
"https://www.airbnb.com",
"https://www.tripadvisor.com"
]
}
}
}
}
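This config chains two launch steps (Chrome with `--remote-debugging-port=1337`, then `socat` forwarding host port 9222 to it) before the `chrome_open_tabs` and `chrome_close_tabs` actions handled by the SetupController above. A hedged sketch of how such entries might be dispatched; `_launch_setup` is an assumed handler name, since only the `/setup/launch` route appears in the diff:

```python
def run_config(setup_controller, config: list) -> None:
    # each entry is {"type": ..., "parameters": {...}}, as in the JSON above
    handlers = {
        "launch": setup_controller._launch_setup,  # assumption: handler name not shown in the diff
        "chrome_open_tabs": setup_controller._chrome_open_tabs_setup,
        "chrome_close_tabs": setup_controller._chrome_close_tabs_setup,
    }
    for entry in config:
        handlers[entry["type"]](**entry["parameters"])
```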

View File

@@ -0,0 +1,43 @@
{
"id": "2ad9387a-65d8-4e33-ad5b-7580065a27ca",
"snapshot": "chrome",
"instruction": "Can you make a new folder for me on that bookmarks bar in my internet browser? Let's call it 'Favorites.'",
"source": "https://www.youtube.com/watch?v=IN-Eq_UripQ",
"config": [
{
"type": "launch",
"parameters": {
"command": [
"google-chrome",
"--remote-debugging-port=1337"
]
}
},
{
"type": "launch",
"parameters": {
"command": [
"socat",
"tcp-listen:9222,fork",
"tcp:localhost:1337"
]
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "is_expected_bookmarks",
"result": {
"type": "bookmarks"
},
"expected": {
"type": "rule",
"rules": {
}
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "35253b65-1c19-4304-8aa4-6884b8218fc0",
"snapshot": "chrome",
"instruction": "Hey, I need a quick way back to this site. Could you whip up a shortcut on my desktop for me?",
"source": "https://www.laptopmag.com/articles/how-to-create-desktop-shortcuts-for-web-pages-using-chrome",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "3d1682a7-0fb0-49ae-a4dc-a73afd2d06d5",
"snapshot": "chrome",
"instruction": "Computer, could you set up Chrome so it automatically fills in forms with my address, say, 123 Main Street, Anytown, USA, for me?",
"source": "https://in5stepstutorials.com/google-chrome/add-change-delete-autofill-address.php",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "6766f2b8-8a72-417f-a9e5-56fcaa735837",
"snapshot": "chrome",
"instruction": "Could you help me install the unpacked extension at /to/path in Chrome?",
"source": "https://support.google.com/chrome/thread/205881926/it-s-possible-to-load-unpacked-extension-automatically-in-chrome?hl=en",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "7a5a7856-f1b6-42a4-ade9-1ca81ca0f263",
"snapshot": "chrome",
"instruction": "Can you save this webpage I'm looking at to my bookmarks so I can come back to it later?",
"source": "https://www.youtube.com/watch?v=ZaZ8GcTxjXA",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "d088f539-cab4-4f9a-ac92-9999fc3a656e",
"snapshot": "chrome",
"instruction": "I've got a bunch of fun stuff open right now. Can you group all these tabs at the top of Chrome and name it 'Leisure' for me? It'll help me get back to them quickly when I want to relax.",
"source": "https://medium.com/@inkverseuk2/useful-tips-and-tricks-for-the-google-chrome-browser-ac7d0d24b3cc",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,18 @@
{
"id": "e1e75309-3ddb-4d09-92ec-de869c928143",
"snapshot": "chrome",
"instruction": "Computer, can you turn the webpage I'm looking at into a PDF file and put it on my main screen, you know, the Desktop?",
"source": "https://in5stepstutorials.com/google-chrome/save-web-page-as-pdf-in-chrome.php",
"config": [],
"trajectory": "trajectories/",
"related_apps": [
"chrome"
],
"evaluator": {
"func": "",
"result": {
},
"expected": {
}
}
}

View File

@@ -0,0 +1,42 @@
{
"id": "554785e9-4523-4e7a-b8e1-8016f565f56a",
"snapshot": "gimp",
"instruction": "Could you help me increase the saturation of my photo to make it more colorful?",
"source": "https://www.quora.com/How-do-I-edit-a-photo-in-GIMP",
"config": [
{
"type": "download",
"parameters": {
"files": [
{
"url": "https://drive.usercontent.google.com/download?id=1-3cc3qqjlJ_3d9IpwsuvSuYpyJ2LxrIA&export=download&authuser=0&confirm=t&uuid=6b08e902-ed25-47d1-97d9-7794f9508288&at=APZUnTW74TY2hHtBbWDa0N7-cE58:1705060487948",
"path": "Desktop/woman_sitting_by_the_tree2.png"
}
]
}
},
{
"type": "open",
"parameters": {
"path": "Desktop/woman_sitting_by_the_tree2.png"
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"gimp"
],
"evaluator": {
"func": "increase_saturation",
"expected": {
"type": "vm_file",
"path": "Desktop/woman_sitting_by_the_tree2.png",
"dest": "woman_sitting_by_the_tree2.png"
},
"result": {
"type": "vm_file",
"path": "Desktop/edited_colorful.png",
"dest": "edited_colorful.png"
}
}
}

View File

@@ -0,0 +1,34 @@
{
"id": "77b8ab4d-994f-43ac-8930-8ca087d7c4b4",
"snapshot": "gimp",
"instruction": "Could you help me keep my photo on the desktop and rename it export.jpg?",
"source": "https://superuser.com/questions/1636113/how-to-get-gimp-to-recognize-images-or-pictures-folder-as-the-default-folder-for",
"config": [
{
"type": "download",
"parameters": {
"files": [
{
"url": "https://drive.usercontent.google.com/download?id=13ECFsPxznuoCANto21ijj9OzP0APukIH&export=download&authuser=0&confirm=t&uuid=d8f2dd03-8992-4646-be62-3a3cf89583f2&at=APZUnTVsR0xmbXvpFIpXLzCcLrMa:1705062951627",
"path": "Desktop/The_Lost_River_Of_Dreams.jpg"
}
]
}
},
{
"type": "open",
"parameters": {
"path": "Desktop/The_Lost_River_Of_Dreams.jpg"
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"gimp"
],
"evaluator": {
"func": "check_file_exists",
"file_name": "export.png",
"directory": "/home/user/Desktop/"
}
}

View File

@@ -0,0 +1,42 @@
{
"id": "7a4deb26-d57d-4ea9-9a73-630f66a7b568",
"snapshot": "gimp",
"instruction": "Make my picture less bright, please.",
"source": "https://www.quora.com/How-do-I-edit-a-photo-in-GIMP",
"config": [
{
"type": "download",
"parameters": {
"files": [
{
"url": "https://drive.usercontent.google.com/download?id=1SIvX9Wimyw6i2UvnoLTNDHIObvDLAsIM&export=download&authuser=0&confirm=t&uuid=a48447ab-13a2-421f-9662-6ffff8f6f6d5&at=APZUnTVRxofs822XxgEv33WwYCkb:1705046264363",
"path": "Desktop/woman_sitting_by_the_tree.png"
}
]
}
},
{
"type": "open",
"parameters": {
"path": "Desktop/woman_sitting_by_the_tree.png"
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"gimp"
],
"evaluator": {
"func": "decrease_brightness",
"expected": {
"type": "vm_file",
"path": "Desktop/woman_sitting_by_the_tree.png",
"dest": "woman_sitting_by_the_tree.png"
},
"result": {
"type": "vm_file",
"path": "Desktop/edited_darker.png",
"dest": "edited_darker.png"
}
}
}

View File

@@ -0,0 +1,42 @@
{
"id": "f4aec372-4fb0-4df5-a52b-79e0e2a5d6ce",
"snapshot": "gimp",
"instruction": "Help me select the yellow triangle and move it to the center of my photo.",
"source": "https://superuser.com/questions/612338/how-do-i-select-and-move-an-object-in-gimp",
"config": [
{
"type": "download",
"parameters": {
"files": [
{
"url": "https://drive.usercontent.google.com/download?id=1IMFTLy3lLSP8ee5b-940JjaoOd5-oGwn&export=download&authuser=0&confirm=t&uuid=0e06dffa-b602-42c6-8841-cf6eb3524e2b&at=APZUnTVpN4xHqAIAYmHZgk-UXvY4:1705081430356",
"path": "Desktop/Triangle_On_The_Side.png"
}
]
}
},
{
"type": "open",
"parameters": {
"path": "Desktop/Triangle_On_The_Side.png"
}
}
],
"trajectory": "trajectories/",
"related_apps": [
"gimp"
],
"evaluator": {
"func": "compare_triangle_positions",
"expected": {
"type": "vm_file",
"path": "Desktop/Triangle_On_The_Side.png",
"dest": "Triangle_On_The_Side.png"
},
"result": {
"type": "vm_file",
"path": "Desktop/Triangle_In_The_Middle.png",
"dest": "Triangle_In_The_Middle.png"
}
}
}

View File

@@ -1,71 +0,0 @@
# eval README
This is the README for your extension "eval". After writing up a brief description, we recommend including the following sections.
## Features
Describe specific features of your extension including screenshots of your extension in action. Image paths are relative to this README file.
For example if there is an image subfolder under your extension project workspace:
\!\[feature X\]\(images/feature-x.png\)
> Tip: Many popular extensions utilize animations. This is an excellent way to show off your extension! We recommend short, focused animations that are easy to follow.
## Requirements
If you have any requirements or dependencies, add a section describing those and how to install and configure them.
## Extension Settings
Include if your extension adds any VS Code settings through the `contributes.configuration` extension point.
For example:
This extension contributes the following settings:
* `myExtension.enable`: Enable/disable this extension.
* `myExtension.thing`: Set to `blah` to do something.
## Known Issues
Calling out known issues can help limit users opening duplicate issues against your extension.
## Release Notes
Users appreciate release notes as you update your extension.
### 1.0.0
Initial release of ...
### 1.0.1
Fixed issue #.
### 1.1.0
Added features X, Y, and Z.
---
## Following extension guidelines
Ensure that you've read through the extensions guidelines and follow the best practices for creating your extension.
* [Extension Guidelines](https://code.visualstudio.com/api/references/extension-guidelines)
## Working with Markdown
You can author your README using Visual Studio Code. Here are some useful editor keyboard shortcuts:
* Split the editor (`Cmd+\` on macOS or `Ctrl+\` on Windows and Linux).
* Toggle preview (`Shift+Cmd+V` on macOS or `Shift+Ctrl+V` on Windows and Linux).
* Press `Ctrl+Space` (Windows, Linux, macOS) to see a list of Markdown snippets.
## For more information
* [Visual Studio Code's Markdown Support](http://code.visualstudio.com/docs/languages/markdown)
* [Markdown Syntax Reference](https://help.github.com/articles/markdown-basics/)
**Enjoy!**

Binary file not shown.

View File

@@ -244,12 +244,29 @@
"node": ">= 6"
}
},
"node_modules/@types/fs-extra": {
"version": "11.0.4",
"resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz",
"integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==",
"dependencies": {
"@types/jsonfile": "*",
"@types/node": "*"
}
},
"node_modules/@types/json-schema": {
"version": "7.0.15",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
"dev": true
},
"node_modules/@types/jsonfile": {
"version": "6.1.4",
"resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz",
"integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/mocha": {
"version": "10.0.6",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz",
@@ -260,7 +277,6 @@
"version": "18.19.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.4.tgz",
"integrity": "sha512-xNzlUhzoHotIsnFoXmJB+yWmBvFZgKCI9TtPIEdYIMM1KWfwuY8zh7wvc1u1OAXlC7dlf6mZVx/s+Y5KfFz19A==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
@@ -1234,6 +1250,19 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/fs-extra": {
"version": "11.2.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz",
"integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=14.14"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -1332,6 +1361,11 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
},
"node_modules/graphemer": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
@@ -1580,6 +1614,17 @@
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
"dev": true
},
"node_modules/jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/jszip": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz",
@@ -2538,8 +2583,15 @@
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"node_modules/universalify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/uri-js": {
"version": "4.4.1",

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE

View File

@@ -0,0 +1,15 @@
# Installation
> `npm install --save @types/fs-extra`
# Summary
This package contains type definitions for fs-extra (https://github.com/jprichardson/node-fs-extra).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra.
### Additional Details
* Last updated: Tue, 07 Nov 2023 20:08:00 GMT
* Dependencies: [@types/jsonfile](https://npmjs.com/package/@types/jsonfile), [@types/node](https://npmjs.com/package/@types/node)
# Credits
These definitions were written by [Alan Agius](https://github.com/alan-agius4), [midknight41](https://github.com/midknight41), [Brendan Forster](https://github.com/shiftkey), [Mees van Dijk](https://github.com/mees-), [Justin Rockwood](https://github.com/jrockwood), [Sang Dang](https://github.com/sangdth), [Florian Keller](https://github.com/ffflorian), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Tiger Oakes](https://github.com/NotWoods), and [BendingBender](https://github.com/BendingBender).

View File

@@ -0,0 +1,111 @@
import * as fse from "./index.js";
export {
copy,
CopyFilterAsync,
CopyFilterSync,
CopyOptions,
CopyOptionsSync,
copySync,
createFile,
createFileSync,
createLink,
createLinkSync,
createSymlink,
createSymlinkSync,
emptyDir,
emptydir,
emptyDirSync,
emptydirSync,
ensureDir,
EnsureDirOptions,
ensureDirSync,
ensureFile,
ensureFileSync,
ensureLink,
ensureLinkSync,
ensureSymlink,
ensureSymlinkSync,
JsonOutputOptions,
JsonReadOptions,
JsonWriteOptions,
mkdirp,
mkdirpSync,
mkdirs,
mkdirsSync,
move,
MoveOptions,
moveSync,
NoParamCallback,
NoParamCallbackWithUndefined,
outputFile,
outputFileSync,
outputJSON,
outputJson,
outputJSONSync,
outputJsonSync,
pathExists,
pathExistsSync,
PathLike,
readJSON,
readJson,
readJSONSync,
readJsonSync,
remove,
removeSync,
SymlinkType,
WriteFileOptions,
writeJSON,
writeJson,
writeJSONSync,
writeJsonSync,
} from "./index.js";
declare const fsExtra: {
copy: typeof fse.copy;
copySync: typeof fse.copySync;
emptyDirSync: typeof fse.emptyDirSync;
emptydirSync: typeof fse.emptydirSync;
emptyDir: typeof fse.emptyDir;
emptydir: typeof fse.emptydir;
createFile: typeof fse.createFile;
createFileSync: typeof fse.createFileSync;
ensureFile: typeof fse.ensureFile;
ensureFileSync: typeof fse.ensureFileSync;
createLink: typeof fse.createLink;
createLinkSync: typeof fse.createLinkSync;
ensureLink: typeof fse.ensureLink;
ensureLinkSync: typeof fse.ensureLinkSync;
createSymlink: typeof fse.createSymlink;
createSymlinkSync: typeof fse.createSymlinkSync;
ensureSymlink: typeof fse.ensureSymlink;
ensureSymlinkSync: typeof fse.ensureSymlinkSync;
readJson: typeof fse.readJson;
readJSON: typeof fse.readJSON;
readJsonSync: typeof fse.readJsonSync;
readJSONSync: typeof fse.readJSONSync;
writeJson: typeof fse.writeJson;
writeJSON: typeof fse.writeJSON;
writeJsonSync: typeof fse.writeJsonSync;
writeJSONSync: typeof fse.writeJSONSync;
outputJson: typeof fse.outputJson;
outputJSON: typeof fse.outputJSON;
outputJsonSync: typeof fse.outputJsonSync;
outputJSONSync: typeof fse.outputJSONSync;
mkdirs: typeof fse.mkdirs;
mkdirsSync: typeof fse.mkdirsSync;
mkdirp: typeof fse.mkdirp;
mkdirpSync: typeof fse.mkdirpSync;
ensureDir: typeof fse.ensureDir;
ensureDirSync: typeof fse.ensureDirSync;
move: typeof fse.move;
moveSync: typeof fse.moveSync;
outputFile: typeof fse.outputFile;
outputFileSync: typeof fse.outputFileSync;
pathExists: typeof fse.pathExists;
pathExistsSync: typeof fse.pathExistsSync;
remove: typeof fse.remove;
removeSync: typeof fse.removeSync;
};
export default fsExtra;

View File

@@ -0,0 +1,996 @@
/// <reference types="node" />
import * as fs from "fs";
import * as jsonfile from "jsonfile";
import { StringifyOptions } from "jsonfile/utils";
export * from "fs";
/**
* Copy a file or directory. The directory can have contents.
*
* @param src Note that if `src` is a directory it will copy everything inside of this directory,
* not the entire directory itself (see [issue #537](https://github.com/jprichardson/node-fs-extra/issues/537)).
* @param dest Note that if `src` is a file, `dest` cannot be a directory
* (see [issue #323](https://github.com/jprichardson/node-fs-extra/issues/323)).
*
* @example
* import * as fs from 'fs-extra'
*
* // With a callback:
* fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
* if (err) return console.error(err)
* console.log('success!')
* }) // copies file
*
* fs.copy('/tmp/mydir', '/tmp/mynewdir', err => {
* if (err) return console.error(err)
* console.log('success!')
* }) // copies directory, even if it has subdirectories or files
*
* // With Promises:
* fs.copy('/tmp/myfile', '/tmp/mynewfile')
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.copy('/tmp/myfile', '/tmp/mynewfile')
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*
* // Using filter function
* fs.copy(
* '/tmp/mydir',
* '/tmp/mynewdir',
* {
* filter(src, dest) {
* // your logic here
* // it will be copied if return true
* }
* },
* err => {
* if (err) return console.error(err)
* console.log('success!')
* }
* )
*/
export function copy(src: string, dest: string, options?: CopyOptions): Promise<void>;
export function copy(src: string, dest: string, callback: NoParamCallbackWithUndefined): void;
export function copy(src: string, dest: string, options: CopyOptions, callback: NoParamCallbackWithUndefined): void;
/**
* Copy a file or directory. The directory can have contents.
*
* @param src Note that if `src` is a directory it will copy everything inside of this directory,
* not the entire directory itself (see [issue #537](https://github.com/jprichardson/node-fs-extra/issues/537)).
* @param dest Note that if `src` is a file, `dest` cannot be a directory
* (see [issue #323](https://github.com/jprichardson/node-fs-extra/issues/323)).
*
* @example
* import * as fs from 'fs-extra'
*
* // copy file
* fs.copySync('/tmp/myfile', '/tmp/mynewfile')
*
* // copy directory, even if it has subdirectories or files
* fs.copySync('/tmp/mydir', '/tmp/mynewdir')
*
* // Using filter function
* fs.copySync('/tmp/mydir', '/tmp/mynewdir', {
* filter(src, dest) {
* // your logic here
* // it will be copied if return true
* }
* })
*/
export function copySync(src: string, dest: string, options?: CopyOptionsSync): void;
/**
* Moves a file or directory, even across devices.
*
* @param dest Note: When `src` is a file, `dest` must be a file and when `src` is a directory, `dest` must be a directory.
*
* @example
* import * as fs from 'fs-extra'
*
* const src = '/tmp/file.txt'
* const dest = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.move(src, dest, err => {
* if (err) return console.error(err)
* console.log('success!')
* })
*
* // With Promises:
* fs.move(src, dest)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.move(src, dest)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*
* // Using `overwrite` option
* fs.move('/tmp/somedir', '/tmp/may/already/exist/somedir', { overwrite: true }, err => {
* if (err) return console.error(err)
* console.log('success!')
* })
*/
export function move(src: string, dest: string, options?: MoveOptions): Promise<void>;
export function move(src: string, dest: string, callback: NoParamCallbackWithUndefined): void;
export function move(src: string, dest: string, options: MoveOptions, callback: NoParamCallbackWithUndefined): void;
/**
* Moves a file or directory, even across devices.
*
* @param dest Note: When `src` is a file, `dest` must be a file and when `src` is a directory, `dest` must be a directory.
*
* @example
* import * as fs from 'fs-extra'
*
* fs.moveSync('/tmp/somefile', '/tmp/does/not/exist/yet/somefile')
*
* // Using `overwrite` option
* fs.moveSync('/tmp/somedir', '/tmp/may/already/exist/somedir', { overwrite: true })
*/
export function moveSync(src: string, dest: string, options?: MoveOptions): void;
/**
* Ensures that the file exists. If the file that is requested to be created is in
* directories that do not exist, these directories are created. If the file already
* exists, it is **NOT MODIFIED**.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.ensureFile(file, err => {
* console.log(err) // => null
* // file has now been created, including the directory it is to be placed in
* })
*
* // With Promises:
* fs.ensureFile(file)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.ensureFile(file)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function ensureFile(file: string): Promise<void>;
export function ensureFile(file: string, callback: NoParamCallbackWithUndefined): void;
/**
* @see ensureFile
*/
export const createFile: typeof ensureFile;
/**
* Ensures that the file exists. If the file that is requested to be created is in
* directories that do not exist, these directories are created. If the file already
* exists, it is **NOT MODIFIED**.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.txt'
* fs.ensureFileSync(file)
* // file has now been created, including the directory it is to be placed in
*/
export function ensureFileSync(file: string): void;
/**
* @see ensureFileSync
*/
export const createFileSync: typeof ensureFileSync;
/**
* Ensures that the link exists. If the directory structure does not exist, it is created.
*
* @example
* import * as fs from 'fs-extra'
*
* const srcPath = '/tmp/file.txt'
* const destPath = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.ensureLink(srcPath, destPath, err => {
* console.log(err) // => null
* // link has now been created, including the directory it is to be placed in
* })
*
* // With Promises:
* fs.ensureLink(srcPath, destPath)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.ensureLink(srcPath, destPath)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function ensureLink(src: string, dest: string): Promise<void>;
export function ensureLink(src: string, dest: string, callback: fs.NoParamCallback): void;
/**
* @see ensureLink
*/
export const createLink: typeof ensureLink;
/**
* Ensures that the link exists. If the directory structure does not exist, it is created.
*
* @example
* import * as fs from 'fs-extra'
*
* const srcPath = '/tmp/file.txt'
* const destPath = '/tmp/this/path/does/not/exist/file.txt'
* fs.ensureLinkSync(srcPath, destPath)
* // link has now been created, including the directory it is to be placed in
*/
export function ensureLinkSync(src: string, dest: string): void;
/**
* @see ensureLinkSync
*/
export const createLinkSync: typeof ensureLinkSync;
/**
* Ensures that the symlink exists. If the directory structure does not exist, it is created.
*
* @param type It is only available on Windows and ignored on other platforms.
*
* @example
* import * as fs from 'fs-extra'
*
* const srcPath = '/tmp/file.txt'
* const destPath = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.ensureSymlink(srcPath, destPath, err => {
* console.log(err) // => null
* // symlink has now been created, including the directory it is to be placed in
* })
*
* // With Promises:
* fs.ensureSymlink(srcPath, destPath)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.ensureSymlink(srcPath, destPath)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function ensureSymlink(src: string, dest: string, type?: SymlinkType): Promise<void>;
export function ensureSymlink(src: string, dest: string, callback: fs.NoParamCallback): void;
export function ensureSymlink(src: string, dest: string, type: SymlinkType, callback: fs.NoParamCallback): void;
/**
* @see ensureSymlink
*/
export const createSymlink: typeof ensureSymlink;
/**
* Ensures that the symlink exists. If the directory structure does not exist, it is created.
*
* @param type It is only available on Windows and ignored on other platforms.
*
* @example
* import * as fs from 'fs-extra'
*
* const srcPath = '/tmp/file.txt'
* const destPath = '/tmp/this/path/does/not/exist/file.txt'
* fs.ensureSymlinkSync(srcPath, destPath)
* // symlink has now been created, including the directory it is to be placed in
*/
export function ensureSymlinkSync(src: string, dest: string, type?: SymlinkType): void;
/**
* @see ensureSymlinkSync
*/
export const createSymlinkSync: typeof ensureSymlinkSync;
/**
* Ensures that the directory exists. If the directory structure does not exist, it is created.
*
* @example
* import * as fs from 'fs-extra'
*
* const dir = '/tmp/this/path/does/not/exist'
* const desiredMode = 0o2775
* const options = {
* mode: 0o2775
* }
*
* // With a callback:
* fs.ensureDir(dir, err => {
* console.log(err) // => null
* // dir has now been created, including the directory it is to be placed in
* })
*
* // With a callback and a mode integer
* fs.ensureDir(dir, desiredMode, err => {
* console.log(err) // => null
* // dir has now been created with mode 0o2775, including the directory it is to be placed in
* })
*
* // With Promises:
* fs.ensureDir(dir)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With Promises and a mode integer:
* fs.ensureDir(dir, desiredMode)
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.ensureDir(dir)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
* asyncAwait()
*
* // With async/await and an options object, containing mode:
* async function asyncAwaitMode () {
* try {
* await fs.ensureDir(dir, options)
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
* asyncAwaitMode()
*/
export function ensureDir(path: string, options?: EnsureDirOptions | number): Promise<void>;
export function ensureDir(path: string, callback: fs.NoParamCallback): void;
export function ensureDir(path: string, options: EnsureDirOptions | number, callback: fs.NoParamCallback): void;
/**
* Ensures that the directory exists. If the directory structure does not exist, it is created.
* If provided, options may specify the desired mode for the directory.
*
* @example
* import * as fs from 'fs-extra'
*
* const dir = '/tmp/this/path/does/not/exist'
*
* const desiredMode = 0o2775
* const options = {
* mode: 0o2775
* }
*
* fs.ensureDirSync(dir)
* // dir has now been created, including the directory it is to be placed in
*
* fs.ensureDirSync(dir, desiredMode)
* // dir has now been created, including the directory it is to be placed in with permission 0o2775
*
* fs.ensureDirSync(dir, options)
* // dir has now been created, including the directory it is to be placed in with permission 0o2775
*/
export function ensureDirSync(path: string, options?: EnsureDirOptions | number): void;
/**
* @see ensureDir
*/
export const mkdirs: typeof ensureDir;
/**
* @see ensureDirSync
*/
export const mkdirsSync: typeof ensureDirSync;
/**
* @see ensureDir
*/
export const mkdirp: typeof ensureDir;
/**
* @see ensureDirSync
*/
export const mkdirpSync: typeof ensureDirSync;
/**
* Almost the same as `writeFile` (i.e. it overwrites), except that if the parent directory
* does not exist, it's created.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.outputFile(file, 'hello!', err => {
* console.log(err) // => null
*
* fs.readFile(file, 'utf8', (err, data) => {
* if (err) return console.error(err)
* console.log(data) // => hello!
* })
* })
*
* // With Promises:
* fs.outputFile(file, 'hello!')
* .then(() => fs.readFile(file, 'utf8'))
* .then(data => {
* console.log(data) // => hello!
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.outputFile(file, 'hello!')
*
* const data = await fs.readFile(file, 'utf8')
*
* console.log(data) // => hello!
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function outputFile(
file: string,
data: string | NodeJS.ArrayBufferView,
options?: fs.WriteFileOptions,
): Promise<void>;
export function outputFile(file: string, data: string | NodeJS.ArrayBufferView, callback: fs.NoParamCallback): void;
export function outputFile(
file: string,
data: string | NodeJS.ArrayBufferView,
options: fs.WriteFileOptions,
callback: fs.NoParamCallback,
): void;
/**
* Almost the same as `writeFileSync` (i.e. it overwrites), except that if the parent directory
* does not exist, it's created.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.txt'
* fs.outputFileSync(file, 'hello!')
*
* const data = fs.readFileSync(file, 'utf8')
* console.log(data) // => hello!
*/
export function outputFileSync(
file: string,
data: string | NodeJS.ArrayBufferView,
options?: fs.WriteFileOptions,
): void;
/**
* Reads a JSON file and then parses it into an object.
*
* @example
* import * as fs from 'fs-extra'
*
* // With a callback:
* fs.readJson('./package.json', (err, packageObj) => {
* if (err) console.error(err)
* console.log(packageObj.version) // => 0.1.3
* })
*
* // With Promises:
* fs.readJson('./package.json')
* .then(packageObj => {
* console.log(packageObj.version) // => 0.1.3
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* const packageObj = await fs.readJson('./package.json')
* console.log(packageObj.version) // => 0.1.3
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*
 * // `readJson()` can take a `throws` option set to `false` and it won't throw if the JSON is invalid. Example:
* const file = '/tmp/some-invalid.json'
* const data = '{not valid JSON'
* fs.writeFileSync(file, data)
*
* // With a callback:
* fs.readJson(file, { throws: false }, (err, obj) => {
* if (err) console.error(err)
* console.log(obj) // => null
* })
*
* // With Promises:
* fs.readJson(file, { throws: false })
* .then(obj => {
* console.log(obj) // => null
* })
* .catch(err => {
* console.error(err) // Not called
* })
*
* // With async/await:
* async function asyncAwaitThrows () {
* const obj = await fs.readJson(file, { throws: false })
* console.log(obj) // => null
* }
*
* asyncAwaitThrows()
*/
export const readJson: typeof jsonfile.readFile;
/**
* @see readJson
*/
export const readJSON: typeof jsonfile.readFile;
/**
* Reads a JSON file and then parses it into an object.
*
* @example
* import * as fs from 'fs-extra'
*
* const packageObj = fs.readJsonSync('./package.json')
* console.log(packageObj.version) // => 2.0.0
*
* // `readJsonSync()` can take a `throws` option set to `false` and it won't throw if the JSON is invalid. Example:
* const file = '/tmp/some-invalid.json'
* const data = '{not valid JSON'
* fs.writeFileSync(file, data)
*
* const obj = fs.readJsonSync(file, { throws: false })
* console.log(obj) // => null
*/
export const readJsonSync: typeof jsonfile.readFileSync;
/**
* @see readJsonSync
*/
export const readJSONSync: typeof jsonfile.readFileSync;
/**
* Writes an object to a JSON file.
*
* @example
* import * as fs from 'fs-extra'
*
* // With a callback:
* fs.writeJson('./package.json', {name: 'fs-extra'}, err => {
* if (err) return console.error(err)
* console.log('success!')
* })
*
* // With Promises:
* fs.writeJson('./package.json', {name: 'fs-extra'})
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.writeJson('./package.json', {name: 'fs-extra'})
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export const writeJson: typeof jsonfile.writeFile;
/**
* @see writeJson
*/
export const writeJSON: typeof jsonfile.writeFile;
/**
* Writes an object to a JSON file.
*
* @example
* import * as fs from 'fs-extra'
*
* fs.writeJsonSync('./package.json', {name: 'fs-extra'})
*/
export const writeJsonSync: typeof jsonfile.writeFileSync;
/**
* @see writeJsonSync
*/
export const writeJSONSync: typeof jsonfile.writeFileSync;
/**
* Almost the same as `writeJson`, except that if the directory does not exist, it's created.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.json'
*
* // With a callback:
* fs.outputJson(file, {name: 'JP'}, err => {
* console.log(err) // => null
*
* fs.readJson(file, (err, data) => {
* if (err) return console.error(err)
* console.log(data.name) // => JP
* })
* })
*
* // With Promises:
* fs.outputJson(file, {name: 'JP'})
* .then(() => fs.readJson(file))
* .then(data => {
* console.log(data.name) // => JP
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.outputJson(file, {name: 'JP'})
*
* const data = await fs.readJson(file)
*
* console.log(data.name) // => JP
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function outputJson(file: string, data: any, options?: JsonOutputOptions): Promise<void>;
export function outputJson(file: string, data: any, options: JsonOutputOptions, callback: fs.NoParamCallback): void;
export function outputJson(file: string, data: any, callback: fs.NoParamCallback): void;
/**
* @see outputJson
*/
export const outputJSON: typeof outputJson;
/**
* Almost the same as `writeJsonSync`, except that if the directory does not exist, it's created.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.json'
* fs.outputJsonSync(file, {name: 'JP'})
*
* const data = fs.readJsonSync(file)
* console.log(data.name) // => JP
*/
export function outputJsonSync(file: string, data: any, options?: JsonOutputOptions): void;
/**
* @see outputJsonSync
*/
export const outputJSONSync: typeof outputJsonSync;
/**
* Removes a file or directory. The directory can have contents. If the path does not exist, silently does nothing.
*
* @example
* import * as fs from 'fs-extra'
*
* // remove file
* // With a callback:
* fs.remove('/tmp/myfile', err => {
* if (err) return console.error(err)
* console.log('success!')
* })
*
* fs.remove('/home/jprichardson', err => {
* if (err) return console.error(err)
* console.log('success!') // I just deleted my entire HOME directory.
* })
*
* // With Promises:
* fs.remove('/tmp/myfile')
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.remove('/tmp/myfile')
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function remove(dir: string): Promise<void>;
export function remove(dir: string, callback: fs.NoParamCallback): void;
/**
* Removes a file or directory. The directory can have contents. If the path does not exist, silently does nothing.
*
* @example
* import * as fs from 'fs-extra'
*
* // remove file
* fs.removeSync('/tmp/myfile')
*
* fs.removeSync('/home/jprichardson') // I just deleted my entire HOME directory.
*/
export function removeSync(dir: string): void;
/**
* Ensures that a directory is empty. Deletes directory contents if the directory is not empty.
* If the directory does not exist, it is created. The directory itself is not deleted.
*
* @example
* import * as fs from 'fs-extra'
*
* // assume this directory has a lot of files and folders
* // With a callback:
* fs.emptyDir('/tmp/some/dir', err => {
* if (err) return console.error(err)
* console.log('success!')
* })
*
* // With Promises:
* fs.emptyDir('/tmp/some/dir')
* .then(() => {
* console.log('success!')
* })
* .catch(err => {
* console.error(err)
* })
*
* // With async/await:
* async function asyncAwait () {
* try {
* await fs.emptyDir('/tmp/some/dir')
* console.log('success!')
* } catch (err) {
* console.error(err)
* }
* }
*
* asyncAwait()
*/
export function emptyDir(path: string): Promise<void>;
export function emptyDir(path: string, callback: fs.NoParamCallback): void;
/**
* @see emptyDir
*/
export const emptydir: typeof emptyDir;
/**
* Ensures that a directory is empty. Deletes directory contents if the directory is not empty.
* If the directory does not exist, it is created. The directory itself is not deleted.
*
* @example
* import * as fs from 'fs-extra'
*
* // assume this directory has a lot of files and folders
* fs.emptyDirSync('/tmp/some/dir')
*/
export function emptyDirSync(path: string): void;
/**
* @see emptyDirSync
*/
export const emptydirSync: typeof emptyDirSync;
/**
* Test whether or not the given path exists by checking with the file system. Like
* [`fs.exists`](https://nodejs.org/api/fs.html#fs_fs_exists_path_callback), but with a normal
* callback signature (err, exists). Uses `fs.access` under the hood.
*
* @example
* import * as fs from 'fs-extra'
*
* const file = '/tmp/this/path/does/not/exist/file.txt'
*
* // With a callback:
* fs.pathExists(file, (err, exists) => {
* console.log(err) // => null
* console.log(exists) // => false
* })
*
* // Promise usage:
* fs.pathExists(file)
* .then(exists => console.log(exists)) // => false
*
* // With async/await:
* async function asyncAwait () {
* const exists = await fs.pathExists(file)
*
* console.log(exists) // => false
* }
*
* asyncAwait()
*/
export function pathExists(path: string): Promise<boolean>;
export function pathExists(path: string, callback: (err: NodeJS.ErrnoException | null, exists: boolean) => void): void;
/**
* An alias for [`fs.existsSync`](https://nodejs.org/api/fs.html#fs_fs_existssync_path), created for
* consistency with `pathExists`.
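 *
 * A minimal usage sketch (the path below is illustrative):
 *
 * @example
 * import * as fs from 'fs-extra'
 *
 * const exists = fs.pathExistsSync('/tmp/this/path/does/not/exist/file.txt')
 * console.log(exists) // => false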
*/
export function pathExistsSync(path: string): boolean;
export const access: typeof fs.access.__promisify__ & typeof fs.access;
export const appendFile: typeof fs.appendFile.__promisify__ & typeof fs.appendFile;
export const chmod: typeof fs.chmod.__promisify__ & typeof fs.chmod;
export const chown: typeof fs.chown.__promisify__ & typeof fs.chown;
export const close: typeof fs.close.__promisify__ & typeof fs.close;
export const copyFile: typeof fs.copyFile.__promisify__ & typeof fs.copyFile;
export const exists: typeof fs.exists.__promisify__ & typeof fs.exists;
export const fchmod: typeof fs.fchmod.__promisify__ & typeof fs.fchmod;
export const fchown: typeof fs.fchown.__promisify__ & typeof fs.fchown;
export const fdatasync: typeof fs.fdatasync.__promisify__ & typeof fs.fdatasync;
export const fstat: typeof fs.fstat.__promisify__ & typeof fs.fstat;
export const fsync: typeof fs.fsync.__promisify__ & typeof fs.fsync;
export const ftruncate: typeof fs.ftruncate.__promisify__ & typeof fs.ftruncate;
export const futimes: typeof fs.futimes.__promisify__ & typeof fs.futimes;
export const lchmod: typeof fs.lchmod.__promisify__ & typeof fs.lchmod;
export const lchown: typeof fs.lchown.__promisify__ & typeof fs.lchown;
export const link: typeof fs.link.__promisify__ & typeof fs.link;
export const lstat: typeof fs.lstat.__promisify__ & typeof fs.lstat;
export const mkdir: typeof fs.mkdir.__promisify__ & typeof fs.mkdir;
export const mkdtemp: typeof fs.mkdtemp.__promisify__ & typeof fs.mkdtemp;
export const open: typeof fs.open.__promisify__ & typeof fs.open;
export const opendir: typeof fs.opendir.__promisify__ & typeof fs.opendir;
export const read: typeof fs.read.__promisify__ & typeof fs.read;
export const readv: typeof fs.readv.__promisify__ & typeof fs.readv;
export const readdir: typeof fs.readdir.__promisify__ & typeof fs.readdir;
export const readFile: typeof fs.readFile.__promisify__ & typeof fs.readFile;
export const readlink: typeof fs.readlink.__promisify__ & typeof fs.readlink;
export const realpath:
& typeof fs.realpath.__promisify__
& typeof fs.realpath
& {
native(path: fs.PathLike, options?: fs.EncodingOption): Promise<string>;
native(path: fs.PathLike, options: fs.BufferEncodingOption): Promise<Buffer>;
};
export const rename: typeof fs.rename.__promisify__ & typeof fs.rename;
export const rm: typeof fs.rm.__promisify__ & typeof fs.rm;
export const rmdir: typeof fs.rmdir.__promisify__ & typeof fs.rmdir;
export const stat: typeof fs.stat.__promisify__ & typeof fs.stat;
export const symlink: typeof fs.symlink.__promisify__ & typeof fs.symlink;
export const truncate: typeof fs.truncate.__promisify__ & typeof fs.truncate;
export const unlink: typeof fs.unlink.__promisify__ & typeof fs.unlink;
export const utimes: typeof fs.utimes.__promisify__ & typeof fs.utimes;
export const write: typeof fs.write.__promisify__ & typeof fs.write;
export const writev: typeof fs.writev.__promisify__ & typeof fs.writev;
export const writeFile: typeof fs.writeFile.__promisify__ & typeof fs.writeFile;
export type NoParamCallbackWithUndefined = (err: NodeJS.ErrnoException | null | undefined) => void;
export type SymlinkType = fs.symlink.Type;
export type CopyFilterSync = (src: string, dest: string) => boolean;
export type CopyFilterAsync = (src: string, dest: string) => Promise<boolean>;
export interface CopyOptions {
/**
* Dereference symlinks.
* @default false
*/
dereference?: boolean | undefined;
/**
* Overwrite existing file or directory.
* _Note that the copy operation will silently fail if you set this to `false` and the destination exists._
* Use the `errorOnExist` option to change this behavior.
* @default true
*/
overwrite?: boolean | undefined;
/**
* When `true`, will set last modification and access times to the ones of the original source files.
* When `false`, timestamp behavior is OS-dependent.
* @default false
*/
preserveTimestamps?: boolean | undefined;
/**
* When `overwrite` is `false` and the destination exists, throw an error.
* @default false
*/
errorOnExist?: boolean | undefined;
/**
* Function to filter copied files/directories. Return `true` to copy the item, `false` to ignore it.
* Can also return a `Promise` that resolves to `true` or `false` (or pass in an `async` function).
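 *
 * A minimal sketch of an async filter (the paths and the skip-`.log` rule are illustrative):
 *
 * @example
 * import * as fs from 'fs-extra'
 *
 * async function copyWithoutLogs () {
 *   await fs.copy('/tmp/mydir', '/tmp/mynewdir', {
 *     // an entry is copied only when the returned promise resolves to `true`
 *     filter: async (src) => !src.endsWith('.log')
 *   })
 * }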
*/
filter?: CopyFilterSync | CopyFilterAsync | undefined;
}
export interface CopyOptionsSync extends CopyOptions {
/**
* Function to filter copied files/directories. Return `true` to copy the item, `false` to ignore it.
*/
filter?: CopyFilterSync | undefined;
}
export interface EnsureDirOptions {
mode?: number | undefined;
}
export interface MoveOptions {
/**
* Overwrite existing file or directory.
* @default false
*/
overwrite?: boolean | undefined;
/**
* Dereference symlinks.
* @default false
*/
dereference?: boolean | undefined;
}
export { JFReadOptions as JsonReadOptions, JFWriteOptions as JsonWriteOptions } from "jsonfile";
export type JsonOutputOptions = fs.WriteFileOptions & StringifyOptions;

View File

@@ -0,0 +1,84 @@
{
"name": "@types/fs-extra",
"version": "11.0.4",
"description": "TypeScript definitions for fs-extra",
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra",
"license": "MIT",
"contributors": [
{
"name": "Alan Agius",
"githubUsername": "alan-agius4",
"url": "https://github.com/alan-agius4"
},
{
"name": "midknight41",
"githubUsername": "midknight41",
"url": "https://github.com/midknight41"
},
{
"name": "Brendan Forster",
"githubUsername": "shiftkey",
"url": "https://github.com/shiftkey"
},
{
"name": "Mees van Dijk",
"githubUsername": "mees-",
"url": "https://github.com/mees-"
},
{
"name": "Justin Rockwood",
"githubUsername": "jrockwood",
"url": "https://github.com/jrockwood"
},
{
"name": "Sang Dang",
"githubUsername": "sangdth",
"url": "https://github.com/sangdth"
},
{
"name": "Florian Keller",
"githubUsername": "ffflorian",
"url": "https://github.com/ffflorian"
},
{
"name": "Piotr Błażejewicz",
"githubUsername": "peterblazejewicz",
"url": "https://github.com/peterblazejewicz"
},
{
"name": "Tiger Oakes",
"githubUsername": "NotWoods",
"url": "https://github.com/NotWoods"
},
{
"name": "BendingBender",
"githubUsername": "BendingBender",
"url": "https://github.com/BendingBender"
}
],
"main": "",
"types": "index.d.ts",
"exports": {
".": {
"types": "./index.d.ts"
},
"./esm": {
"types": {
"import": "./esm.d.mts"
}
},
"./package.json": "./package.json"
},
"repository": {
"type": "git",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
"directory": "types/fs-extra"
},
"scripts": {},
"dependencies": {
"@types/jsonfile": "*",
"@types/node": "*"
},
"typesPublisherContentHash": "2929b595f5fdba90096984c89127b1a93f583ee50680a2ad02fad15b1f571bb0",
"typeScriptVersion": "4.5"
}

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,15 @@
# Installation
> `npm install --save @types/jsonfile`
# Summary
This package contains type definitions for jsonfile (https://github.com/jprichardson/node-jsonfile#readme).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jsonfile.
### Additional Details
* Last updated: Tue, 07 Nov 2023 03:09:37 GMT
* Dependencies: [@types/node](https://npmjs.com/package/@types/node)
# Credits
These definitions were written by [Daniel Bowring](https://github.com/dbowring), [BendingBender](https://github.com/BendingBender), and [Piotr Błażejewicz](https://github.com/peterblazejewicz).

View File

@@ -0,0 +1,71 @@
/// <reference types="node"/>
import {
PathLike,
readFile as fsReadFile,
readFileSync as fsReadFileSync,
writeFile as fsWriteFile,
writeFileSync as fsWriteFileSync,
} from "fs";
import { Url } from "url";
export type Path = PathLike | Url;
export interface FS {
readFile: typeof fsReadFile;
readFileSync: typeof fsReadFileSync;
writeFile: typeof fsWriteFile;
writeFileSync: typeof fsWriteFileSync;
}
export type JFReadOptions =
| {
encoding?: string | null | undefined;
flag?: string | undefined;
throws?: boolean | undefined;
fs?: FS | undefined;
reviver?: ((key: any, value: any) => any) | undefined;
}
| string
| null
| undefined;
export type JFWriteOptions =
| {
encoding?: string | null | undefined;
mode?: string | number | undefined;
flag?: string | undefined;
fs?: FS | undefined;
EOL?: string | undefined;
spaces?: string | number | undefined;
replacer?: ((key: string, value: any) => any) | undefined;
}
| string
| null;
export type ReadCallback = (err: NodeJS.ErrnoException | null, data: any) => void;
export type WriteCallback = (err: NodeJS.ErrnoException | null) => void;
/**
* @see {@link https://github.com/jprichardson/node-jsonfile#readfilefilename-options-callback}
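 *
 * A minimal usage sketch (the file name is illustrative); with no callback,
 * the call resolves to the parsed object.
 *
 * @example
 * import * as jsonfile from 'jsonfile'
 *
 * jsonfile.readFile('./package.json')
 *   .then(pkg => console.log(pkg.name))
 *   .catch(err => console.error(err))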
*/
export function readFile(file: Path, options: JFReadOptions, callback: ReadCallback): void;
export function readFile(file: Path, callback: ReadCallback): void;
export function readFile(file: Path, options?: JFReadOptions): Promise<any>;
/**
* @see {@link https://github.com/jprichardson/node-jsonfile#readfilesyncfilename-options}
*/
export function readFileSync(file: Path, options?: JFReadOptions): any;
/**
* @see {@link https://github.com/jprichardson/node-jsonfile#writefilefilename-obj-options-callback}
*/
export function writeFile(file: Path, obj: any, options: JFWriteOptions, callback: WriteCallback): void;
export function writeFile(file: Path, obj: any, callback: WriteCallback): void;
export function writeFile(file: Path, obj: any, options?: JFWriteOptions): Promise<void>;
/**
* @see {@link https://github.com/jprichardson/node-jsonfile#writefilesyncfilename-obj-options}
*/
export function writeFileSync(file: Path, obj: any, options?: JFWriteOptions): void;

View File

@@ -0,0 +1,37 @@
{
"name": "@types/jsonfile",
"version": "6.1.4",
"description": "TypeScript definitions for jsonfile",
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jsonfile",
"license": "MIT",
"contributors": [
{
"name": "Daniel Bowring",
"githubUsername": "dbowring",
"url": "https://github.com/dbowring"
},
{
"name": "BendingBender",
"githubUsername": "BendingBender",
"url": "https://github.com/BendingBender"
},
{
"name": "Piotr Błażejewicz",
"githubUsername": "peterblazejewicz",
"url": "https://github.com/peterblazejewicz"
}
],
"main": "",
"types": "index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
"directory": "types/jsonfile"
},
"scripts": {},
"dependencies": {
"@types/node": "*"
},
"typesPublisherContentHash": "c4c437c24ccd22e0fd74368adceefc3c6ed726b74d1cc5188deeb5b0119b9523",
"typeScriptVersion": "4.5"
}

View File

@@ -0,0 +1,9 @@
export function stringify(obj: any, options?: StringifyOptions): string;
export function stripBom(content: string): string;
export interface StringifyOptions {
EOL?: string | undefined;
finalEOL?: boolean | undefined;
replacer?: ((key: string, value: any) => any) | undefined;
spaces?: string | number | undefined;
}

vscodeEvalExtension/node_modules/fs-extra/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
(The MIT License)
Copyright (c) 2011-2017 JP Richardson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

vscodeEvalExtension/node_modules/fs-extra/README.md generated vendored Normal file
View File

@@ -0,0 +1,292 @@
Node.js: fs-extra
=================
`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop-in replacement for `fs`.
[![npm Package](https://img.shields.io/npm/v/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
[![License](https://img.shields.io/npm/l/fs-extra.svg)](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE)
[![build status](https://img.shields.io/github/actions/workflow/status/jprichardson/node-fs-extra/ci.yml?branch=master)](https://github.com/jprichardson/node-fs-extra/actions/workflows/ci.yml?query=branch%3Amaster)
[![downloads per month](http://img.shields.io/npm/dm/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com)
Why?
----
I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects.
Installation
------------
npm install fs-extra
Usage
-----
### CommonJS
`fs-extra` is a drop-in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed.
You don't ever need to include the original `fs` module again:
```js
const fs = require('fs') // this is no longer necessary
```
you can now do this:
```js
const fs = require('fs-extra')
```
or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want
to name your `fs` variable `fse` like so:
```js
const fse = require('fs-extra')
```
you can also keep both, but it's redundant:
```js
const fs = require('fs')
const fse = require('fs-extra')
```
### ESM
There is also an `fs-extra/esm` import, which supports both default and named exports. However, note that `fs` methods are not included in `fs-extra/esm`; you still need to import `fs` and/or `fs/promises` separately:
```js
import { readFileSync } from 'fs'
import { readFile } from 'fs/promises'
import { outputFile, outputFileSync } from 'fs-extra/esm'
```
Default exports are supported:
```js
import fs from 'fs'
import fse from 'fs-extra/esm'
// fse.readFileSync is not a function; must use fs.readFileSync
```
but you probably want to just use regular `fs-extra` instead of `fs-extra/esm` for default exports:
```js
import fs from 'fs-extra'
// both fs and fs-extra methods are defined
```
Sync vs Async vs Async/Await
-------------
Most methods are async by default. All async methods will return a promise if the callback isn't passed.
Sync methods, on the other hand, will throw if an error occurs.
Async/await usage will also throw an error if one occurs.
Example:
```js
const fs = require('fs-extra')
// Async with promises:
fs.copy('/tmp/myfile', '/tmp/mynewfile')
.then(() => console.log('success!'))
.catch(err => console.error(err))
// Async with callbacks:
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
if (err) return console.error(err)
console.log('success!')
})
// Sync:
try {
fs.copySync('/tmp/myfile', '/tmp/mynewfile')
console.log('success!')
} catch (err) {
console.error(err)
}
// Async/Await:
async function copyFiles () {
try {
await fs.copy('/tmp/myfile', '/tmp/mynewfile')
console.log('success!')
} catch (err) {
console.error(err)
}
}
copyFiles()
```
Methods
-------
### Async
- [copy](docs/copy.md)
- [emptyDir](docs/emptyDir.md)
- [ensureFile](docs/ensureFile.md)
- [ensureDir](docs/ensureDir.md)
- [ensureLink](docs/ensureLink.md)
- [ensureSymlink](docs/ensureSymlink.md)
- [mkdirp](docs/ensureDir.md)
- [mkdirs](docs/ensureDir.md)
- [move](docs/move.md)
- [outputFile](docs/outputFile.md)
- [outputJson](docs/outputJson.md)
- [pathExists](docs/pathExists.md)
- [readJson](docs/readJson.md)
- [remove](docs/remove.md)
- [writeJson](docs/writeJson.md)
### Sync
- [copySync](docs/copy-sync.md)
- [emptyDirSync](docs/emptyDir-sync.md)
- [ensureFileSync](docs/ensureFile-sync.md)
- [ensureDirSync](docs/ensureDir-sync.md)
- [ensureLinkSync](docs/ensureLink-sync.md)
- [ensureSymlinkSync](docs/ensureSymlink-sync.md)
- [mkdirpSync](docs/ensureDir-sync.md)
- [mkdirsSync](docs/ensureDir-sync.md)
- [moveSync](docs/move-sync.md)
- [outputFileSync](docs/outputFile-sync.md)
- [outputJsonSync](docs/outputJson-sync.md)
- [pathExistsSync](docs/pathExists-sync.md)
- [readJsonSync](docs/readJson-sync.md)
- [removeSync](docs/remove-sync.md)
- [writeJsonSync](docs/writeJson-sync.md)
**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)
### What happened to `walk()` and `walkSync()`?
They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync).
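A minimal sketch of the replacement (assuming `klaw` has been installed with `npm install klaw`):
```js
const klaw = require('klaw')

// walk a directory tree and collect every path it emits
const items = []
klaw('/some/dir')
  .on('data', item => items.push(item.path))
  .on('error', err => console.error(err))
  .on('end', () => console.log(items))
```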
Third Party
-----------
### CLI
[fse-cli](https://www.npmjs.com/package/@atao60/fse-cli) allows you to run `fs-extra` from a console or from [npm](https://www.npmjs.com) scripts.
### TypeScript
If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra
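For example, a minimal sketch (assuming the `@types/fs-extra` package has been installed as a dev dependency):
```js
// npm install --save-dev @types/fs-extra
import fse from 'fs-extra'

fse.ensureDir('/tmp/some/dir')
  .then(() => console.log('success!'))
  .catch(err => console.error(err))
```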
### File / Directory Watching
If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar).
### Obtain Filesystem (Devices, Partitions) Information
[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system.
### Misc.
- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug).
- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls.
Hacking on fs-extra
-------------------
Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project
uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you,
you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`.
[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard)
What's needed?
- First, take a look at existing issues. Those are probably going to be where the priority lies.
- More tests for edge cases. Specifically on different platforms. There can never be enough tests.
- Improve test coverage.
Note: If you make any big changes, **you should definitely file an issue for discussion first.**
### Running the Test Suite
fs-extra contains hundreds of tests.
- `npm run lint`: runs the linter ([standard](http://standardjs.com/))
- `npm run unit`: runs the unit tests
- `npm run unit-esm`: runs tests for `fs-extra/esm` exports
- `npm test`: runs the linter and all tests
When running unit tests, set the environment variable `CROSS_DEVICE_PATH` to the absolute path of an empty directory on another device (like a thumb drive) to enable cross-device move tests.
### Windows
If you run the tests on Windows and receive a lot of symbolic link `EPERM` permission errors, it's
because on Windows you need elevated privileges to create symbolic links. You can add this permission to your Windows
account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7
However, I didn't have much luck doing this.
Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows.
I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive running the following command:
net use z: "\\vmware-host\Shared Folders"
I can then navigate to my `fs-extra` directory and run the tests.
Naming
------
I put a lot of thought into the naming of these functions. Inspired by @coolaj86's request. So he deserves much of the credit for raising the issue. See discussion(s) here:
* https://github.com/jprichardson/node-fs-extra/issues/2
* https://github.com/flatiron/utile/issues/11
* https://github.com/ryanmcgrath/wrench-js/issues/29
* https://github.com/substack/node-mkdirp/issues/17
First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with Node.js's own naming schemes.
For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc.
We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`?
My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's supposed to be contained in does not exist, then in most cases you'll want to create that too.
So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`.
Credit
------
`fs-extra` wouldn't be possible without using the modules from the following authors:
- [Isaac Shlueter](https://github.com/isaacs)
- [Charlie McConnel](https://github.com/avianflu)
- [James Halliday](https://github.com/substack)
- [Andrew Kelley](https://github.com/andrewrk)
License
-------
Licensed under MIT
Copyright (c) 2011-2017 [JP Richardson](https://github.com/jprichardson)
[1]: http://nodejs.org/docs/latest/api/fs.html
[jsonfile]: https://github.com/jprichardson/node-jsonfile

vscodeEvalExtension/node_modules/fs-extra/package.json generated vendored Normal file
View File

@@ -0,0 +1,71 @@
{
"name": "fs-extra",
"version": "11.2.0",
"description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as recursive mkdir, copy, and remove.",
"engines": {
"node": ">=14.14"
},
"homepage": "https://github.com/jprichardson/node-fs-extra",
"repository": {
"type": "git",
"url": "https://github.com/jprichardson/node-fs-extra"
},
"keywords": [
"fs",
"file",
"file system",
"copy",
"directory",
"extra",
"mkdirp",
"mkdir",
"mkdirs",
"recursive",
"json",
"read",
"write",
"extra",
"delete",
"remove",
"touch",
"create",
"text",
"output",
"move",
"promise"
],
"author": "JP Richardson <jprichardson@gmail.com>",
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"devDependencies": {
"klaw": "^2.1.1",
"klaw-sync": "^3.0.2",
"minimist": "^1.1.1",
"mocha": "^10.1.0",
"nyc": "^15.0.0",
"proxyquire": "^2.0.1",
"read-dir-files": "^0.1.1",
"standard": "^17.0.0"
},
"main": "./lib/index.js",
"exports": {
".": "./lib/index.js",
"./esm": "./lib/esm.mjs"
},
"files": [
"lib/",
"!lib/**/__tests__/"
],
"scripts": {
"lint": "standard",
"test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
"test": "npm run lint && npm run unit && npm run unit-esm",
"unit": "nyc node test.js",
"unit-esm": "node test.mjs"
},
"sideEffects": false
}

vscodeEvalExtension/node_modules/graceful-fs/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

vscodeEvalExtension/node_modules/graceful-fs/README.md generated vendored Normal file
View File

@@ -0,0 +1,143 @@
# graceful-fs
graceful-fs functions as a drop-in replacement for the fs module,
making various improvements.
The improvements are meant to normalize behavior across different
platforms and environments, and to make filesystem access more
resilient to errors.
## Improvements over [fs module](https://nodejs.org/api/fs.html)
* Queues up `open` and `readdir` calls, and retries them once
something closes if there is an EMFILE error from too many file
descriptors.
* Fixes `lchmod` for Node versions prior to 0.6.2.
* Implements `fs.lutimes` if possible. Otherwise it becomes a no-op.
* Ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
`lchown` if the user isn't root.
* Makes `lchmod` and `lchown` become no-ops, if not available.
* Retries reading a file if `read` results in an EAGAIN error.
* On Windows, retries renaming a file for up to one second if an `EACCES`
or `EPERM` error occurs, likely because antivirus software has locked
the directory.
## USAGE
```javascript
// use just like fs
var fs = require('graceful-fs')
// now go and do stuff with it...
fs.readFile('some-file-or-whatever', (err, data) => {
// Do stuff here.
})
```
## Sync methods
This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
methods. If you use sync methods which open file descriptors then you are
responsible for dealing with any errors.
This is a known limitation, not a bug.
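For example, a minimal sketch of guarding a sync call yourself (the file name is illustrative):
```javascript
var fs = require('graceful-fs')

try {
  // sync calls are not queued, so handle any error directly
  var data = fs.readFileSync('some-file-or-whatever')
  console.log(data.length)
} catch (err) {
  console.error(err)
}
```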
## Global Patching
If you want to patch the global fs module (or any other fs-like
module) you can do this:
```javascript
// Make sure to read the caveat below.
var realFs = require('fs')
var gracefulFs = require('graceful-fs')
gracefulFs.gracefulify(realFs)
```
This should only ever be done at the top-level application layer, in
order to delay on EMFILE errors from any fs-using dependencies. You
should **not** do this in a library, because it can cause unexpected
delays in other parts of the program.
## Changes
This module is fairly stable at this point, and used by a lot of
things. That being said, because it implements a subtle behavior
change in a core part of the node API, even modest changes can be
extremely breaking, and the versioning is thus biased towards
bumping the major when in doubt.
The main change between major versions has been switching between
providing a fully-patched `fs` module vs monkey-patching the node core
builtin, and the approach by which a non-monkey-patched `fs` was
created.
The goal is to trade `EMFILE` errors for slower fs operations. So, if
you try to open a zillion files, rather than crashing, `open`
operations will be queued up and wait for something else to `close`.
There are advantages to each approach. Monkey-patching the fs means
that no `EMFILE` errors can possibly occur anywhere in your
application, because everything is using the same core `fs` module,
which is patched. However, it can also obviously cause undesirable
side-effects, especially if the module is loaded multiple times.
Implementing a separate-but-identical patched `fs` module is more
surgical (and doesn't run the risk of patching multiple times), but
also imposes the challenge of keeping in sync with the core module.
The current approach loads the `fs` module, and then creates a
lookalike object that has all the same methods, except a few that are
patched. It is safe to use in all versions of Node from 0.8 through
7.0.
### v4
* Do not monkey-patch the fs module. This module may now be used as a
drop-in dep, and users can opt into monkey-patching the fs builtin
if their app requires it.
### v3
* Monkey-patch fs, because the eval approach no longer works on recent
node.
* fixed possible type-error throw if rename fails on windows
* verify that we *never* get EMFILE errors
* Ignore ENOSYS from chmod/chown
* clarify that graceful-fs must be used as a drop-in
### v2.1.0
* Use eval rather than monkey-patching fs.
* readdir: Always sort the results
* win32: requeue a file if error has an OK status
### v2.0
* A return to monkey patching
* wrap process.cwd
### v1.1
* wrap readFile
* Wrap fs.writeFile.
* readdir protection
* Don't clobber the fs builtin
* Handle fs.read EAGAIN errors by trying again
* Expose the curOpen counter
* No-op lchown/lchmod if not implemented
* fs.rename patch only for win32
* Patch fs.rename to handle AV software on Windows
* Close #4 Chown should not fail on einval or eperm if non-root
* Fix isaacs/fstream#1 Only wrap fs one time
* Fix #3 Start at 1024 max files, then back off on EMFILE
* lutimes that doesn't blow up on Linux
* A full on-rewrite using a queue instead of just swallowing the EMFILE error
* Wrap Read/Write streams as well
### 1.0
* Update engines for node 0.6
* Be lstat-graceful on Windows
* first

vscodeEvalExtension/node_modules/graceful-fs/clone.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
'use strict'
module.exports = clone
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
return obj.__proto__
}
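// Shallow-clone `obj`: keep its prototype and copy every own property descriptor,
// so the patched copy of `fs` can be changed without mutating the real module.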
function clone (obj) {
if (obj === null || typeof obj !== 'object')
return obj
if (obj instanceof Object)
var copy = { __proto__: getPrototypeOf(obj) }
else
var copy = Object.create(null)
Object.getOwnPropertyNames(obj).forEach(function (key) {
Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
})
return copy
}

View File

@@ -0,0 +1,448 @@
var fs = require('fs')
var polyfills = require('./polyfills.js')
var legacy = require('./legacy-streams.js')
var clone = require('./clone.js')
var util = require('util')
/* istanbul ignore next - node 0.x polyfill */
var gracefulQueue
var previousSymbol
/* istanbul ignore else - node 0.x polyfill */
if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
gracefulQueue = Symbol.for('graceful-fs.queue')
// This is used in testing by future versions
previousSymbol = Symbol.for('graceful-fs.previous')
} else {
gracefulQueue = '___graceful-fs.queue'
previousSymbol = '___graceful-fs.previous'
}
function noop () {}
function publishQueue(context, queue) {
Object.defineProperty(context, gracefulQueue, {
get: function() {
return queue
}
})
}
var debug = noop
if (util.debuglog)
debug = util.debuglog('gfs4')
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
debug = function() {
var m = util.format.apply(util, arguments)
m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
console.error(m)
}
// Once time initialization
if (!fs[gracefulQueue]) {
// This queue can be shared by multiple loaded instances
var queue = global[gracefulQueue] || []
publishQueue(fs, queue)
// Patch fs.close/closeSync to shared queue version, because we need
// to retry() whenever a close happens *anywhere* in the program.
// This is essential when multiple graceful-fs instances are
// in play at the same time.
fs.close = (function (fs$close) {
function close (fd, cb) {
return fs$close.call(fs, fd, function (err) {
// This function uses the graceful-fs shared queue
if (!err) {
resetQueue()
}
if (typeof cb === 'function')
cb.apply(this, arguments)
})
}
Object.defineProperty(close, previousSymbol, {
value: fs$close
})
return close
})(fs.close)
fs.closeSync = (function (fs$closeSync) {
function closeSync (fd) {
// This function uses the graceful-fs shared queue
fs$closeSync.apply(fs, arguments)
resetQueue()
}
Object.defineProperty(closeSync, previousSymbol, {
value: fs$closeSync
})
return closeSync
})(fs.closeSync)
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
process.on('exit', function() {
debug(fs[gracefulQueue])
require('assert').equal(fs[gracefulQueue].length, 0)
})
}
}
if (!global[gracefulQueue]) {
publishQueue(global, fs[gracefulQueue]);
}
module.exports = patch(clone(fs))
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
module.exports = patch(fs)
fs.__patched = true;
}
function patch (fs) {
// Everything that references the open() function needs to be in here
polyfills(fs)
fs.gracefulify = patch
fs.createReadStream = createReadStream
fs.createWriteStream = createWriteStream
var fs$readFile = fs.readFile
fs.readFile = readFile
function readFile (path, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$readFile(path, options, cb)
function go$readFile (path, options, cb, startTime) {
return fs$readFile(path, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$writeFile = fs.writeFile
fs.writeFile = writeFile
function writeFile (path, data, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$writeFile(path, data, options, cb)
function go$writeFile (path, data, options, cb, startTime) {
return fs$writeFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$appendFile = fs.appendFile
if (fs$appendFile)
fs.appendFile = appendFile
function appendFile (path, data, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$appendFile(path, data, options, cb)
function go$appendFile (path, data, options, cb, startTime) {
return fs$appendFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$copyFile = fs.copyFile
if (fs$copyFile)
fs.copyFile = copyFile
function copyFile (src, dest, flags, cb) {
if (typeof flags === 'function') {
cb = flags
flags = 0
}
return go$copyFile(src, dest, flags, cb)
function go$copyFile (src, dest, flags, cb, startTime) {
return fs$copyFile(src, dest, flags, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$readdir = fs.readdir
fs.readdir = readdir
var noReaddirOptionVersions = /^v[0-5]\./
function readdir (path, options, cb) {
if (typeof options === 'function')
cb = options, options = null
var go$readdir = noReaddirOptionVersions.test(process.version)
? function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, fs$readdirCallback(
path, options, cb, startTime
))
}
: function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, options, fs$readdirCallback(
path, options, cb, startTime
))
}
return go$readdir(path, options, cb)
function fs$readdirCallback (path, options, cb, startTime) {
return function (err, files) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([
go$readdir,
[path, options, cb],
err,
startTime || Date.now(),
Date.now()
])
else {
if (files && files.sort)
files.sort()
if (typeof cb === 'function')
cb.call(this, err, files)
}
}
}
}
if (process.version.substr(0, 4) === 'v0.8') {
var legStreams = legacy(fs)
ReadStream = legStreams.ReadStream
WriteStream = legStreams.WriteStream
}
var fs$ReadStream = fs.ReadStream
if (fs$ReadStream) {
ReadStream.prototype = Object.create(fs$ReadStream.prototype)
ReadStream.prototype.open = ReadStream$open
}
var fs$WriteStream = fs.WriteStream
if (fs$WriteStream) {
WriteStream.prototype = Object.create(fs$WriteStream.prototype)
WriteStream.prototype.open = WriteStream$open
}
Object.defineProperty(fs, 'ReadStream', {
get: function () {
return ReadStream
},
set: function (val) {
ReadStream = val
},
enumerable: true,
configurable: true
})
Object.defineProperty(fs, 'WriteStream', {
get: function () {
return WriteStream
},
set: function (val) {
WriteStream = val
},
enumerable: true,
configurable: true
})
// legacy names
var FileReadStream = ReadStream
Object.defineProperty(fs, 'FileReadStream', {
get: function () {
return FileReadStream
},
set: function (val) {
FileReadStream = val
},
enumerable: true,
configurable: true
})
var FileWriteStream = WriteStream
Object.defineProperty(fs, 'FileWriteStream', {
get: function () {
return FileWriteStream
},
set: function (val) {
FileWriteStream = val
},
enumerable: true,
configurable: true
})
function ReadStream (path, options) {
if (this instanceof ReadStream)
return fs$ReadStream.apply(this, arguments), this
else
return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
}
function ReadStream$open () {
var that = this
open(that.path, that.flags, that.mode, function (err, fd) {
if (err) {
if (that.autoClose)
that.destroy()
that.emit('error', err)
} else {
that.fd = fd
that.emit('open', fd)
that.read()
}
})
}
function WriteStream (path, options) {
if (this instanceof WriteStream)
return fs$WriteStream.apply(this, arguments), this
else
return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
}
function WriteStream$open () {
var that = this
open(that.path, that.flags, that.mode, function (err, fd) {
if (err) {
that.destroy()
that.emit('error', err)
} else {
that.fd = fd
that.emit('open', fd)
}
})
}
function createReadStream (path, options) {
return new fs.ReadStream(path, options)
}
function createWriteStream (path, options) {
return new fs.WriteStream(path, options)
}
var fs$open = fs.open
fs.open = open
function open (path, flags, mode, cb) {
if (typeof mode === 'function')
cb = mode, mode = null
return go$open(path, flags, mode, cb)
function go$open (path, flags, mode, cb, startTime) {
return fs$open(path, flags, mode, function (err, fd) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
return fs
}
function enqueue (elem) {
debug('ENQUEUE', elem[0].name, elem[1])
fs[gracefulQueue].push(elem)
retry()
}
// keep track of the timeout between retry() calls
var retryTimer
// reset the startTime and lastTime to now
// this resets the start of the 60 second overall timeout as well as the
// delay between attempts so that we'll retry these jobs sooner
function resetQueue () {
var now = Date.now()
for (var i = 0; i < fs[gracefulQueue].length; ++i) {
// entries that are only a length of 2 are from an older version, don't
// bother modifying those since they'll be retried anyway.
if (fs[gracefulQueue][i].length > 2) {
fs[gracefulQueue][i][3] = now // startTime
fs[gracefulQueue][i][4] = now // lastTime
}
}
// call retry to make sure we're actively processing the queue
retry()
}
function retry () {
// clear the timer and remove it to help prevent unintended concurrency
clearTimeout(retryTimer)
retryTimer = undefined
if (fs[gracefulQueue].length === 0)
return
var elem = fs[gracefulQueue].shift()
var fn = elem[0]
var args = elem[1]
// these items may be unset if they were added by an older graceful-fs
var err = elem[2]
var startTime = elem[3]
var lastTime = elem[4]
// if we don't have a startTime we have no way of knowing if we've waited
// long enough, so go ahead and retry this item now
if (startTime === undefined) {
debug('RETRY', fn.name, args)
fn.apply(null, args)
} else if (Date.now() - startTime >= 60000) {
// it's been more than 60 seconds total, bail now
debug('TIMEOUT', fn.name, args)
var cb = args.pop()
if (typeof cb === 'function')
cb.call(null, err)
} else {
// the amount of time between the last attempt and right now
var sinceAttempt = Date.now() - lastTime
// the amount of time between when we first tried, and when we last tried
// rounded up to at least 1
var sinceStart = Math.max(lastTime - startTime, 1)
// backoff. wait longer than the total time we've been retrying, but only
// up to a maximum of 100ms
var desiredDelay = Math.min(sinceStart * 1.2, 100)
// it's been long enough since the last retry, do it again
if (sinceAttempt >= desiredDelay) {
debug('RETRY', fn.name, args)
fn.apply(null, args.concat([startTime]))
} else {
// if we can't do this job yet, push it to the end of the queue
// and let the next iteration check again
fs[gracefulQueue].push(elem)
}
}
// schedule our next run if one isn't already scheduled
if (retryTimer === undefined) {
retryTimer = setTimeout(retry, 0)
}
}
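The retry queue above is what lets graceful-fs absorb `EMFILE`/`ENFILE` bursts: failed calls are re-queued and retried with a capped backoff (at most 100ms between attempts, giving up after 60 seconds). A minimal consumer-side sketch of that behaviour, not part of the vendored file and assuming `graceful-fs` is installed:

```js
// Opening far more files than the descriptor limit allows would normally
// surface EMFILE errors; with graceful-fs the failed opens are enqueued by
// the code above and retried, so every read eventually completes.
var gfs = require('graceful-fs')
var paths = new Array(10000).fill(__filename)   // any readable file works
var done = 0
paths.forEach(function (p) {
  gfs.readFile(p, 'utf8', function (err) {
    if (err) throw err            // EMFILE/ENFILE are retried internally, not surfaced
    if (++done === paths.length) console.log('all reads finished')
  })
})
```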

View File

@@ -0,0 +1,118 @@
var Stream = require('stream').Stream
module.exports = legacy
function legacy (fs) {
return {
ReadStream: ReadStream,
WriteStream: WriteStream
}
function ReadStream (path, options) {
if (!(this instanceof ReadStream)) return new ReadStream(path, options);
Stream.call(this);
var self = this;
this.path = path;
this.fd = null;
this.readable = true;
this.paused = false;
this.flags = 'r';
this.mode = 438; /*=0666*/
this.bufferSize = 64 * 1024;
options = options || {};
// Mixin options into this
var keys = Object.keys(options);
for (var index = 0, length = keys.length; index < length; index++) {
var key = keys[index];
this[key] = options[key];
}
if (this.encoding) this.setEncoding(this.encoding);
if (this.start !== undefined) {
if ('number' !== typeof this.start) {
throw TypeError('start must be a Number');
}
if (this.end === undefined) {
this.end = Infinity;
} else if ('number' !== typeof this.end) {
throw TypeError('end must be a Number');
}
if (this.start > this.end) {
throw new Error('start must be <= end');
}
this.pos = this.start;
}
if (this.fd !== null) {
process.nextTick(function() {
self._read();
});
return;
}
fs.open(this.path, this.flags, this.mode, function (err, fd) {
if (err) {
self.emit('error', err);
self.readable = false;
return;
}
self.fd = fd;
self.emit('open', fd);
self._read();
})
}
function WriteStream (path, options) {
if (!(this instanceof WriteStream)) return new WriteStream(path, options);
Stream.call(this);
this.path = path;
this.fd = null;
this.writable = true;
this.flags = 'w';
this.encoding = 'binary';
this.mode = 438; /*=0666*/
this.bytesWritten = 0;
options = options || {};
// Mixin options into this
var keys = Object.keys(options);
for (var index = 0, length = keys.length; index < length; index++) {
var key = keys[index];
this[key] = options[key];
}
if (this.start !== undefined) {
if ('number' !== typeof this.start) {
throw TypeError('start must be a Number');
}
if (this.start < 0) {
throw new Error('start must be >= zero');
}
this.pos = this.start;
}
this.busy = false;
this._queue = [];
if (this.fd === null) {
this._open = fs.open;
this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
this.flush();
}
}
}

View File

@@ -0,0 +1,53 @@
{
"name": "graceful-fs",
"description": "A drop-in replacement for fs, making various improvements.",
"version": "4.2.11",
"repository": {
"type": "git",
"url": "https://github.com/isaacs/node-graceful-fs"
},
"main": "graceful-fs.js",
"directories": {
"test": "test"
},
"scripts": {
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags",
"test": "nyc --silent node test.js | tap -c -",
"posttest": "nyc report"
},
"keywords": [
"fs",
"module",
"reading",
"retry",
"retries",
"queue",
"error",
"errors",
"handling",
"EMFILE",
"EAGAIN",
"EINVAL",
"EPERM",
"EACCESS"
],
"license": "ISC",
"devDependencies": {
"import-fresh": "^2.0.0",
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
"tap": "^16.3.4"
},
"files": [
"fs.js",
"graceful-fs.js",
"legacy-streams.js",
"polyfills.js",
"clone.js"
],
"tap": {
"reporter": "classic"
}
}

View File

@@ -0,0 +1,355 @@
var constants = require('constants')
var origCwd = process.cwd
var cwd = null
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
process.cwd = function() {
if (!cwd)
cwd = origCwd.call(process)
return cwd
}
try {
process.cwd()
} catch (er) {}
// This check is needed until node.js 12 is required
if (typeof process.chdir === 'function') {
var chdir = process.chdir
process.chdir = function (d) {
cwd = null
chdir.call(process, d)
}
if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
}
module.exports = patch
function patch (fs) {
// (re-)implement some things that are known busted or missing.
// lchmod, broken prior to 0.6.2
// back-port the fix here.
if (constants.hasOwnProperty('O_SYMLINK') &&
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
patchLchmod(fs)
}
// lutimes implementation, or no-op
if (!fs.lutimes) {
patchLutimes(fs)
}
// https://github.com/isaacs/node-graceful-fs/issues/4
// Chown should not fail on einval or eperm if non-root.
// It should not fail on enosys ever, as this just indicates
// that a fs doesn't support the intended operation.
fs.chown = chownFix(fs.chown)
fs.fchown = chownFix(fs.fchown)
fs.lchown = chownFix(fs.lchown)
fs.chmod = chmodFix(fs.chmod)
fs.fchmod = chmodFix(fs.fchmod)
fs.lchmod = chmodFix(fs.lchmod)
fs.chownSync = chownFixSync(fs.chownSync)
fs.fchownSync = chownFixSync(fs.fchownSync)
fs.lchownSync = chownFixSync(fs.lchownSync)
fs.chmodSync = chmodFixSync(fs.chmodSync)
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
fs.stat = statFix(fs.stat)
fs.fstat = statFix(fs.fstat)
fs.lstat = statFix(fs.lstat)
fs.statSync = statFixSync(fs.statSync)
fs.fstatSync = statFixSync(fs.fstatSync)
fs.lstatSync = statFixSync(fs.lstatSync)
// if lchmod/lchown do not exist, then make them no-ops
if (fs.chmod && !fs.lchmod) {
fs.lchmod = function (path, mode, cb) {
if (cb) process.nextTick(cb)
}
fs.lchmodSync = function () {}
}
if (fs.chown && !fs.lchown) {
fs.lchown = function (path, uid, gid, cb) {
if (cb) process.nextTick(cb)
}
fs.lchownSync = function () {}
}
// on Windows, A/V software can lock the directory, causing this
// to fail with an EACCES or EPERM if the directory contains newly
// created files. Try again on failure, for up to 60 seconds.
// Set the timeout this long because some Windows Anti-Virus, such as Parity
// bit9, may lock files for up to a minute, causing npm package install
// failures. Also, take care to yield the scheduler. Windows scheduling gives
// CPU to a busy looping process, which can cause the program causing the lock
// contention to be starved of CPU by node, so the contention doesn't resolve.
if (platform === "win32") {
fs.rename = typeof fs.rename !== 'function' ? fs.rename
: (function (fs$rename) {
function rename (from, to, cb) {
var start = Date.now()
var backoff = 0;
fs$rename(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
&& Date.now() - start < 60000) {
setTimeout(function() {
fs.stat(to, function (stater, st) {
if (stater && stater.code === "ENOENT")
fs$rename(from, to, CB);
else
cb(er)
})
}, backoff)
if (backoff < 100)
backoff += 10;
return;
}
if (cb) cb(er)
})
}
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
return rename
})(fs.rename)
}
// if read() returns EAGAIN, then just try it again.
fs.read = typeof fs.read !== 'function' ? fs.read
: (function (fs$read) {
function read (fd, buffer, offset, length, position, callback_) {
var callback
if (callback_ && typeof callback_ === 'function') {
var eagCounter = 0
callback = function (er, _, __) {
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
}
callback_.apply(this, arguments)
}
}
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
}
// This ensures `util.promisify` works as it does for native `fs.read`.
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
return read
})(fs.read)
fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
: (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
var eagCounter = 0
while (true) {
try {
return fs$readSync.call(fs, fd, buffer, offset, length, position)
} catch (er) {
if (er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
continue
}
throw er
}
}
}})(fs.readSync)
function patchLchmod (fs) {
fs.lchmod = function (path, mode, callback) {
fs.open( path
, constants.O_WRONLY | constants.O_SYMLINK
, mode
, function (err, fd) {
if (err) {
if (callback) callback(err)
return
}
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
fs.fchmod(fd, mode, function (err) {
fs.close(fd, function(err2) {
if (callback) callback(err || err2)
})
})
})
}
fs.lchmodSync = function (path, mode) {
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
var threw = true
var ret
try {
ret = fs.fchmodSync(fd, mode)
threw = false
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
} else {
fs.closeSync(fd)
}
}
return ret
}
}
function patchLutimes (fs) {
if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
if (er) {
if (cb) cb(er)
return
}
fs.futimes(fd, at, mt, function (er) {
fs.close(fd, function (er2) {
if (cb) cb(er || er2)
})
})
})
}
fs.lutimesSync = function (path, at, mt) {
var fd = fs.openSync(path, constants.O_SYMLINK)
var ret
var threw = true
try {
ret = fs.futimesSync(fd, at, mt)
threw = false
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
} else {
fs.closeSync(fd)
}
}
return ret
}
} else if (fs.futimes) {
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
fs.lutimesSync = function () {}
}
}
function chmodFix (orig) {
if (!orig) return orig
return function (target, mode, cb) {
return orig.call(fs, target, mode, function (er) {
if (chownErOk(er)) er = null
if (cb) cb.apply(this, arguments)
})
}
}
function chmodFixSync (orig) {
if (!orig) return orig
return function (target, mode) {
try {
return orig.call(fs, target, mode)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
function chownFix (orig) {
if (!orig) return orig
return function (target, uid, gid, cb) {
return orig.call(fs, target, uid, gid, function (er) {
if (chownErOk(er)) er = null
if (cb) cb.apply(this, arguments)
})
}
}
function chownFixSync (orig) {
if (!orig) return orig
return function (target, uid, gid) {
try {
return orig.call(fs, target, uid, gid)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
function statFix (orig) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
return function (target, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
function callback (er, stats) {
if (stats) {
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
}
if (cb) cb.apply(this, arguments)
}
return options ? orig.call(fs, target, options, callback)
: orig.call(fs, target, callback)
}
}
function statFixSync (orig) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
return function (target, options) {
var stats = options ? orig.call(fs, target, options)
: orig.call(fs, target)
if (stats) {
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
}
return stats;
}
}
// ENOSYS means that the fs doesn't support the op. Just ignore
// that, because it doesn't matter.
//
// if there's no getuid, or if getuid() is something other
// than 0, and the error is EINVAL or EPERM, then just ignore
// it.
//
// This specific case is a silent failure in cp, install, tar,
// and most other unix tools that manage permissions.
//
// When running as root, or if other types of errors are
// encountered, then it's strict.
function chownErOk (er) {
if (!er)
return true
if (er.code === "ENOSYS")
return true
var nonroot = !process.getuid || process.getuid() !== 0
if (nonroot) {
if (er.code === "EINVAL" || er.code === "EPERM")
return true
}
return false
}
}
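A small sketch of the effect of `chownFix`/`chownErOk` above, not part of the vendored file; it assumes a Linux host where `/etc/hostname` exists and the process is not running as root:

```js
// As a non-root user, chown on a file you do not own fails with EPERM, but the
// patched wrapper treats that as success (chownErOk returns true), so err is null.
var gfs = require('graceful-fs')   // graceful-fs applies these polyfills to its fs copy
gfs.chown('/etc/hostname', 0, 0, function (err) {
  console.log(err)                 // expected: null when not running as root
})
```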

171
vscodeEvalExtension/node_modules/jsonfile/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,171 @@
6.1.0 / 2020-10-31
------------------
- Add `finalEOL` option to disable writing final EOL ([#115](https://github.com/jprichardson/node-jsonfile/issues/115), [#137](https://github.com/jprichardson/node-jsonfile/pull/137))
- Update dependency ([#138](https://github.com/jprichardson/node-jsonfile/pull/138))
6.0.1 / 2020-03-07
------------------
- Update dependency ([#130](https://github.com/jprichardson/node-jsonfile/pull/130))
- Fix code style ([#129](https://github.com/jprichardson/node-jsonfile/pull/129))
6.0.0 / 2020-02-24
------------------
- **BREAKING:** Drop support for Node 6 & 8 ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- **BREAKING:** Do not allow passing `null` as options to `readFile()` or `writeFile()` ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- Refactor internals ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
5.0.0 / 2018-09-08
------------------
- **BREAKING:** Drop Node 4 support
- **BREAKING:** If no callback is passed to an asynchronous method, a promise is now returned ([#109](https://github.com/jprichardson/node-jsonfile/pull/109))
- Cleanup docs
4.0.0 / 2017-07-12
------------------
- **BREAKING:** Remove global `spaces` option.
- **BREAKING:** Drop support for Node 0.10, 0.12, and io.js.
- Remove undocumented `passParsingErrors` option.
- Added `EOL` override option to `writeFile` when using `spaces`. [#89]
3.0.1 / 2017-07-05
------------------
- Fixed bug in `writeFile` when there was a serialization error & no callback was passed. In previous versions, an empty file would be written; now no file is written.
3.0.0 / 2017-04-25
------------------
- Changed behavior of `throws` option for `readFileSync`; now does not throw filesystem errors when `throws` is `false`
2.4.0 / 2016-09-15
------------------
### Changed
- added optional support for `graceful-fs` [#62]
2.3.1 / 2016-05-13
------------------
- fix to support BOM. [#45][#45]
2.3.0 / 2016-04-16
------------------
- add `throws` to `readFile()`. See [#39][#39]
- add support for any arbitrary `fs` module. Useful with [mock-fs](https://www.npmjs.com/package/mock-fs)
2.2.3 / 2015-10-14
------------------
- include file name in parse error. See: https://github.com/jprichardson/node-jsonfile/pull/34
2.2.2 / 2015-09-16
------------------
- split out tests into separate files
- fixed `throws` when set to `true` in `readFileSync()`. See: https://github.com/jprichardson/node-jsonfile/pull/33
2.2.1 / 2015-06-25
------------------
- fixed regression when passing in string as encoding for options in `writeFile()` and `writeFileSync()`. See: https://github.com/jprichardson/node-jsonfile/issues/28
2.2.0 / 2015-06-25
------------------
- added `options.spaces` to `writeFile()` and `writeFileSync()`
2.1.2 / 2015-06-22
------------------
- fixed if passed `readFileSync(file, 'utf8')`. See: https://github.com/jprichardson/node-jsonfile/issues/25
2.1.1 / 2015-06-19
------------------
- fixed regressions if `null` is passed for options. See: https://github.com/jprichardson/node-jsonfile/issues/24
2.1.0 / 2015-06-19
------------------
- cleanup: JavaScript Standard Style, rename files, dropped terst for assert
- methods now support JSON revivers/replacers
2.0.1 / 2015-05-24
------------------
- update license attribute https://github.com/jprichardson/node-jsonfile/pull/21
2.0.0 / 2014-07-28
------------------
* added `\n` to end of file on write. [#14](https://github.com/jprichardson/node-jsonfile/pull/14)
* added `options.throws` to `readFileSync()`
* dropped support for Node v0.8
1.2.0 / 2014-06-29
------------------
* removed semicolons
* bugfix: passed `options` to `fs.readFile` and `fs.readFileSync`. This technically changes behavior, but
changes it according to docs. [#12][#12]
1.1.1 / 2013-11-11
------------------
* fixed catching of callback bug (ffissore / #5)
1.1.0 / 2013-10-11
------------------
* added `options` param to methods, (seanodell / #4)
1.0.1 / 2013-09-05
------------------
* removed `homepage` field from package.json to remove NPM warning
1.0.0 / 2013-06-28
------------------
* added `.npmignore`, #1
* changed spacing default from `4` to `2` to follow Node conventions
0.0.1 / 2012-09-10
------------------
* Initial release.
[#89]: https://github.com/jprichardson/node-jsonfile/pull/89
[#45]: https://github.com/jprichardson/node-jsonfile/issues/45 "Reading of UTF8-encoded (w/ BOM) files fails"
[#44]: https://github.com/jprichardson/node-jsonfile/issues/44 "Extra characters in written file"
[#43]: https://github.com/jprichardson/node-jsonfile/issues/43 "Prettyfy json when written to file"
[#42]: https://github.com/jprichardson/node-jsonfile/pull/42 "Moved fs.readFileSync within the try/catch"
[#41]: https://github.com/jprichardson/node-jsonfile/issues/41 "Linux: Hidden file not working"
[#40]: https://github.com/jprichardson/node-jsonfile/issues/40 "autocreate folder doesn't work from Path-value"
[#39]: https://github.com/jprichardson/node-jsonfile/pull/39 "Add `throws` option for readFile (async)"
[#38]: https://github.com/jprichardson/node-jsonfile/pull/38 "Update README.md writeFile[Sync] signature"
[#37]: https://github.com/jprichardson/node-jsonfile/pull/37 "support append file"
[#36]: https://github.com/jprichardson/node-jsonfile/pull/36 "Add typescript definition file."
[#35]: https://github.com/jprichardson/node-jsonfile/pull/35 "Add typescript definition file."
[#34]: https://github.com/jprichardson/node-jsonfile/pull/34 "readFile JSON parse error includes filename"
[#33]: https://github.com/jprichardson/node-jsonfile/pull/33 "fix throw->throws typo in readFileSync()"
[#32]: https://github.com/jprichardson/node-jsonfile/issues/32 "readFile & readFileSync can possible have strip-comments as an option?"
[#31]: https://github.com/jprichardson/node-jsonfile/pull/31 "[Modify] Support string include is unicode escape string"
[#30]: https://github.com/jprichardson/node-jsonfile/issues/30 "How to use Jsonfile package in Meteor.js App?"
[#29]: https://github.com/jprichardson/node-jsonfile/issues/29 "writefile callback if no error?"
[#28]: https://github.com/jprichardson/node-jsonfile/issues/28 "writeFile options argument broken "
[#27]: https://github.com/jprichardson/node-jsonfile/pull/27 "Use svg instead of png to get better image quality"
[#26]: https://github.com/jprichardson/node-jsonfile/issues/26 "Breaking change to fs-extra"
[#25]: https://github.com/jprichardson/node-jsonfile/issues/25 "support string encoding param for read methods"
[#24]: https://github.com/jprichardson/node-jsonfile/issues/24 "readFile: Passing in null options with a callback throws an error"
[#23]: https://github.com/jprichardson/node-jsonfile/pull/23 "Add appendFile and appendFileSync"
[#22]: https://github.com/jprichardson/node-jsonfile/issues/22 "Default value for spaces in readme.md is outdated"
[#21]: https://github.com/jprichardson/node-jsonfile/pull/21 "Update license attribute"
[#20]: https://github.com/jprichardson/node-jsonfile/issues/20 "Add simple caching functionallity"
[#19]: https://github.com/jprichardson/node-jsonfile/pull/19 "Add appendFileSync method"
[#18]: https://github.com/jprichardson/node-jsonfile/issues/18 "Add updateFile and updateFileSync methods"
[#17]: https://github.com/jprichardson/node-jsonfile/issues/17 "seem read & write sync has sequentially problem"
[#16]: https://github.com/jprichardson/node-jsonfile/pull/16 "export spaces defaulted to null"
[#15]: https://github.com/jprichardson/node-jsonfile/issues/15 "`jsonfile.spaces` should default to `null`"
[#14]: https://github.com/jprichardson/node-jsonfile/pull/14 "Add EOL at EOF"
[#13]: https://github.com/jprichardson/node-jsonfile/issues/13 "Add a final newline"
[#12]: https://github.com/jprichardson/node-jsonfile/issues/12 "readFile doesn't accept options"
[#11]: https://github.com/jprichardson/node-jsonfile/pull/11 "Added try,catch to readFileSync"
[#10]: https://github.com/jprichardson/node-jsonfile/issues/10 "No output or error from writeFile"
[#9]: https://github.com/jprichardson/node-jsonfile/pull/9 "Change 'js' to 'jf' in example."
[#8]: https://github.com/jprichardson/node-jsonfile/pull/8 "Updated forgotten module.exports to me."
[#7]: https://github.com/jprichardson/node-jsonfile/pull/7 "Add file name in error message"
[#6]: https://github.com/jprichardson/node-jsonfile/pull/6 "Use graceful-fs when possible"
[#5]: https://github.com/jprichardson/node-jsonfile/pull/5 "Jsonfile doesn't behave nicely when used inside a test suite."
[#4]: https://github.com/jprichardson/node-jsonfile/pull/4 "Added options parameter to writeFile and writeFileSync"
[#3]: https://github.com/jprichardson/node-jsonfile/issues/3 "test2"
[#2]: https://github.com/jprichardson/node-jsonfile/issues/2 "homepage field must be a string url. Deleted."
[#1]: https://github.com/jprichardson/node-jsonfile/pull/1 "adding an `.npmignore` file"

15
vscodeEvalExtension/node_modules/jsonfile/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
(The MIT License)
Copyright (c) 2012-2015, JP Richardson <jprichardson@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

230
vscodeEvalExtension/node_modules/jsonfile/README.md generated vendored Normal file
View File

@@ -0,0 +1,230 @@
Node.js - jsonfile
================
Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._
[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile)
[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile)
[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-jsonfile/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master)
<a href="https://github.com/feross/standard"><img src="https://cdn.rawgit.com/feross/standard/master/sticker.svg" alt="Standard JavaScript" width="100"></a>
Why?
----
Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying.
Installation
------------
npm install --save jsonfile
API
---
* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)
----
### readFile(filename, [options], callback)
`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
If `false`, `null` is passed to the callback for the object instead.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file, function (err, obj) {
if (err) console.error(err)
console.dir(obj)
})
```
You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file)
.then(obj => console.dir(obj))
.catch(error => console.error(error))
```
----
### readFileSync(filename, [options])
`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
console.dir(jsonfile.readFileSync(file))
```
----
### writeFile(filename, obj, [options], callback)
`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). You can also pass in `spaces`, override the `EOL` string, or set the `finalEOL` flag to `false` to omit the trailing `EOL` at the end of the file.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, function (err) {
if (err) console.error(err)
})
```
Or use with promises as follows:
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj)
.then(res => {
console.log('Write complete')
})
.catch(error => console.error(error))
```
**formatting with spaces:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
if (err) console.error(err)
})
```
**overriding EOL:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
if (err) console.error(err)
})
```
**disabling the EOL at the end of file:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2, finalEOL: false }, function (err) {
if (err) console.log(err)
})
```
**appending to an existing JSON file:**
You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
if (err) console.error(err)
})
```
----
### writeFileSync(filename, obj, [options])
`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). You can also pass in `spaces`, override the `EOL` string, or set the `finalEOL` flag to `false` to omit the trailing `EOL` at the end of the file.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj)
```
**formatting with spaces:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2 })
```
**overriding EOL:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
```
**disabling the EOL at the end of file:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2, finalEOL: false })
```
**appending to an existing JSON file:**
You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { flag: 'a' })
```
License
-------
(MIT License)
Copyright 2012-2016, JP Richardson <jprichardson@gmail.com>

88
vscodeEvalExtension/node_modules/jsonfile/index.js generated vendored Normal file
View File

@@ -0,0 +1,88 @@
let _fs
try {
_fs = require('graceful-fs')
} catch (_) {
_fs = require('fs')
}
const universalify = require('universalify')
const { stringify, stripBom } = require('./utils')
async function _readFile (file, options = {}) {
if (typeof options === 'string') {
options = { encoding: options }
}
const fs = options.fs || _fs
const shouldThrow = 'throws' in options ? options.throws : true
let data = await universalify.fromCallback(fs.readFile)(file, options)
data = stripBom(data)
let obj
try {
obj = JSON.parse(data, options ? options.reviver : null)
} catch (err) {
if (shouldThrow) {
err.message = `${file}: ${err.message}`
throw err
} else {
return null
}
}
return obj
}
const readFile = universalify.fromPromise(_readFile)
function readFileSync (file, options = {}) {
if (typeof options === 'string') {
options = { encoding: options }
}
const fs = options.fs || _fs
const shouldThrow = 'throws' in options ? options.throws : true
try {
let content = fs.readFileSync(file, options)
content = stripBom(content)
return JSON.parse(content, options.reviver)
} catch (err) {
if (shouldThrow) {
err.message = `${file}: ${err.message}`
throw err
} else {
return null
}
}
}
async function _writeFile (file, obj, options = {}) {
const fs = options.fs || _fs
const str = stringify(obj, options)
await universalify.fromCallback(fs.writeFile)(file, str, options)
}
const writeFile = universalify.fromPromise(_writeFile)
function writeFileSync (file, obj, options = {}) {
const fs = options.fs || _fs
const str = stringify(obj, options)
// not sure if fs.writeFileSync returns anything, but just in case
return fs.writeFileSync(file, str, options)
}
const jsonfile = {
readFile,
readFileSync,
writeFile,
writeFileSync
}
module.exports = jsonfile
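A short sketch of the `throws` handling implemented above, with an assumed file path; when the file exists but does not contain valid JSON, `{ throws: false }` yields `null` instead of an error:

```js
const jsonfile = require('jsonfile')
// '/tmp/not-json.txt' is a placeholder for an existing file with invalid JSON.
jsonfile.readFile('/tmp/not-json.txt', { throws: false }, (err, obj) => {
  console.log(err, obj)   // -> null null (read succeeded, parse error was swallowed)
})
```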

40
vscodeEvalExtension/node_modules/jsonfile/package.json generated vendored Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "jsonfile",
"version": "6.1.0",
"description": "Easily read/write JSON files.",
"repository": {
"type": "git",
"url": "git@github.com:jprichardson/node-jsonfile.git"
},
"keywords": [
"read",
"write",
"file",
"json",
"fs",
"fs-extra"
],
"author": "JP Richardson <jprichardson@gmail.com>",
"license": "MIT",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
},
"devDependencies": {
"mocha": "^8.2.0",
"rimraf": "^2.4.0",
"standard": "^16.0.1"
},
"main": "index.js",
"files": [
"index.js",
"utils.js"
],
"scripts": {
"lint": "standard",
"test": "npm run lint && npm run unit",
"unit": "mocha"
}
}

14
vscodeEvalExtension/node_modules/jsonfile/utils.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
const EOF = finalEOL ? EOL : ''
const str = JSON.stringify(obj, replacer, spaces)
return str.replace(/\n/g, EOL) + EOF
}
function stripBom (content) {
// we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
if (Buffer.isBuffer(content)) content = content.toString('utf8')
return content.replace(/^\uFEFF/, '')
}
module.exports = { stringify, stripBom }
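A quick sketch of these two helpers (they are module-internal, so the relative require is only for illustration):

```js
const { stringify, stripBom } = require('./utils')

console.log(stringify({ a: 1 }, { spaces: 2, EOL: '\r\n' }))
// -> '{\r\n  "a": 1\r\n}\r\n'  (every newline replaced by EOL, final EOL appended)

console.log(stripBom('\uFEFF{"a":1}'))
// -> '{"a":1}'  (leading BOM stripped so JSON.parse accepts the string)
```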

20
vscodeEvalExtension/node_modules/universalify/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,20 @@
(The MIT License)
Copyright (c) 2017, Ryan Zimmerman <opensrc@ryanzim.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the 'Software'), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,76 @@
# universalify
![GitHub Workflow Status (branch)](https://img.shields.io/github/actions/workflow/status/RyanZim/universalify/ci.yml?branch=master)
![Coveralls github branch](https://img.shields.io/coveralls/github/RyanZim/universalify/master.svg)
![npm](https://img.shields.io/npm/dm/universalify.svg)
![npm](https://img.shields.io/npm/l/universalify.svg)
Make a callback- or promise-based function support both promises and callbacks.
Uses the native promise implementation.
## Installation
```bash
npm install universalify
```
## API
### `universalify.fromCallback(fn)`
Takes a callback-based function to universalify, and returns the universalified function.
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.
```js
function callbackFn (n, cb) {
setTimeout(() => cb(null, n), 15)
}
const fn = universalify.fromCallback(callbackFn)
// Works with Promises:
fn('Hello World!')
.then(result => console.log(result)) // -> Hello World!
.catch(error => console.error(error))
// Works with Callbacks:
fn('Hi!', (error, result) => {
if (error) return console.error(error)
console.log(result)
// -> Hi!
})
```
### `universalify.fromPromise(fn)`
Takes a promise-based function to universalify, and returns the universalified function.
Function must return a valid JS promise. `universalify` does not ensure that a valid promise is returned.
```js
function promiseFn (n) {
return new Promise(resolve => {
setTimeout(() => resolve(n), 15)
})
}
const fn = universalify.fromPromise(promiseFn)
// Works with Promises:
fn('Hello World!')
.then(result => console.log(result)) // -> Hello World!
.catch(error => console.error(error))
// Works with Callbacks:
fn('Hi!', (error, result) => {
if (error) return console.error(error)
console.log(result)
// -> Hi!
})
```
## License
MIT

24
vscodeEvalExtension/node_modules/universalify/index.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
'use strict'
exports.fromCallback = function (fn) {
return Object.defineProperty(function (...args) {
if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
else {
return new Promise((resolve, reject) => {
args.push((err, res) => (err != null) ? reject(err) : resolve(res))
fn.apply(this, args)
})
}
}, 'name', { value: fn.name })
}
exports.fromPromise = function (fn) {
return Object.defineProperty(function (...args) {
const cb = args[args.length - 1]
if (typeof cb !== 'function') return fn.apply(this, args)
else {
args.pop()
fn.apply(this, args).then(r => cb(null, r), cb)
}
}, 'name', { value: fn.name })
}
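This is the same wrapping pattern jsonfile uses (`universalify.fromCallback(fs.readFile)`); a small sketch of both call styles against the one wrapped function:

```js
const universalify = require('universalify')
const fs = require('fs')

const readFile = universalify.fromCallback(fs.readFile)

// Promise style: no callback passed, so a Promise is returned.
readFile(__filename, 'utf8').then(src => console.log(src.length))

// Callback style: last argument is a function, so fs.readFile is called directly.
readFile(__filename, 'utf8', (err, src) => {
  if (err) throw err
  console.log(src.length)
})
```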

View File

@@ -0,0 +1,34 @@
{
"name": "universalify",
"version": "2.0.1",
"description": "Make a callback- or promise-based function support both promises and callbacks.",
"keywords": [
"callback",
"native",
"promise"
],
"homepage": "https://github.com/RyanZim/universalify#readme",
"bugs": "https://github.com/RyanZim/universalify/issues",
"license": "MIT",
"author": "Ryan Zimmerman <opensrc@ryanzim.com>",
"files": [
"index.js"
],
"repository": {
"type": "git",
"url": "git+https://github.com/RyanZim/universalify.git"
},
"scripts": {
"test": "standard && nyc --reporter text --reporter lcovonly tape test/*.js | colortape"
},
"devDependencies": {
"colortape": "^0.1.2",
"coveralls": "^3.0.1",
"nyc": "^15.0.0",
"standard": "^14.3.1",
"tape": "^5.0.1"
},
"engines": {
"node": ">= 10.0.0"
}
}

View File

@@ -27,31 +27,26 @@ exports.deactivate = exports.activate = void 0;
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
const vscode = __importStar(require("vscode"));
const fs = __importStar(require("fs-extra"));
// This method is called when your extension is activated
// Your extension is activated the very first time the command is executed
function activate(context) {
// Use the console to output diagnostic information (console.log) and errors (console.error)
// This line of code will only be executed once when your extension is activated
// console.log('Congratulations, your extension "eval" is now active!');
// The command has been defined in the package.json file
// Now provide the implementation of the command with registerCommand
// The commandId parameter must match the command field in package.json
let OpenProject = vscode.commands.registerCommand('eval.OpenProject', () => {
// The code you place here will be executed every time your command is executed
// Display a message box to the user
// vscode.window.showInformationMessage(String(vscode.window.state.focused));
if (vscode.workspace.workspaceFolders) {
console.log(vscode.workspace.workspaceFolders[0].name);
fs.writeFileSync("/home/user/OpenProject.txt", vscode.workspace.workspaceFolders[0].name);
}
else {
console.log(false);
fs.writeFileSync("/home/user/OpenProject.txt", "");
}
});
let GetColorTheme = vscode.commands.registerCommand('eval.GetColorTheme', () => {
console.log(vscode.window.activeColorTheme.kind == 2);
fs.writeFileSync("/home/user/GetColorTheme.txt", String(vscode.window.activeColorTheme.kind)); // ==2
});
let GetBreakPoint = vscode.commands.registerCommand('eval.GetBreakPoint', () => {
console.log(vscode.debug.breakpoints.length == 1);
fs.writeFileSync("/home/user/GetBreakPoint.txt", String(vscode.debug.breakpoints.length)); // ==1
});
context.subscriptions.push(OpenProject, GetColorTheme, GetBreakPoint);
}

View File

@@ -1 +1 @@
{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,6DAA6D;AAC7D,8EAA8E;AAC9E,+CAAiC;AAEjC,yDAAyD;AACzD,0EAA0E;AAC1E,SAAgB,QAAQ,CAAC,OAAgC;IAExD,4FAA4F;IAC5F,gFAAgF;IAChF,wEAAwE;IAExE,wDAAwD;IACxD,qEAAqE;IACrE,uEAAuE;IACvE,IAAI,WAAW,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,kBAAkB,EAAE,GAAG,EAAE;QAC1E,+EAA+E;QAC/E,oCAAoC;QACpC,6EAA6E;QAC7E,IAAI,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC;YACvC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACxD,CAAC;aAAM,CAAC;YACP,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACpB,CAAC;IACF,CAAC,CAAC,CAAC;IAEH,IAAI,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,oBAAoB,EAAE,GAAG,EAAE;QAC9E,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;IAEH,IAAI,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,oBAAoB,EAAE,GAAG,EAAE;QAC9E,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC;IACnD,CAAC,CAAC,CAAC;IAEH,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,WAAW,EAAE,aAAa,EAAE,aAAa,CAAC,CAAC;AACvE,CAAC;AA7BD,4BA6BC;AAED,2DAA2D;AAC3D,SAAgB,UAAU,KAAI,CAAC;AAA/B,gCAA+B"}
{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,6DAA6D;AAC7D,8EAA8E;AAC9E,+CAAiC;AACjC,6CAA+B;AAE/B,yDAAyD;AACzD,0EAA0E;AAC1E,SAAgB,QAAQ,CAAC,OAAgC;IACxD,wDAAwD;IACxD,qEAAqE;IACrE,uEAAuE;IACvE,IAAI,WAAW,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,kBAAkB,EAAE,GAAG,EAAE;QAC1E,IAAI,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC;YACvC,EAAE,CAAC,aAAa,CAAC,4BAA4B,EAAE,MAAM,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QAC3F,CAAC;aAAM,CAAC;YACP,EAAE,CAAC,aAAa,CAAC,4BAA4B,EAAE,EAAE,CAAC,CAAC;QACpD,CAAC;IACF,CAAC,CAAC,CAAC;IAEH,IAAI,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,oBAAoB,EAAE,GAAG,EAAE;QAC9E,EAAE,CAAC,aAAa,CAAC,8BAA8B,EAAE,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM;IACtG,CAAC,CAAC,CAAC;IAEH,IAAI,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,oBAAoB,EAAE,GAAG,EAAE;QAC9E,EAAE,CAAC,aAAa,CAAC,8BAA8B,EAAE,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM;IAClG,CAAC,CAAC,CAAC;IAEH,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,WAAW,EAAE,aAAa,EAAE,aAAa,CAAC,CAAC;AACvE,CAAC;AArBD,4BAqBC;AAED,2DAA2D;AAC3D,SAAgB,UAAU,KAAI,CAAC;AAA/B,gCAA+B"}

View File

@@ -7,6 +7,10 @@
"": {
"name": "eval",
"version": "0.0.1",
"dependencies": {
"@types/fs-extra": "^11.0.4",
"fs-extra": "^11.2.0"
},
"devDependencies": {
"@types/mocha": "^10.0.6",
"@types/node": "18.x",
@@ -262,12 +266,29 @@
"node": ">= 6"
}
},
"node_modules/@types/fs-extra": {
"version": "11.0.4",
"resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz",
"integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==",
"dependencies": {
"@types/jsonfile": "*",
"@types/node": "*"
}
},
"node_modules/@types/json-schema": {
"version": "7.0.15",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
"dev": true
},
"node_modules/@types/jsonfile": {
"version": "6.1.4",
"resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz",
"integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/mocha": {
"version": "10.0.6",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz",
@@ -278,7 +299,6 @@
"version": "18.19.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.4.tgz",
"integrity": "sha512-xNzlUhzoHotIsnFoXmJB+yWmBvFZgKCI9TtPIEdYIMM1KWfwuY8zh7wvc1u1OAXlC7dlf6mZVx/s+Y5KfFz19A==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
@@ -1252,6 +1272,19 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/fs-extra": {
"version": "11.2.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz",
"integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=14.14"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -1350,6 +1383,11 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
},
"node_modules/graphemer": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
@@ -1598,6 +1636,17 @@
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
"dev": true
},
"node_modules/jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/jszip": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz",
@@ -2556,8 +2605,15 @@
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"node_modules/universalify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/uri-js": {
"version": "4.4.1",

View File

@@ -36,14 +36,18 @@
"test": "vscode-test"
},
"devDependencies": {
"@types/vscode": "^1.85.0",
"@types/mocha": "^10.0.6",
"@types/node": "18.x",
"@types/vscode": "^1.85.0",
"@typescript-eslint/eslint-plugin": "^6.15.0",
"@typescript-eslint/parser": "^6.15.0",
"eslint": "^8.56.0",
"typescript": "^5.3.3",
"@vscode/test-cli": "^0.0.4",
"@vscode/test-electron": "^2.3.8"
"@vscode/test-electron": "^2.3.8",
"eslint": "^8.56.0",
"typescript": "^5.3.3"
},
"dependencies": {
"@types/fs-extra": "^11.0.4",
"fs-extra": "^11.2.0"
}
}

View File

@@ -1,35 +1,28 @@
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
import * as vscode from 'vscode';
import * as fs from 'fs-extra';
// This method is called when your extension is activated
// Your extension is activated the very first time the command is executed
export function activate(context: vscode.ExtensionContext) {
// Use the console to output diagnostic information (console.log) and errors (console.error)
// This line of code will only be executed once when your extension is activated
// console.log('Congratulations, your extension "eval" is now active!');
// The command has been defined in the package.json file
// Now provide the implementation of the command with registerCommand
// The commandId parameter must match the command field in package.json
let OpenProject = vscode.commands.registerCommand('eval.OpenProject', () => {
// The code you place here will be executed every time your command is executed
// Display a message box to the user
// vscode.window.showInformationMessage(String(vscode.window.state.focused));
if (vscode.workspace.workspaceFolders) {
console.log(vscode.workspace.workspaceFolders[0].name);
fs.writeFileSync("/home/user/OpenProject.txt", vscode.workspace.workspaceFolders[0].name);
} else {
console.log(false);
fs.writeFileSync("/home/user/OpenProject.txt", "");
}
});
let GetColorTheme = vscode.commands.registerCommand('eval.GetColorTheme', () => {
console.log(vscode.window.activeColorTheme.kind == 2);
fs.writeFileSync("/home/user/GetColorTheme.txt", String(vscode.window.activeColorTheme.kind)); // ==2
});
let GetBreakPoint = vscode.commands.registerCommand('eval.GetBreakPoint', () => {
console.log(vscode.debug.breakpoints.length == 1);
fs.writeFileSync("/home/user/GetBreakPoint.txt", String(vscode.debug.breakpoints.length)); // ==1
});
context.subscriptions.push(OpenProject, GetColorTheme, GetBreakPoint);