FileSystem
class FileSystem()
Provides file system operations within a Sandbox.
This class implements a high-level interface to file system operations that can be performed within a Daytona Sandbox.
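In practice the file system is reached through a Sandbox instance rather than used standalone. The sketch below shows that flow; the client entry points (Daytona(), create()) and the daytona_sdk import are assumptions about the surrounding SDK, so check the SDK quick-start for the exact names in your version.
from daytona_sdk import Daytona

daytona = Daytona()         # assumed client constructor; reads the API key from the environment
sandbox = daytona.create()  # assumed helper that provisions a new Sandbox

# All operations documented below hang off sandbox.fs
sandbox.fs.create_folder("workspace/data", "755")
sandbox.fs.upload_file(b"hello", "workspace/data/hello.txt")
print(sandbox.fs.download_file("workspace/data/hello.txt"))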
FileSystem.__init__
def __init__(sandbox_id: str, toolbox_api: ToolboxApi, get_root_dir: Callable[[], str])
Initializes a new FileSystem instance.
Arguments:
sandbox_id
str - The Sandbox ID.
toolbox_api
ToolboxApi - API client for Sandbox operations.
get_root_dir
Callable[[], str] - A function to get the default root directory of the Sandbox.
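Note that application code normally does not call this constructor directly; the Sandbox wires these arguments up itself and exposes the result as sandbox.fs, which is what every example on this page uses. The direct construction below is purely illustrative, and the root directory shown is hypothetical.
fs = FileSystem(sandbox.id, toolbox_api, lambda: "/home/daytona")  # illustrative only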
FileSystem.create_folder
@intercept_errors(message_prefix="Failed to create folder: ")def create_folder(path: str, mode: str) -> None
Creates a new directory in the Sandbox at the specified path with the given permissions.
Arguments:
path
str - Path where the folder should be created. Relative paths are resolved based on the user’s root directory.
mode
str - Folder permissions in octal format (e.g., “755” for rwxr-xr-x).
Example:
# Create a directory with standard permissions
sandbox.fs.create_folder("workspace/data", "755")

# Create a private directory
sandbox.fs.create_folder("workspace/secrets", "700")
FileSystem.delete_file
@intercept_errors(message_prefix="Failed to delete file: ")def delete_file(path: str) -> None
Deletes a file from the Sandbox.
Arguments:
path
str - Absolute path to the file to delete.
Example:
# Delete a file
sandbox.fs.delete_file("workspace/data/old_file.txt")
FileSystem.download_file
@overload
def download_file(remote_path: str, timeout: int = 30 * 60) -> bytes
Downloads a file from the Sandbox. Returns the file contents as a bytes object. This method is useful when you want to load the file into memory without saving it to disk. It can only be used for smaller files.
Arguments:
remote_path
str - Path to the file in the Sandbox. Relative paths are resolved based on the user’s root directory.
timeout
int - Timeout for the download operation in seconds. 0 means no timeout. Default is 30 minutes.
Returns:
bytes
- The file contents as a bytes object.
Example:
# Download and save a file locally
content = sandbox.fs.download_file("workspace/data/file.txt")
with open("local_copy.txt", "wb") as f:
    f.write(content)
# Download and process text content
import json

content = sandbox.fs.download_file("workspace/data/config.json")
config = json.loads(content.decode('utf-8'))
FileSystem.download_file
@overload
def download_file(remote_path: str, local_path: str, timeout: int = 30 * 60) -> None
Downloads a file from the Sandbox and saves it to a local file using streaming. This method is useful when you want to download larger files that may not fit into memory.
Arguments:
remote_path
str - Path to the file in the Sandbox. Relative paths are resolved based on the user’s root directory.
local_path
str - Path to save the file locally.
timeout
int - Timeout for the download operation in seconds. 0 means no timeout. Default is 30 minutes.
Example:
import os

local_path = "local_copy.txt"
sandbox.fs.download_file("tmp/large_file.txt", local_path)
size_mb = os.path.getsize(local_path) / 1024 / 1024
print(f"Size of the downloaded file {local_path}: {size_mb} MB")
FileSystem.find_files
@intercept_errors(message_prefix="Failed to find files: ")def find_files(path: str, pattern: str) -> List[Match]
Searches for files containing a pattern, similar to the grep command.
Arguments:
path
str - Path to the file or directory to search. If the path is a directory, the search will be performed recursively. Relative paths are resolved based on the user’s root directory.
pattern
str - Search pattern to match against file contents.
Returns:
List[Match]
- List of matches found in files. Each Match object includes:
- file: Path to the file containing the match
- line: The line number where the match was found
- content: The matching line content
Example:
# Search for TODOs in Python files
matches = sandbox.fs.find_files("workspace/src", "TODO:")
for match in matches:
    print(f"{match.file}:{match.line}: {match.content.strip()}")
FileSystem.get_file_info
@intercept_errors(message_prefix="Failed to get file info: ")def get_file_info(path: str) -> FileInfo
Gets detailed information about a file or directory, including its size, permissions, and timestamps.
Arguments:
path
str - Path to the file or directory. Relative paths are resolved based on the user’s root directory.
Returns:
FileInfo
- Detailed file information including:
- name: File name
- is_dir: Whether the path is a directory
- size: File size in bytes
- mode: File permissions
- mod_time: Last modification timestamp
- permissions: File permissions in octal format
- owner: File owner
- group: File group
Example:
# Get file metadata
info = sandbox.fs.get_file_info("workspace/data/file.txt")
print(f"Size: {info.size} bytes")
print(f"Modified: {info.mod_time}")
print(f"Mode: {info.mode}")

# Check if path is a directory
info = sandbox.fs.get_file_info("workspace/data")
if info.is_dir:
    print("Path is a directory")
FileSystem.list_files
@intercept_errors(message_prefix="Failed to list files: ")def list_files(path: str) -> List[FileInfo]
Lists files and directories in a given path and returns their information, similar to the ls -l command.
Arguments:
path
str - Path to the directory to list contents from. Relative paths are resolved based on the user’s root directory.
Returns:
List[FileInfo]
- List of file and directory information. Each FileInfo object includes the same fields as described in get_file_info().
Example:
# List directory contents
files = sandbox.fs.list_files("workspace/data")

# Print files and their sizes
for file in files:
    if not file.is_dir:
        print(f"{file.name}: {file.size} bytes")

# List only directories
dirs = [f for f in files if f.is_dir]
print("Subdirectories:", ", ".join(d.name for d in dirs))
FileSystem.move_files
@intercept_errors(message_prefix="Failed to move files: ")def move_files(source: str, destination: str) -> None
Moves or renames a file or directory. The parent directory of the destination must exist.
Arguments:
source
str - Path to the source file or directory. Relative paths are resolved based on the user’s root directory.
destination
str - Path to the destination. Relative paths are resolved based on the user’s root directory.
Example:
# Rename a file
sandbox.fs.move_files(
    "workspace/data/old_name.txt",
    "workspace/data/new_name.txt"
)

# Move a file to a different directory
sandbox.fs.move_files(
    "workspace/data/file.txt",
    "workspace/archive/file.txt"
)

# Move a directory
sandbox.fs.move_files(
    "workspace/old_dir",
    "workspace/new_dir"
)
FileSystem.replace_in_files
@intercept_errors(message_prefix="Failed to replace in files: ")def replace_in_files(files: List[str], pattern: str, new_value: str) -> List[ReplaceResult]
Performs search and replace operations across multiple files.
Arguments:
files
List[str] - List of file paths to perform replacements in. Relative paths are resolved based on the user’s root directory.
pattern
str - Pattern to search for.
new_value
str - Text to replace matches with.
Returns:
List[ReplaceResult]
- List of results indicating replacements made in each file. Each ReplaceResult includes:
- file: Path to the modified file
- success: Whether the operation was successful
- error: Error message if the operation failed
Example:
# Replace in specific files
results = sandbox.fs.replace_in_files(
    files=["workspace/src/file1.py", "workspace/src/file2.py"],
    pattern="old_function",
    new_value="new_function"
)

# Print results
for result in results:
    if result.success:
        print(f"{result.file}: {result.success}")
    else:
        print(f"{result.file}: {result.error}")
FileSystem.search_files
@intercept_errors(message_prefix="Failed to search files: ")def search_files(path: str, pattern: str) -> SearchFilesResponse
Searches for files and directories whose names match the specified pattern. The pattern can be a simple string or a glob pattern.
Arguments:
path
str - Path to the root directory to start search from. Relative paths are resolved based on the user’s root directory.
pattern
str - Pattern to match against file names. Supports glob patterns (e.g., “*.py” for Python files).
Returns:
SearchFilesResponse
- Search results containing:
- files: List of matching file and directory paths
Example:
# Find all Python files
result = sandbox.fs.search_files("workspace", "*.py")
for file in result.files:
    print(file)

# Find files with specific prefix
result = sandbox.fs.search_files("workspace/data", "test_*")
print(f"Found {len(result.files)} test files")
FileSystem.set_file_permissions
@intercept_errors(message_prefix="Failed to set file permissions: ")def set_file_permissions(path: str, mode: str = None, owner: str = None, group: str = None) -> None
Sets permissions and ownership for a file or directory. Any of the parameters can be None to leave that attribute unchanged.
Arguments:
path
str - Path to the file or directory. Relative paths are resolved based on the user’s root directory.
mode
Optional[str] - File mode/permissions in octal format (e.g., “644” for rw-r--r--).
owner
Optional[str] - User owner of the file.
group
Optional[str] - Group owner of the file.
Example:
# Make a file executable
sandbox.fs.set_file_permissions(
    path="workspace/scripts/run.sh",
    mode="755"  # rwxr-xr-x
)

# Change file owner
sandbox.fs.set_file_permissions(
    path="workspace/data/file.txt",
    owner="daytona",
    group="daytona"
)
FileSystem.upload_file
@overload
def upload_file(file: bytes, remote_path: str, timeout: int = 30 * 60) -> None
Uploads a file to the specified path in the Sandbox. If a file already exists at the destination path, it will be overwritten. This method is useful when you want to upload small files that fit into memory.
Arguments:
file
bytes - File contents as a bytes object.
remote_path
str - Path to the destination file. Relative paths are resolved based on the user’s root directory.
timeout
int - Timeout for the upload operation in seconds. 0 means no timeout. Default is 30 minutes.
Example:
# Upload a text file
content = b"Hello, World!"
sandbox.fs.upload_file(content, "tmp/hello.txt")

# Upload a local file
with open("local_file.txt", "rb") as f:
    content = f.read()
sandbox.fs.upload_file(content, "tmp/file.txt")

# Upload binary data
import json
data = {"key": "value"}
content = json.dumps(data).encode('utf-8')
sandbox.fs.upload_file(content, "tmp/config.json")
FileSystem.upload_file
@overload
def upload_file(local_path: str, remote_path: str, timeout: int = 30 * 60) -> None
Uploads a file from the local file system to the specified path in the Sandbox. If a file already exists at the destination path, it will be overwritten. This method uses streaming to upload the file, so it is useful when you want to upload larger files that may not fit into memory.
Arguments:
local_path
str - Path to the local file to upload.
remote_path
str - Path to the destination file in the Sandbox. Relative paths are resolved based on the user’s root directory.
timeout
int - Timeout for the upload operation in seconds. 0 means no timeout. Default is 30 minutes.
Example:
sandbox.fs.upload_file("local_file.txt", "tmp/large_file.txt")
FileSystem.upload_files
@intercept_errors(message_prefix="Failed to upload files: ")def upload_files(files: List[FileUpload], timeout: int = 30 * 60) -> None
Uploads multiple files to the Sandbox. If files already exist at the destination paths, they will be overwritten.
Arguments:
files
List[FileUpload] - List of files to upload.
timeout
int - Timeout for the upload operation in seconds. 0 means no timeout. Default is 30 minutes.
Example:
# Upload multiple text files
files = [
    FileUpload(
        source=b"Content of file 1",
        destination="/tmp/file1.txt"
    ),
    FileUpload(
        source="workspace/data/file2.txt",
        destination="/tmp/file2.txt"
    ),
    FileUpload(
        source=b'{"key": "value"}',
        destination="/tmp/config.json"
    )
]
sandbox.fs.upload_files(files)
FileUpload
@dataclass
class FileUpload()
Represents a file to be uploaded to the Sandbox.
Attributes:
source
Union[bytes, str] - File contents as a bytes object or a local file path. If a bytes object is provided, make sure it fits into memory; otherwise, use a local file path, whose contents will be streamed to the Sandbox.
destination
str - Absolute destination path in the Sandbox. Relative paths are resolved based on the user’s root directory.
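As a brief illustration of the two source types (the local path below is hypothetical): passing bytes keeps the payload in memory, while passing a path streams the file from disk.
# Small payload: bytes are held in memory and sent directly
small = FileUpload(source=b"report data", destination="tmp/report.txt")

# Large payload: a local path is streamed to the Sandbox
large = FileUpload(source="/home/user/large_report.csv", destination="tmp/report.csv")

sandbox.fs.upload_files([small, large])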