File Operations
AIO Sandbox provides a comprehensive REST API for file system operations, enabling programmatic file management across the entire sandbox environment.
API Endpoints
All file operations are available through RESTful endpoints under /v1/file/*.
Read Files
Read file contents with an optional line range:
curl -X POST http://localhost:8080/v1/file/read \
  -H "Content-Type: application/json" \
  -d '{
    "file": "/path/to/file.txt",
    "start_line": 0,
    "end_line": 100,
    "sudo": false
  }'
Response:
{
  "success": true,
  "message": "File read successfully",
  "data": {
    "content": "File contents here...",
    "line_count": 42,
    "file": "/path/to/file.txt"
  }
}
Write Files
Write content to files with various options:
curl -X POST http://localhost:8080/v1/file/write \
  -H "Content-Type: application/json" \
  -d '{
    "file": "/path/to/output.txt",
    "content": "Hello, World!",
    "append": false,
    "leading_newline": false,
    "trailing_newline": true,
    "sudo": false
  }'
Response:
{
  "success": true,
  "message": "File written successfully",
  "data": {
    "file": "/path/to/output.txt",
    "bytes_written": 13
  }
}
Replace Content
Find and replace text within files:
curl -X POST http://localhost:8080/v1/file/replace \
  -H "Content-Type: application/json" \
  -d '{
    "file": "/path/to/file.txt",
    "old_str": "old text",
    "new_str": "new text",
    "sudo": false
  }'
Response:
{
  "success": true,
  "message": "Replacement completed, replaced 3 occurrences",
  "data": {
    "file": "/path/to/file.txt",
    "replaced_count": 3
  }
}
Search Files
Search file contents using regular expressions:
curl -X POST http://localhost:8080/v1/file/search \
  -H "Content-Type: application/json" \
  -d '{
    "file": "/path/to/file.txt",
    "regex": "function\\s+\\w+",
    "sudo": false
  }'
Response:
{
  "success": true,
  "message": "Search completed, found 5 matches",
  "data": {
    "file": "/path/to/file.txt",
    "matches": [
      {
        "line_number": 10,
        "line": "function myFunction() {",
        "match": "function myFunction"
      }
    ]
  }
}
Find Files
Search for files using glob patterns:
curl -X POST http://localhost:8080/v1/file/find \
  -H "Content-Type: application/json" \
  -d '{
    "path": "/home/user",
    "glob": "*.js"
  }'
Response:
{
  "success": true,
  "message": "Search completed, found 12 files",
  "data": {
    "files": [
      "/home/user/app.js",
      "/home/user/config.js",
      "/home/user/utils.js"
    ]
  }
}
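The find and search endpoints compose naturally: list the matching files first, then run a regex over each one. A minimal sketch in Python, built only on the two endpoints documented above (the helper name and BASE_URL constant are ours):

import requests

BASE_URL = "http://localhost:8080"

def grep_directory(path, glob, regex):
    # List every file matching the glob under `path`
    files = requests.post(
        f"{BASE_URL}/v1/file/find",
        json={"path": path, "glob": glob}
    ).json()

    # Run the regex search over each file and collect the hits
    hits = {}
    for file_path in files["data"]["files"]:
        result = requests.post(
            f"{BASE_URL}/v1/file/search",
            json={"file": file_path, "regex": regex}
        ).json()
        if result["success"] and result["data"]["matches"]:
            hits[file_path] = result["data"]["matches"]
    return hits

# Every function definition in the user's JavaScript files
matches = grep_directory("/home/user", "*.js", r"function\s+\w+")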
Integration Examples
Python Integration
import requests
import json

class SandboxFileAPI:
    def __init__(self, base_url="http://localhost:8080"):
        self.base_url = base_url

    def read_file(self, file_path, start_line=None, end_line=None, sudo=False):
        payload = {"file": file_path, "sudo": sudo}
        if start_line is not None:
            payload["start_line"] = start_line
        if end_line is not None:
            payload["end_line"] = end_line
        response = requests.post(
            f"{self.base_url}/v1/file/read",
            json=payload
        )
        return response.json()

    def write_file(self, file_path, content, append=False, sudo=False):
        payload = {
            "file": file_path,
            "content": content,
            "append": append,
            "sudo": sudo
        }
        response = requests.post(
            f"{self.base_url}/v1/file/write",
            json=payload
        )
        return response.json()

    def search_files(self, pattern, directory="/"):
        payload = {
            "path": directory,
            "glob": pattern
        }
        response = requests.post(
            f"{self.base_url}/v1/file/find",
            json=payload
        )
        return response.json()
# Usage
api = SandboxFileAPI()

# Read configuration
config = api.read_file("/app/config.json")
print(config["data"]["content"])

# Write log entry
api.write_file("/var/log/app.log", "Process started\n", append=True)

# Find Python files
files = api.search_files("*.py", "/app")
for file_path in files["data"]["files"]:
    print(f"Found: {file_path}")
JavaScript/Node.js Integration
class SandboxFileAPI {
  constructor(baseUrl = 'http://localhost:8080') {
    this.baseUrl = baseUrl;
  }

  async readFile(filePath, options = {}) {
    const payload = { file: filePath, ...options };
    const response = await fetch(`${this.baseUrl}/v1/file/read`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    });
    return response.json();
  }

  async writeFile(filePath, content, options = {}) {
    const payload = { file: filePath, content, ...options };
    const response = await fetch(`${this.baseUrl}/v1/file/write`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    });
    return response.json();
  }

  async replaceInFile(filePath, oldStr, newStr) {
    const payload = { file: filePath, old_str: oldStr, new_str: newStr };
    const response = await fetch(`${this.baseUrl}/v1/file/replace`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    });
    return response.json();
  }
}
// Usage
const api = new SandboxFileAPI();

// Read and process file
const result = await api.readFile('/app/data.txt');
if (result.success) {
  console.log('File content:', result.data.content);

  // Update configuration
  await api.replaceInFile('/app/config.json', '"debug": false', '"debug": true');
}
File System Integration
Shared Access
Files are shared across all sandbox components:
# Create file via API
curl -X POST http://localhost:8080/v1/file/write \
  -H "Content-Type: application/json" \
  -d '{"file": "/tmp/shared.txt", "content": "Shared content"}'

# Access in terminal (ws://localhost:8080/v1/shell/ws)
#   > cat /tmp/shared.txt
#   Shared content

# Edit in Code Server (http://localhost:8080/code-server/)
#   Open /tmp/shared.txt

# Process in browser
#   Download or view via VNC
Workflow Example
Complete file processing workflow:
- Download file via browser

  // Files appear in /home/user/Downloads/

- Process with File API

  # Read downloaded file
  content = api.read_file("/home/user/Downloads/data.csv")

  # Process and save results
  processed = process_csv(content["data"]["content"])
  api.write_file("/tmp/results.json", json.dumps(processed))

- Execute shell commands

  # Run analysis script
  python /app/analyze.py /tmp/results.json

  # Generate report
  pandoc /tmp/results.json -o /tmp/report.pdf

- Edit in Code Server

  # Open results in VSCode for refinement
  # /tmp/report.pdf available for preview
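The processing step can be made concrete with stdlib parsing. A sketch, assuming the api object from the Python integration above; the row-count aggregation is just a placeholder for your real process_csv logic:

import csv
import io
import json

# Read the downloaded file through the File API
content = api.read_file("/home/user/Downloads/data.csv")["data"]["content"]

# Placeholder processing: parse the CSV and count the rows
# (swap in your own transformation here)
rows = list(csv.DictReader(io.StringIO(content)))
processed = {"row_count": len(rows)}

# Save the results for the shell step that follows
api.write_file("/tmp/results.json", json.dumps(processed))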
Advanced Features
Batch Operations
Process multiple files efficiently:
def batch_process_files(api, directory, pattern):
    # Find all matching files
    files_result = api.search_files(pattern, directory)

    for file_path in files_result["data"]["files"]:
        # Read each file
        content_result = api.read_file(file_path)

        if content_result["success"]:
            content = content_result["data"]["content"]

            # Process content
            processed = content.upper()

            # Write back processed content
            output_path = file_path.replace(".txt", "_processed.txt")
            api.write_file(output_path, processed)

# Process all text files in directory
batch_process_files(api, "/app/data", "*.txt")
Error Handling
Robust error handling for file operations:
def safe_file_operation(operation, **kwargs):
    try:
        result = operation(**kwargs)
        if result["success"]:
            return result["data"]
        else:
            print(f"Operation failed: {result['message']}")
            return None
    except requests.exceptions.RequestException as e:
        print(f"Network error: {e}")
        return None
    except json.JSONDecodeError as e:
        print(f"JSON decode error: {e}")
        return None

# Safe file reading
content = safe_file_operation(
    api.read_file,
    file_path="/path/to/file.txt"
)
Permission Management
Handle file permissions and sudo operations:
# Regular file operation
result = api.read_file("/home/user/file.txt")

# Sudo operation for system files
result = api.read_file("/etc/nginx/nginx.conf", sudo=True)

# Write to protected location
api.write_file(
    "/etc/cron.d/backup",
    "0 2 * * * root /backup.sh",
    sudo=True
)
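If sudo access is disabled in the sandbox configuration, a privileged call presumably comes back with success: false rather than raising. A defensive pattern (the exact failure message is server-defined, so treat it as opaque):

# Attempt a privileged read and degrade gracefully on refusal
result = api.read_file("/etc/shadow", sudo=True)
if not result["success"]:
    # Message wording is server-defined; log it rather than parse it
    print(f"Privileged read refused: {result['message']}")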
Security Considerations
File Access Controls
- Default user permissions apply
- Sudo access configurable
- Path traversal protection
- File size limits enforced
Best Practices
- Validate file paths before operations
- Use appropriate permissions (avoid sudo when possible)
- Implement file size checks for uploads (see the sketch below)
- Sanitize user-provided content
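For the size-check item above, a minimal client-side guard (a sketch: the 10 MB cap is an assumed policy, not an API constant, and write_checked is our own helper over the documented write endpoint):

import os

MAX_UPLOAD_BYTES = 10 * 1024 * 1024  # assumed 10 MB policy

def write_checked(api, local_path, remote_path):
    # Refuse to push text files over the assumed size limit
    size = os.path.getsize(local_path)
    if size > MAX_UPLOAD_BYTES:
        raise ValueError(f"{local_path} is {size} bytes, over the limit")
    with open(local_path) as f:
        return api.write_file(remote_path, f.read())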
Example Security Implementation
import os
import re

def secure_file_operation(file_path, base_directory="/home/user"):
    # Resolve the requested path against the allowed base directory
    normalized = os.path.normpath(os.path.join(base_directory, file_path))

    # Check for path traversal: the resolved path must stay inside the base
    if os.path.commonpath([base_directory, normalized]) != base_directory:
        raise ValueError("Path traversal detected")

    # Validate filename characters
    if not re.match(r'^[a-zA-Z0-9._/-]+$', normalized):
        raise ValueError("Invalid characters in filename")

    return normalized

# Safe usage
try:
    safe_path = secure_file_operation("../../../etc/passwd")
except ValueError as e:
    print(f"Security violation: {e}")
Performance Optimization
Large File Handling
# Read large files in chunks
def read_large_file(api, file_path, chunk_size=1000):
    total_lines = 0
    content_parts = []

    while True:
        result = api.read_file(
            file_path,
            start_line=total_lines,
            end_line=total_lines + chunk_size
        )

        if not result["success"] or not result["data"]["content"]:
            break

        content_parts.append(result["data"]["content"])
        total_lines += chunk_size

        # Prevent infinite loop: a short chunk means we hit the end of the file
        if len(result["data"]["content"].splitlines()) < chunk_size:
            break

    return "\n".join(content_parts)
Concurrent Operations
import asyncio
import aiohttp

async def parallel_file_operations(files):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for file_path in files:
            task = read_file_async(session, file_path)
            tasks.append(task)
        results = await asyncio.gather(*tasks)
        return results

async def read_file_async(session, file_path):
    payload = {"file": file_path}
    async with session.post(
        "http://localhost:8080/v1/file/read",
        json=payload
    ) as response:
        return await response.json()
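Driving the coroutines from a script (the file list is illustrative; line_count comes from the documented read response):

# Fetch several files concurrently
files = ["/app/a.txt", "/app/b.txt", "/app/c.txt"]
results = asyncio.run(parallel_file_operations(files))
for path, result in zip(files, results):
    if result["success"]:
        print(f"{path}: {result['data']['line_count']} lines")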
Ready to integrate file operations? Check our API reference for complete endpoint documentation.