1. Install
npm install -g @magpiecloud/mags
Give your AI agents an instant home. Every sandbox is completely isolated, boots in ~300ms, and syncs your files to the cloud automatically. CLI, Python, and Node.js — pick your tool.
mags run 'echo Hello World'
mags run -w myproject -p 'pip install flask'
mags new dev && mags ssh dev
Run a script, persist your files to the cloud, then jump in with SSH.
from mags import Mags
m = Mags()
result = m.run_and_wait("echo Hello World")
print(result["status"]) # "completed"
pip install magpie-mags
const Mags = require('@magpiecloud/mags');
const mags = new Mags();
const result = await mags.runAndWait('echo Hello World');
console.log(result.logs);
npm install @magpiecloud/mags
Quickstart
npm install -g @magpiecloud/mags
mags login
Or set a token directly:
export MAGS_API_TOKEN="your-token"
mags run 'echo Hello World'
Add -w myproject -p to persist files to the cloud between runs.
pip install magpie-mags
export MAGS_API_TOKEN="your-token"
Or pass it directly: Mags(api_token="...")
from mags import Mags
m = Mags()
result = m.run_and_wait("echo Hello World")
for log in result["logs"]:
print(log["message"])
npm install @magpiecloud/mags
export MAGS_API_TOKEN="your-token"
Or pass it directly: new Mags({ apiToken: "..." })
const Mags = require('@magpiecloud/mags');
const mags = new Mags();
const result = await mags.runAndWait('echo Hello');
console.log(result.status); // "completed"
Usage Patterns
npm install -g @magpiecloud/mags
mags login
| Command | Description |
|---|---|
mags run <script> | Run a script in a fresh sandbox. Fastest — no workspace, no persistence. |
mags run -w <name> <script> | Run with a named workspace. Data stays on the VM only — deleted after 10 min idle. |
mags run -w <name> -p <script> | Run with a persistent workspace. Files synced to S3 and survive across runs indefinitely. |
mags run --url --port <port> <script> | Request a public HTTPS URL for your sandbox (requires -p). |
mags run --no-sleep <script> | Keep sandbox running 24/7, never auto-sleep (requires -p). |
mags run -e <script> | Ephemeral — no workspace at all, fastest possible execution. |
mags run --base <workspace> <script> | Use an existing workspace as a read-only base image (OverlayFS). |
mags run -f <file> <script> | Upload file(s) into the sandbox before running (repeatable). |
| Command | Description |
|---|---|
mags new <name> | Create a new sandbox. Workspace lives on local disk only. |
mags new <name> -p | Create a sandbox with persistent workspace — synced to S3. |
mags exec <name> <cmd> | Execute a command on an existing sandbox. |
mags ssh <name> | SSH into a sandbox. Auto-starts if sleeping or stopped. |
| Command | Description |
|---|---|
mags list | List recent jobs |
mags status <id> | Get job status |
mags logs <id> | Get job output |
mags stop <id> | Stop a running job |
mags set <id> [options] | Update VM settings (e.g. --no-sleep, --sleep) |
mags sync <workspace> | Sync workspace to the cloud now |
mags url <id> [port] | Enable public URL access |
mags resize <workspace> --disk <GB> | Resize workspace disk |
mags workspace list | List persistent workspaces |
mags workspace delete <id> | Delete workspace + cloud data |
mags url alias <sub> <workspace> | Create a stable URL alias |
mags url alias list | List URL aliases |
mags url alias remove <sub> | Delete a URL alias |
mags cron add [opts] <script> | Create a scheduled cron job |
mags cron list | List cron jobs |
mags cron enable <id> | Enable a cron job |
mags cron disable <id> | Disable a cron job |
mags cron remove <id> | Delete a cron job |
| Flag | Description |
|---|---|
-n, --name <name> | Alias for -w |
-e, --ephemeral | No workspace at all, fastest possible execution |
--base <workspace> | Use an existing workspace as a read-only base image |
--disk <GB> | Custom disk size in GB (default: 2) |
--startup-command <cmd> | Command to run when sandbox wakes from sleep |
# Persistent workspace — install packages, then run your app
mags run -w myproject -p 'pip install flask requests'
mags run -w myproject -p 'python3 app.py'
# Golden image — create once, fork many times
mags run -w golden -p 'apk add nodejs npm && npm install -g typescript'
mags sync golden
mags run --base golden -w fork-1 -p 'npm test'
# Interactive sandbox with SSH
mags new dev -p
mags ssh dev
mags exec dev 'node --version'
# Always-on web server with public URL
mags run -w webapp -p --no-sleep --url --port 8080 \
--startup-command 'python3 -m http.server 8080' \
'python3 -m http.server 8080'
# Cron job
mags cron add --name backup --schedule "0 0 * * *" \
-w backups 'tar czf backup.tar.gz /data'
pip install magpie-mags
export MAGS_API_TOKEN="your-token"
| Method | Description |
|---|---|
run(script, **opts) | Submit a job (returns immediately) |
run_and_wait(script, **opts) | Submit + block until complete |
new(name, **opts) | Create VM sandbox (pass persistent=True for S3) |
exec(name, command) | Run command on existing sandbox via SSH |
stop(name_or_id) | Stop a running job |
find_job(name_or_id) | Find job by name or workspace |
url(name_or_id, port) | Enable public URL access |
resize(workspace, disk_gb) | Resize workspace disk |
status(request_id) | Get job status |
logs(request_id) | Get job logs |
list_jobs() | List recent jobs |
update_job(request_id, **opts) | Update job settings (no_sleep, startup_command) |
enable_access(id, port) | Enable URL or SSH access (low-level) |
upload_file(path) | Upload a file, returns file ID |
upload_files(paths) | Upload files, returns file IDs |
list_workspaces() | List persistent workspaces |
delete_workspace(id) | Delete workspace + cloud data |
sync(request_id) | Sync workspace to S3 now |
url_alias_create(sub, ws_id) | Create a stable URL alias |
url_alias_list() | List URL aliases |
url_alias_delete(sub) | Delete a URL alias |
cron_create(**opts) | Create a cron job |
cron_list() | List cron jobs |
cron_update(id, **opts) | Update a cron job |
cron_delete(id) | Delete a cron job |
usage(window_days) | Get usage stats |
| Parameter | Description |
|---|---|
workspace_id | Name the workspace. Local only unless persistent=True. |
persistent | Keep sandbox alive, sync workspace to S3. Files persist indefinitely. |
base_workspace_id | Mount a workspace read-only as base image |
no_sleep | Never auto-sleep (requires persistent=True) |
ephemeral | No workspace, no sync (fastest) |
file_ids | List of uploaded file IDs to include |
startup_command | Command to run when sandbox wakes |
from mags import Mags
m = Mags() # reads MAGS_API_TOKEN from env
# Run a command and wait
result = m.run_and_wait("echo Hello World")
print(result["status"]) # "completed"
# Local workspace (no S3 sync, good for analysis)
m.run_and_wait("python3 analyze.py", workspace_id="analysis")
# Persistent workspace (synced to S3)
m.run("pip install flask",
workspace_id="my-project", persistent=True)
# Create a sandbox (local disk)
m.new("my-project")
# Create with S3 persistence
m.new("my-project", persistent=True)
# Execute commands on existing sandbox
result = m.exec("my-project", "ls -la /root")
print(result["output"])
# Public URL
m.new("webapp", persistent=True)
info = m.url("webapp", port=3000)
print(info["url"]) # https://xyz.apps.magpiecloud.com
# Always-on sandbox (never auto-sleeps)
m.run("python3 worker.py",
workspace_id="worker", persistent=True, no_sleep=True)
# Upload files
file_ids = m.upload_files(["script.py", "data.csv"])
m.run_and_wait("python3 /uploads/script.py", file_ids=file_ids)
# Workspaces
workspaces = m.list_workspaces()
m.delete_workspace("myproject")
# Cron
m.cron_create(name="backup", cron_expression="0 0 * * *",
script="tar czf backup.tar.gz /data", workspace_id="backups")
npm install @magpiecloud/mags
export MAGS_API_TOKEN="your-token"
| Method | Description |
|---|---|
run(script, opts) | Submit a job (returns immediately) |
runAndWait(script, opts) | Submit + block until complete |
new(name, opts) | Create a VM sandbox (add persistent: true for S3) |
exec(nameOrId, command) | Run command on existing sandbox via SSH |
stop(nameOrId) | Stop a running job |
findJob(nameOrId) | Find job by name or workspace |
url(nameOrId, port) | Enable public URL access |
status(requestId) | Get job status |
logs(requestId) | Get job logs |
list() | List recent jobs |
updateJob(requestId, opts) | Update job settings (noSleep, startupCommand) |
enableAccess(requestId, port) | Enable URL or SSH access |
resize(workspace, diskGb) | Resize workspace disk |
uploadFiles(paths) | Upload files, returns file IDs |
sync(requestId) | Sync workspace to S3 now |
listWorkspaces() | List persistent workspaces |
deleteWorkspace(id) | Delete workspace + cloud data |
urlAliasCreate(sub, wsId) | Create a stable URL alias |
urlAliasList() | List URL aliases |
urlAliasDelete(sub) | Delete a URL alias |
cronCreate(opts) | Create a cron job |
cronList() | List cron jobs |
cronDelete(id) | Delete a cron job |
usage(opts) | Get usage stats |
| Parameter | Description |
|---|---|
workspaceId | Name the workspace. Local only unless persistent: true. |
persistent | Keep sandbox alive, sync workspace to S3. Files persist indefinitely. |
baseWorkspaceId | Mount a workspace read-only as base image |
noSleep | Never auto-sleep (requires persistent: true) |
ephemeral | No workspace, no sync (fastest) |
fileIds | Array of uploaded file IDs to include |
startupCommand | Command to run when sandbox wakes |
const Mags = require('@magpiecloud/mags');
const mags = new Mags({ apiToken: process.env.MAGS_API_TOKEN });
// Run a command and wait
const result = await mags.runAndWait('echo Hello World');
console.log(result.status); // "completed"
// Local workspace (no S3 sync, good for analysis)
await mags.runAndWait('python3 analyze.py', { workspaceId: 'analysis' });
// Persistent workspace (synced to S3)
await mags.runAndWait('pip install flask', { workspaceId: 'myproject', persistent: true });
await mags.runAndWait('python3 app.py', { workspaceId: 'myproject', persistent: true });
// Base image
await mags.runAndWait('npm test', { baseWorkspaceId: 'golden' });
await mags.runAndWait('npm test', { baseWorkspaceId: 'golden', workspaceId: 'fork-1', persistent: true });
// Create a sandbox
await mags.new('dev', { persistent: true });
// SSH access
const job = await mags.run('sleep 3600', { workspaceId: 'dev', persistent: true });
const ssh = await mags.enableAccess(job.requestId, 22);
console.log(`ssh root@${ssh.sshHost} -p ${ssh.sshPort}`);
// Public URL
const webJob = await mags.run('python3 -m http.server 8080', {
workspaceId: 'webapp', persistent: true,
startupCommand: 'python3 -m http.server 8080',
});
const { url } = await mags.url('webapp', 8080);
console.log(url);
// Always-on sandbox (never auto-sleeps)
await mags.run('python3 worker.py', {
workspaceId: 'worker', persistent: true, noSleep: true,
});
// Upload files
const fileId = await mags.uploadFile('script.py');
await mags.runAndWait('python3 /uploads/script.py', { fileIds: [fileId] });
// Cron
await mags.cronCreate({
name: 'backup', cronExpression: '0 0 * * *',
script: 'tar czf backup.tar.gz /data', workspaceId: 'backups',
});
Persistent Workspaces
Use -w for a local workspace (no cloud sync, good for throwaway analysis). Add -p to sync to S3 — files, packages, and configs persist indefinitely. Public URLs and always-on mode require -p.
Always-On Servers
By default, persistent sandboxes auto-sleep after 10 minutes of inactivity to save resources. With the --no-sleep flag, your VM stays running 24/7 — perfect for web servers, workers, and background processes.
# CLI
mags run -w my-api -p --no-sleep --url --port 3000 'node server.js'
# Python
m.run("node server.js",
workspace_id="my-api", persistent=True, no_sleep=True)
# Node.js
await mags.run('node server.js', {
workspaceId: 'my-api', persistent: true, noSleep: true,
});
Always-on sandboxes are automatically monitored. If the host goes down, your VM is re-provisioned on a healthy server within ~60 seconds — no manual intervention needed.
Always-on sandboxes require the -p (persistent) flag and keep the VM in a running state indefinitely. Combine with --url to expose a public HTTPS endpoint, and --startup-command to auto-restart your process if the VM recovers.
SDKs + API
pip install magpie-mags
from mags import Mags
m = Mags() # reads MAGS_API_TOKEN from env
# Create a sandbox, run commands on it
m.new("demo") # local disk; use persistent=True for S3
result = m.exec("demo", "uname -a")
print(result["output"])
# Or run a one-shot script
result = m.run_and_wait("echo Hello!")
print(result["status"]) # "completed"
run(script, **opts) — submit a job
run_and_wait(script, **opts) — submit + block
new(name, **opts) — create VM sandbox
exec(name, command) — run on existing sandbox
stop(name_or_id) — stop a job
find_job(name_or_id) — find by name/workspace
url(name_or_id, port) — enable public URL
resize(workspace, disk_gb) — resize disk
status(id) / logs(id) / list_jobs()
upload_file(path) / upload_files(paths)
list_workspaces() / delete_workspace(id)
sync(id) — sync workspace to S3
cron_create(**opts) / cron_list() / cron_delete(id)
npm install @magpiecloud/mags
const Mags = require('@magpiecloud/mags');
const mags = new Mags({
apiToken: process.env.MAGS_API_TOKEN,
});
const result = await mags.runAndWait('echo Hello World');
console.log(result.status);
console.log(result.logs);
run(script, opts) — submit a job
runAndWait(script, opts) — submit + block
new(name, opts) — create VM sandbox
exec(nameOrId, command) — run on existing sandbox
stop(nameOrId) — stop a job
findJob(nameOrId) — find by name/workspace
url(nameOrId, port) — enable public URL
status(id) / logs(id) / list()
enableAccess(requestId, port) — URL or SSH
uploadFiles(paths) — upload files
listWorkspaces() / deleteWorkspace(id)
sync(id) — sync workspace to S3
cronCreate(opts) / cronList() / cronDelete(id)
curl -X POST https://api.magpiecloud.com/api/v1/mags-jobs \
-H "Authorization: Bearer $MAGS_API_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"script": "echo Hello World",
"type": "inline",
"workspace_id": "myproject"
}'
POST /mags-jobs — submit job
GET /mags-jobs — list jobs
GET /mags-jobs/:id/status — status
GET /mags-jobs/:id/logs — logs
POST /mags-jobs/:id/access — URL/SSH
POST /mags-jobs/:id/stop — stop job
POST /mags-jobs/:id/sync — sync workspace
PATCH /mags-jobs/:id — update
POST /mags-files — upload file
GET /mags-workspaces — list ws
DELETE /mags-workspaces/:id — delete ws
POST /mags-url-aliases — create alias
GET /mags-url-aliases — list aliases
DELETE /mags-url-aliases/:sub — delete alias
POST /mags-cron — create cron
GET /mags-cron — list cron
PATCH /mags-cron/:id — update cron
DELETE /mags-cron/:id — delete cron
Resources
Sign in with Google or email to access jobs and tokens.
Open login
View usage summaries and recent jobs.
Open usage
Create and manage tokens for CLI and SDK access.
Open tokens
Install the Claude Code skill to run sandboxes from Claude.
Open Claude skill
Copy ready-to-run recipes for common workflows.
Open cookbook