Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 32 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
{
"name": "SelectPilot",
"image": "mcr.microsoft.com/devcontainers/javascript-node:22-bookworm",
"features": {
"ghcr.io/devcontainers/features/python:1": {
"version": "3.12"
}
},
"forwardPorts": [8083],
"portsAttributes": {
"8083": {
"label": "Nano Server",
"onAutoForward": "notify"
}
},
"postCreateCommand": "corepack enable && corepack prepare pnpm@10.33.0 --activate && pnpm install",
"remoteEnv": {
"CHROMEAI_BIND_HOST": "0.0.0.0",
"CHROMEAI_RUN_DIR": "/tmp/selectpilot/run",
"CHROMEAI_LOG_DIR": "/tmp/selectpilot/logs"
},
"customizations": {
"vscode": {
"extensions": [
"saoudrizwan.claude-dev",
"dbaeumer.vscode-eslint",
"ms-python.python",
"ms-python.vscode-pylance"
]
}
}
}
135 changes: 135 additions & 0 deletions scripts/bootstrap-linux.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
#!/usr/bin/env bash
# Bootstrap SelectPilot on Linux or inside a dev container.
# Usage: bash scripts/bootstrap-linux.sh [--profile auto|<name>] [--skip-ollama-install] [--skip-model-pull]
# Exit on any error, on use of an unset variable, and on failure inside a pipeline.
set -euo pipefail

# Repository root: the parent of the directory containing this script.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# Runtime profile name; "auto" defers the choice to server/runtime_profiles.py.
PROFILE="auto"
# Opt-out flags, default off ("0"); flipped to "1" by the matching CLI switches.
SKIP_OLLAMA_INSTALL="0"
SKIP_MODEL_PULL="0"

# Minimal CLI parser; any unrecognized argument aborts the bootstrap.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --profile)
      # Consume the flag and its value; fall back to "auto" if the value is missing.
      PROFILE="${2:-auto}"
      shift 2
      ;;
    --skip-ollama-install)
      SKIP_OLLAMA_INSTALL="1"
      shift
      ;;
    --skip-model-pull)
      SKIP_MODEL_PULL="1"
      shift
      ;;
    *)
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

# Everything below assumes Linux tooling (sha256sum, pgrep, nohup paths);
# macOS users are redirected to the dedicated bootstrapper.
if [[ "$(uname -s)" != "Linux" ]]; then
  echo "This bootstrapper targets Linux / dev containers. For macOS, use bootstrap-macos-local.sh." >&2
  exit 1
fi

# Resolve the runtime profile via server/runtime_profiles.py and print a
# single JSON object describing it. When $PROFILE is "auto" the module's own
# recommendation is used; otherwise the named profile is loaded directly.
read_profile_json() {
  python3 - "$PROFILE" "$ROOT" <<'PY'
import json
import sys
from pathlib import Path

profile = sys.argv[1]
root = Path(sys.argv[2])
# Make server/runtime_profiles.py importable without installing the package.
sys.path.insert(0, str(root / "server"))

from runtime_profiles import build_bootstrap_commands, get_runtime_profile, recommend_runtime_profile

# Always compute the recommendation (its "reason" is reported either way);
# honour the recommended profile only in "auto" mode.
recommendation = recommend_runtime_profile()
selected = recommendation["recommended_profile"] if profile == "auto" else profile
runtime_profile = get_runtime_profile(selected)
commands = build_bootstrap_commands(runtime_profile.key, root)

# One-line JSON payload consumed by the calling shell.
print(json.dumps({
    "selected_profile": runtime_profile.key,
    "label": runtime_profile.label,
    "reason": recommendation["reason"],
    "generation_model": runtime_profile.generation_model,
    "embedding_model": runtime_profile.embedding_model,
    "command": commands["command"],
}))
PY
}

# Capture the profile JSON once, then flatten the fields this script needs
# into shell variables (SELECTED_PROFILE, GENERATION_MODEL, EMBEDDING_MODEL,
# REASON). shlex.quote makes each value safe to eval even if it contains
# spaces or shell metacharacters (e.g. the human-readable "reason" string).
PROFILE_JSON="$(read_profile_json)"
PROFILE_VARS="$(python3 - "$PROFILE_JSON" <<'PY'
import json
import shlex
import sys

payload = json.loads(sys.argv[1])
# Emit KEY=value lines, one per field, with the key upper-cased for the shell.
for key in ("selected_profile", "generation_model", "embedding_model", "reason"):
    print(f"{key.upper()}={shlex.quote(str(payload[key]))}")
PY
)"
# Safe: every value was shlex-quoted above, so this only defines the four variables.
eval "$PROFILE_VARS"

# --- Ollama: install, start, and pre-pull the models ------------------------
if [[ "$SKIP_OLLAMA_INSTALL" != "1" ]] && ! command -v ollama >/dev/null 2>&1; then
  echo "Installing Ollama..."
  # Official installer endpoint is ollama.com (the old ollama.ai host only
  # redirects there). NOTE: curl|sh runs a remote script as the current user.
  curl -fsSL https://ollama.com/install.sh | sh
fi

if ! pgrep -x "ollama" >/dev/null 2>&1; then
  echo "Starting Ollama service..."
  nohup ollama serve >/tmp/selectpilot-ollama.log 2>&1 &
  # Wait (up to 30s) until the API answers instead of a fixed 3s sleep: slow
  # container cold-starts would otherwise race the "ollama pull" calls below.
  # 11434 is Ollama's default listen port.
  for _ in $(seq 1 30); do
    if curl -fsS http://127.0.0.1:11434/api/version >/dev/null 2>&1; then
      break
    fi
    sleep 1
  done
fi

if [[ "$SKIP_MODEL_PULL" != "1" ]]; then
  echo "Pulling generation model: $GENERATION_MODEL"
  ollama pull "$GENERATION_MODEL"
  echo "Pulling embedding model: $EMBEDDING_MODEL"
  ollama pull "$EMBEDDING_MODEL"
fi

# Directories for runtime state (port info) and logs; environment variables
# win (the dev container sets CHROMEAI_RUN_DIR / CHROMEAI_LOG_DIR to /tmp paths).
RUN_DIR="${CHROMEAI_RUN_DIR:-${HOME}/.local/share/SelectPilot/run}"
LOG_DIR="${CHROMEAI_LOG_DIR:-${HOME}/.local/share/SelectPilot/logs}"
mkdir -p "$RUN_DIR" "$LOG_DIR"

# Integrity hash handed to the server so it can verify its own source file.
HASH="$(sha256sum "$ROOT/server/nano_server.py" | awk '{print $1}')"
BIND="${CHROMEAI_BIND_HOST:-127.0.0.1}"
# When binding to all interfaces (0.0.0.0), display the loopback address for the
# local bridge URL as that is what the browser extension and tools use to connect.
if [[ "$BIND" == "0.0.0.0" ]]; then
  DISPLAY_URL="http://127.0.0.1:8083"
else
  DISPLAY_URL="http://${BIND}:8083"
fi

echo "Starting nano server..."
# Model choices flow to the server via environment variables; nohup detaches
# the server so it keeps running after this bootstrap shell exits.
CHROMEAI_OLLAMA_MODEL="$GENERATION_MODEL" \
CHROMEAI_OLLAMA_EMBED_MODEL="$EMBEDDING_MODEL" \
nohup python3 "$ROOT/server/nano_server.py" \
  --bind "$BIND" \
  --binary-path "$ROOT/server/nano_server.py" \
  --binary-hash "$HASH" \
  --run-dir "$RUN_DIR" \
  --log-dir "$LOG_DIR" \
  >"$LOG_DIR/nano.log" 2>"$LOG_DIR/nano.err" &

# Human-readable summary; unquoted EOF so the variables expand.
cat <<EOF

SelectPilot bootstrap complete.

Profile: $SELECTED_PROFILE
Reason: $REASON
Generation model: $GENERATION_MODEL
Embedding model: $EMBEDDING_MODEL
Run dir: $RUN_DIR
Log dir: $LOG_DIR
Local bridge URL: $DISPLAY_URL

Next recommended command:
pnpm benchmark:local
EOF
34 changes: 29 additions & 5 deletions server/nano_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,12 +191,35 @@ def do_POST(self):
self._write_json(200, resp)


def _default_run_dir() -> str:
env = os.environ.get('CHROMEAI_RUN_DIR')
if env:
return env
import platform
if platform.system() == 'Darwin':
return os.path.expanduser('~/Library/Application Support/SelectPilot/run')
return os.path.expanduser('~/.local/share/SelectPilot/run')


def _default_log_dir() -> str:
env = os.environ.get('CHROMEAI_LOG_DIR')
if env:
return env
import platform
if platform.system() == 'Darwin':
return os.path.expanduser('~/Library/Logs/SelectPilot')
return os.path.expanduser('~/.local/share/SelectPilot/logs')


def main():
parser = argparse.ArgumentParser()
parser.add_argument('--port', type=int, default=DEFAULT_PORT)
parser.add_argument('--port-range', default=None)
parser.add_argument('--run-dir', default=os.path.expanduser('~/Library/Application Support/SelectPilot/run'))
parser.add_argument('--log-dir', default=os.path.expanduser('~/Library/Logs/SelectPilot'))
parser.add_argument('--bind', default=os.environ.get('CHROMEAI_BIND_HOST', '127.0.0.1'),
help='Address to bind the server to (default: 127.0.0.1; '
'override with CHROMEAI_BIND_HOST env var)')
parser.add_argument('--run-dir', default=_default_run_dir())
parser.add_argument('--log-dir', default=_default_log_dir())
parser.add_argument('--binary-path', default=None)
parser.add_argument('--binary-hash', default=None)
args = parser.parse_args()
Expand All @@ -211,13 +234,14 @@ def main():
expected = args.binary_hash or os.environ.get('CHROMEAI_BINARY_HASH')
verify_binary(binary_path, expected)
port = args.port
bind = args.bind
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
if s.connect_ex(("127.0.0.1", port)) == 0:
if s.connect_ex((bind, port)) == 0:
raise RuntimeError(f"port {port} is already in use")
write_port_info(port_file, port)

server = HTTPServer(('127.0.0.1', port), Handler)
print(f"nano server listening on {port}")
server = HTTPServer((bind, port), Handler)
print(f"nano server listening on {bind}:{port}")
server.serve_forever()


Expand Down