import React, { useEffect, useMemo, useState } from "react";
import { testProvider } from "../utils/api";
// Human-readable display names for each supported LLM provider, keyed by id.
const PROVIDER_LABELS = {
ollabridge: "OllaBridge Cloud",
openai: "OpenAI",
claude: "Claude",
watsonx: "Watsonx",
ollama: "Ollama",
};
// Ordered list of provider ids shown in the provider picker. Derived from
// the label map so the two can never drift apart; Object.keys preserves
// insertion order for string keys, so the order is unchanged.
const PROVIDERS = Object.keys(PROVIDER_LABELS);
// Authentication modes offered for the OllaBridge provider. Built from
// compact [id, label, icon] tuples so each row stays visually aligned.
const AUTH_MODES = [
["device", "Device Pairing", "📱"],
["apikey", "API Key", "🔑"],
["local", "Local Trust", "🏠"],
].map(([id, label, icon]) => ({ id, label, icon }));
/**
 * NOTE(review): this block appears to be badly garbled — the JSX element
 * tags have been stripped (likely by an extraction or copy-paste process),
 * leaving only text content, a few attribute fragments, and expression
 * braces. As written, the body is not syntactically valid JSX.
 *
 * Further evidence of mangling: the function is named `LoadingState` and
 * destructures `loadingMessage`, `loadingSlow`, and `onRetry` props, but
 * none of those are referenced below. Instead the body references `error`,
 * `savedMsg`, `provider`, `authMode`, `pairCode`/`setPairCode`,
 * `pairResult`, `handlePair`, `updateField`, `availableModels`,
 * `modelsError`, and `testResult` — none of which are in scope here. The
 * content reads like the render body of a larger AI-provider settings
 * component (provider picker, OllaBridge pairing/API-key/local-trust
 * configuration, model list, connection-test result), not a loading
 * indicator.
 *
 * Do not attempt to reconstruct the markup from this residue — recover the
 * original component from version control instead. The code below is left
 * byte-identical so the diff against the true original stays minimal.
 */
function LoadingState({ loadingMessage, loadingSlow, onRetry }) {
// NOTE(review): props destructured above are unused by the body below.
return (
AI Providers
Choose which LLM provider GitPilot should use for planning and agent
workflows. Provider settings are stored on the server.
{error &&
{error}
}
{savedMsg &&
{savedMsg}
}
{PROVIDERS.map((p) => (
))}
{provider === "ollabridge" && (
OllaBridge Cloud Configuration
Connect to OllaBridge Cloud or any OllaBridge instance for LLM
inference. No API key required for public endpoints.
{AUTH_MODES.map((m) => (
))}
{authMode === "device" && (
Enter the pairing code from your OllaBridge console and click
Pair.
setPairCode(e.target.value.toUpperCase())}
onKeyDown={(e) => e.key === "Enter" && handlePair()}
/>
{pairResult && (
{pairResult.message}
)}
)}
updateField("ollabridge", "base_url", e.target.value)
}
placeholder="https://your-ollabridge-endpoint"
/>
{(authMode === "apikey" || authMode === "local") && (
<>
updateField("ollabridge", "api_key", e.target.value)
}
placeholder="Optional API key"
/>
>
)}
updateField("ollabridge", "model", e.target.value)
}
placeholder="qwen2.5:1.5b"
/>
)}
{provider === "openai" && (
)}
{provider === "claude" && (
)}
{provider === "watsonx" && (
)}
{provider === "ollama" && (
)}
{availableModels.length > 0 && (
Available Models
{availableModels.map((model) => (
))}
)}
{modelsError &&
{modelsError}
}
{testResult && (
{testResult.health === "ok"
? testResult.details || "Provider connection successful."
: testResult.warning || "Provider connection failed."}
)}
);
}