|
12 | 12 |
|
13 | 13 | from llama_stack.core.access_control.access_control import AccessDeniedError |
14 | 14 | from llama_stack.core.datatypes import AuthenticationRequiredError |
15 | | -from llama_stack.core.server.server import translate_exception |
| 15 | +from llama_stack.core.server.server import remove_disabled_providers, translate_exception |
16 | 16 |
|
17 | 17 |
|
18 | 18 | class TestTranslateException: |
@@ -194,3 +194,70 @@ def test_multiple_access_denied_scenarios(self): |
194 | 194 | assert isinstance(result3, HTTPException) |
195 | 195 | assert result3.status_code == 403 |
196 | 196 | assert result3.detail == "Permission denied: Access denied" |
| 197 | + |
| 198 | + |
class TestRemoveDisabledProviders:
    """Tests for the remove_disabled_providers config-cleanup helper.

    The helper is expected to strip provider entries that are disabled
    (sentinel provider_id or empty provider_id) while leaving registered
    resources — including models with a None provider_model_id — intact.
    """

    def test_remove_explicitly_disabled_provider(self):
        """A provider whose provider_id is the '__disabled__' sentinel is dropped."""
        cfg = {
            "providers": {
                "inference": [
                    {"provider_id": "openai", "provider_type": "remote::openai", "config": {}},
                    {"provider_id": "__disabled__", "provider_type": "remote::vllm", "config": {}},
                ]
            }
        }

        cleaned = remove_disabled_providers(cfg)

        inference = cleaned["providers"]["inference"]
        assert len(inference) == 1
        assert inference[0]["provider_id"] == "openai"

    def test_remove_empty_provider_id(self):
        """A provider whose provider_id is the empty string is dropped."""
        cfg = {
            "providers": {
                "inference": [
                    {"provider_id": "openai", "provider_type": "remote::openai", "config": {}},
                    {"provider_id": "", "provider_type": "remote::vllm", "config": {}},
                ]
            }
        }

        cleaned = remove_disabled_providers(cfg)

        inference = cleaned["providers"]["inference"]
        assert len(inference) == 1
        assert inference[0]["provider_id"] == "openai"

    def test_keep_models_with_none_provider_model_id(self):
        """Models with provider_model_id=None must survive cleanup unchanged."""
        cfg = {
            "registered_resources": {
                "models": [
                    {
                        "model_id": "llama-3-2-3b",
                        "provider_id": "vllm-inference",
                        "model_type": "llm",
                        "provider_model_id": None,
                        "metadata": {},
                    },
                    {
                        "model_id": "gpt-4o-mini",
                        "provider_id": "openai",
                        "model_type": "llm",
                        "provider_model_id": None,
                        "metadata": {},
                    },
                    {
                        "model_id": "granite-embedding-125m",
                        "provider_id": "sentence-transformers",
                        "model_type": "embedding",
                        "provider_model_id": "ibm-granite/granite-embedding-125m-english",
                        "metadata": {"embedding_dimension": 768},
                    },
                ]
            }
        }

        cleaned = remove_disabled_providers(cfg)

        # All three models survive, in their original order.
        surviving_ids = [m["model_id"] for m in cleaned["registered_resources"]["models"]]
        assert surviving_ids == [
            "llama-3-2-3b",
            "gpt-4o-mini",
            "granite-embedding-125m",
        ]
0 commit comments