diff --git a/README.md b/README.md index 72511637..23570e00 100644 --- a/README.md +++ b/README.md @@ -258,6 +258,20 @@ result = lx.extract( ) ``` +**Option 4: GCP project authentication** +If you don't have an API key, you can initialize your model through Vertex AI with a Project ID and location: +```python +result = lx.extract( + text_or_documents=input_text, + prompt_description="Extract information...", + examples=[...], + model_id="gemini-2.5-flash", + project_id="your-gcp-project-id", + location="your-project-location", # For example, 'us-central1' +) +``` + + ## Adding Custom Model Providers LangExtract supports custom LLM providers via a lightweight plugin system. You can add support for new models without changing core code. diff --git a/langextract/providers/gemini.py b/langextract/providers/gemini.py index 1d1f58f9..7c30234d 100644 --- a/langextract/providers/gemini.py +++ b/langextract/providers/gemini.py @@ -51,6 +51,8 @@ class GeminiLanguageModel(base_model.BaseLanguageModel): model_id: str = 'gemini-2.5-flash' api_key: str | None = None + project_id: str | None = None + location: str | None = None gemini_schema: schemas.gemini.GeminiSchema | None = None format_type: data.FormatType = data.FormatType.JSON temperature: float = 0.0 @@ -84,6 +86,8 @@ def __init__( self, model_id: str = 'gemini-2.5-flash', api_key: str | None = None, + project_id: str | None = None, + location: str | None = None, gemini_schema: schemas.gemini.GeminiSchema | None = None, format_type: data.FormatType = data.FormatType.JSON, temperature: float = 0.0, @@ -96,6 +100,8 @@ def __init__( Args: model_id: The Gemini model ID to use. api_key: API key for Gemini service. + project_id: Google Cloud project ID for Vertex AI authentication. + location: Vertex AI location, e.g. 'us-central1'. gemini_schema: Optional schema for structured output. format_type: Output format (JSON or YAML). temperature: Sampling temperature. 
@@ -117,16 +123,20 @@ def __init__( self.model_id = model_id self.api_key = api_key + self.project_id = project_id + self.location = location self.gemini_schema = gemini_schema self.format_type = format_type self.temperature = temperature self.max_workers = max_workers self.fence_output = fence_output - if not self.api_key: - raise exceptions.InferenceConfigError('API key not provided.') - - self._client = genai.Client(api_key=self.api_key) + if self.api_key: + self._client = genai.Client(api_key=self.api_key) + elif self.project_id and self.location: + self._client = genai.Client(vertexai=True, project=self.project_id, location=self.location) + else: + raise exceptions.InferenceConfigError('Provide an API key, or both project_id and location for Vertex AI.') super().__init__( constraint=schema.Constraint(constraint_type=schema.ConstraintType.NONE)