Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,20 @@ result = lx.extract(
)
```

**Option 4: GCP project authentication**
If you don't have an API key, you can initialize your model through Vertex AI with a GCP project ID and location:
```python
result = lx.extract(
text_or_documents=input_text,
prompt_description="Extract information...",
examples=[...],
model_id="gemini-2.5-flash",
    project_id="your-gcp-project-id",
location="your-project-location" # For example, 'us-central1'
)
```


## Adding Custom Model Providers

LangExtract supports custom LLM providers via a lightweight plugin system. You can add support for new models without changing core code.
Expand Down
18 changes: 14 additions & 4 deletions langextract/providers/gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,8 @@ class GeminiLanguageModel(base_model.BaseLanguageModel):

model_id: str = 'gemini-2.5-flash'
api_key: str | None = None
project_id: str | None = None
location: str | None = None
gemini_schema: schemas.gemini.GeminiSchema | None = None
format_type: data.FormatType = data.FormatType.JSON
temperature: float = 0.0
Expand Down Expand Up @@ -84,6 +86,8 @@ def __init__(
self,
model_id: str = 'gemini-2.5-flash',
api_key: str | None = None,
project_id: str | None = None,
location: str | None = None,
gemini_schema: schemas.gemini.GeminiSchema | None = None,
format_type: data.FormatType = data.FormatType.JSON,
temperature: float = 0.0,
Expand All @@ -96,6 +100,8 @@ def __init__(
Args:
model_id: The Gemini model ID to use.
api_key: API key for Gemini service.
project_id: Project ID for Gemini service.
location: Location for Gemini service.
gemini_schema: Optional schema for structured output.
format_type: Output format (JSON or YAML).
temperature: Sampling temperature.
Expand All @@ -117,16 +123,20 @@ def __init__(

self.model_id = model_id
self.api_key = api_key
self.project_id = project_id
self.location = location
self.gemini_schema = gemini_schema
self.format_type = format_type
self.temperature = temperature
self.max_workers = max_workers
self.fence_output = fence_output

if not self.api_key:
raise exceptions.InferenceConfigError('API key not provided.')

self._client = genai.Client(api_key=self.api_key)
if self.api_key:
self._client = genai.Client(api_key=self.api_key)
elif (self.project_id and self.location):
self._client = genai.Client(vertexai=True, project=self.project_id, location=self.location)
else:
raise exceptions.InferenceConfigError('API key OR project_id/location not provided.')

super().__init__(
constraint=schema.Constraint(constraint_type=schema.ConstraintType.NONE)
Expand Down
Loading