Google credentials

This commit is contained in:
Matteo Rosati
2026-01-29 12:54:47 +01:00
parent b18c56e2f2
commit 20919298c8
2 changed files with 48 additions and 2 deletions

42
lib.py
View File

@@ -5,9 +5,12 @@ with Vertex AI RAG (Retrieval-Augmented Generation) support.
"""
import asyncio
import json
import os
import threading
from google import genai
from google.genai import types
from google.oauth2 import service_account
from dotenv import load_dotenv
from llm_config import generate_content_config
@@ -16,6 +19,39 @@ from llm_config import generate_content_config
load_dotenv()
def get_credentials():
    """Get Google Cloud credentials from the environment.

    Supports two methods:
      1. GOOGLE_CREDENTIALS_JSON: direct JSON content as a string (production)
      2. GOOGLE_APPLICATION_CREDENTIALS: path to a JSON key file (local dev)

    Returns:
        service_account.Credentials | None: the loaded credentials, or None
        when neither variable yields credentials, so the client falls back to
        default credentials (useful when running on Google Cloud with a
        service account attached).

    Raises:
        ValueError: if GOOGLE_CREDENTIALS_JSON is set but is not valid JSON.
    """
    # Prefer inline JSON content — no key file needs to exist on the host.
    credentials_json = os.getenv("GOOGLE_CREDENTIALS_JSON")
    if credentials_json:
        try:
            credentials_info = json.loads(credentials_json)
        except json.JSONDecodeError as e:
            # Chain the original decode error so the root cause is preserved.
            raise ValueError(
                f"Invalid JSON in GOOGLE_CREDENTIALS_JSON: {e}"
            ) from e
        return service_account.Credentials.from_service_account_info(
            credentials_info
        )

    # Fall back to a key file on disk (standard local-development setup).
    # NOTE(review): a set-but-nonexistent path silently falls through to the
    # default-credentials case below — confirm this is intended rather than
    # a configuration error worth surfacing.
    credentials_path = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
    if credentials_path and os.path.exists(credentials_path):
        return service_account.Credentials.from_service_account_file(
            credentials_path
        )

    # Neither source provided: return None to let the client use default
    # credentials (e.g. an attached service account on Google Cloud).
    return None
# Name of the Gemini model passed to generate_content_stream below.
GEMINI_MODEL: str = "gemini-3-pro-preview"
@@ -42,10 +78,12 @@ async def generate(prompt: str):
def run_streaming():
"""Run the synchronous streaming in a separate thread."""
try:
client = genai.Client(vertexai=True)
credentials = get_credentials()
client = genai.Client(vertexai=True, credentials=credentials)
contents = [
types.Content(role="user", parts=[types.Part.from_text(text=prompt)]),
types.Content(role="user", parts=[
types.Part.from_text(text=prompt)]),
]
for chunk in client.models.generate_content_stream(