diff --git a/gemini_key_manager/actions.py b/gemini_key_manager/actions.py
index 65d2bbc..aef9763 100644
--- a/gemini_key_manager/actions.py
+++ b/gemini_key_manager/actions.py
@@ -1,5 +1,10 @@
-"""
-Core action functions for the Gemini Key Management script.
+"""Implements core business logic for Gemini API key lifecycle management.
+
+This module handles:
+- Project reconciliation between cloud state and local database
+- API key creation/deletion operations
+- Thread-safe database interactions
+- Interactive Terms of Service acceptance workflows
 """
 import logging
 import threading
@@ -11,8 +16,21 @@ from google.cloud import resourcemanager_v3, api_keys_v2
 from . import config, gcp_api, database, utils
 from .exceptions import TermsOfServiceNotAcceptedError
 
-# Helper class to create a mock key object compatible with add_key_to_database
 class TempKey:
+    """Mock key object compatible with database operations.
+
+    Provides a temporary representation of an API key for database insertion
+    when direct API key string retrieval is not possible.
+
+    Attributes:
+        key_string (str): The actual API key string
+        uid (str): Unique identifier of the key
+        name (str): Full resource name of the key
+        display_name (str): Human-readable display name
+        create_time (datetime): Key creation timestamp
+        update_time (datetime): Last update timestamp
+        restrictions (api_keys_v2.Restrictions): Key usage restrictions
+    """
     def __init__(self, cloud_key, key_string):
         self.key_string = key_string
         self.uid = cloud_key.uid
@@ -23,38 +41,69 @@ class TempKey:
         self.restrictions = cloud_key.restrictions
 
 class TosAcceptanceHelper:
-    """Helper class to manage the interactive ToS acceptance process using an Event."""
+    """Manages Terms of Service acceptance workflow with thread synchronization.
+
+    Coordinates interactive ToS acceptance across multiple threads to prevent
+    duplicate prompts and ensure proper sequencing.
+
+    Attributes:
+        lock (threading.Lock): Synchronizes access to prompt state
+        prompted_event (threading.Event): Signals ToS acceptance completion
+        prompt_in_progress (bool): Indicates active prompt display status
+    """
 
     def __init__(self):
         self.lock = threading.Lock()
         self.prompted_event = threading.Event()
         self.prompt_in_progress = False
 
 def _enable_api_with_interactive_retry(project_id, creds, dry_run, tos_helper):
-    """Calls the enable_api function with a retry loop that handles ToS exceptions."""
+    """Attempts to enable API with retry logic for ToS acceptance.
+
+    Args:
+        project_id (str): Target GCP project ID
+        creds (Credentials): Authenticated Google credentials
+        dry_run (bool): Simulation mode flag
+        tos_helper (TosAcceptanceHelper): ToS workflow coordinator
+
+    Returns:
+        bool: True if API enabled successfully
+
+    Raises:
+        GoogleAPICallError: For non-ToS related API failures
+    """
     while True:
         try:
             if gcp_api.enable_api(project_id, creds, dry_run=dry_run):
                 return True
-            else:
-                return False
+            return False
         except TermsOfServiceNotAcceptedError as err:
             with tos_helper.lock:
                 if not tos_helper.prompt_in_progress:
                     tos_helper.prompt_in_progress = True
                     logging.error(err.message)
-                    logging.error(f"Please accept the terms by visiting this URL: {err.url}")
-                    input("Press Enter to continue after accepting the Terms of Service...")
+                    logging.error(f"Accept terms at: {err.url}")
+                    input("Press Enter after accepting Terms of Service...")
                     tos_helper.prompted_event.set()
-            tos_helper.prompted_event.wait()
         except Exception as e:
-            logging.error(f"An unexpected error occurred while trying to enable API for project {project_id}: {e}", exc_info=True)
+            logging.error(f"API enablement error for {project_id}: {e}", exc_info=True)
             return False
 
 def reconcile_project_keys(project, creds, dry_run, db_lock, account_entry):
-    """Reconciles API keys between Google Cloud and the local database for a single project."""
+    """Reconciles cloud and local database API key states.
+
+    Args:
+        project (Project): GCP Project resource
+        creds (Credentials): Authenticated credentials
+        dry_run (bool): Simulation mode flag
+        db_lock (threading.Lock): Database access lock
+        account_entry (dict): Account data structure
+
+    Returns:
+        bool: True if Gemini key exists, False otherwise
+    """
     project_id = project.project_id
-    logging.info(f" Reconciling keys for project {project_id}")
+    logging.info(f"Reconciling keys for {project_id}")
     gemini_key_exists = False
 
     try:
@@ -93,13 +142,13 @@ def reconcile_project_keys(project, creds, dry_run, db_lock, account_entry):
         local_only_uids = local_uids - cloud_uids
 
         for uid in synced_uids:
-            logging.info(f" Key {uid} is synchronized.")
+            logging.info(f"Key {uid} synchronized")
 
         for uid in cloud_only_uids:
             key_object = cloud_keys[uid]
-            logging.info(f" Key {uid} ({key_object.display_name}) found in cloud only. Adding to local database.")
+            logging.info(f"Adding cloud-only key {uid} ({key_object.display_name})")
             if dry_run:
-                logging.info(f" [DRY RUN] Would fetch key string for {uid} and add to database.")
+                logging.info(f"[DRY RUN] Would fetch key string for {uid}")
                 continue
 
             try:
@@ -108,14 +157,14 @@ def reconcile_project_keys(project, creds, dry_run, db_lock, account_entry):
                 with db_lock:
                     database.add_key_to_database(account_entry, project, hydrated_key)
             except google_exceptions.PermissionDenied:
-                logging.warning(f" Permission denied to get key string for {uid}. Skipping.")
+                logging.warning(f"Permission denied to get key string for {uid}")
             except google_exceptions.GoogleAPICallError as err:
-                logging.error(f" Error getting key string for {uid}: {err}")
+                logging.error(f"Key string error for {uid}: {err}")
 
         for uid in local_only_uids:
-            logging.info(f" Key {uid} found in local database only. Marking as INACTIVE.")
+            logging.info(f"Marking local-only key {uid} as INACTIVE")
             if dry_run:
-                logging.info(f" [DRY RUN] Would mark key {uid} as INACTIVE.")
+                logging.info(f"[DRY RUN] Would deactivate {uid}")
                 continue
 
             with db_lock:
@@ -125,53 +174,72 @@ def reconcile_project_keys(project, creds, dry_run, db_lock, account_entry):
         return gemini_key_exists
 
     except google_exceptions.PermissionDenied:
-        logging.warning(f" Permission denied to list keys for project {project_id}. Skipping reconciliation.")
+        logging.warning(f"Permission denied listing keys for {project_id}")
        return False
    except google_exceptions.GoogleAPICallError as err:
-        logging.error(f" An API error occurred while reconciling keys for project {project_id}: {err}")
+        logging.error(f"API error during reconciliation: {err}")
         return False
 
 def _create_and_process_new_project(project_number, creds, dry_run, db_lock, account_entry, tos_helper):
-    """Creates a single project, waits for API enablement, and creates the key."""
+    """Creates and initializes new GCP project with API key.
+
+    Args:
+        project_number (str): Sequential project identifier
+        creds (Credentials): Authenticated credentials
+        dry_run (bool): Simulation mode flag
+        db_lock (threading.Lock): Database access lock
+        account_entry (dict): Account data structure
+        tos_helper (TosAcceptanceHelper): ToS workflow coordinator
+    """
     random_string = utils.generate_random_string()
     project_id = f"project{project_number}-{random_string}"
     display_name = f"Project{project_number}"
 
-    logging.info(f"Attempting to create project: ID='{project_id}', Name='{display_name}'")
+    logging.info(f"Creating project: {display_name} ({project_id})")
 
     if dry_run:
-        logging.info(f"[DRY RUN] Would create project '{display_name}' with ID '{project_id}'.")
+        logging.info(f"[DRY RUN] Would create {display_name}")
         return
 
     try:
         resource_manager = resourcemanager_v3.ProjectsClient(credentials=creds)
         project_to_create = resourcemanager_v3.Project(project_id=project_id, display_name=display_name)
         operation = resource_manager.create_project(project=project_to_create)
-        logging.info(f"Waiting for project creation operation for '{display_name}' to complete...")
+        logging.info(f"Awaiting project creation: {display_name}")
         created_project = operation.result()
-        logging.info(f"Successfully initiated creation for project '{display_name}'.")
+        logging.info(f"Project created: {display_name}")
 
         if _enable_api_with_interactive_retry(project_id, creds, dry_run, tos_helper):
-            logging.info(f"Generative AI API enabled for project '{display_name}' ({project_id}). Project is ready.")
+            logging.info(f"API enabled for {display_name}")
             key_object = gcp_api.create_api_key(project_id, creds, dry_run=dry_run)
             if key_object:
                 with db_lock:
                     database.add_key_to_database(account_entry, created_project, key_object)
         else:
-            logging.error(f"Failed to enable API for new project '{display_name}' ({project_id}). Skipping key creation.")
+            logging.error(f"API enablement failed for {display_name}")
 
     except Exception as e:
-        logging.error(f"Failed to create project '{display_name}': {e}", exc_info=True)
+        logging.error(f"Project creation failed: {e}", exc_info=True)
 
 def process_project_for_action(project, creds, action, dry_run, db_lock, account_entry, tos_helper):
-    """Processes a single existing project for the given action in a thread-safe manner."""
+    """Executes specified action on a single GCP project.
+
+    Args:
+        project (Project): Target GCP project
+        creds (Credentials): Authenticated credentials
+        action (str): 'create' or 'delete' action
+        dry_run (bool): Simulation mode flag
+        db_lock (threading.Lock): Database access lock
+        account_entry (dict): Account data structure
+        tos_helper (TosAcceptanceHelper): ToS workflow coordinator
+    """
     project_id = project.project_id
-    logging.info(f"- Starting to process project: {project_id} ({project.display_name})")
+    logging.info(f"Processing {project_id} ({project.display_name})")
 
     if action == 'create':
         gemini_key_exists = reconcile_project_keys(project, creds, dry_run, db_lock, account_entry)
         if gemini_key_exists:
-            logging.info(f" '{config.GEMINI_API_KEY_DISPLAY_NAME}' already exists in project {project_id}. Skipping creation.")
+            logging.info(f"Existing Gemini key in {project_id}")
             return
 
         if _enable_api_with_interactive_retry(project_id, creds, dry_run, tos_helper):
@@ -185,16 +253,25 @@ def process_project_for_action(project, creds, action, dry_run, db_lock, account
         with db_lock:
             database.remove_keys_from_database(account_entry, project_id, deleted_keys_uids)
 
-    logging.info(f"- Finished processing project: {project_id}")
+    logging.info(f"Completed processing {project_id}")
 
 def process_account(email, creds, action, api_keys_data, dry_run=False, max_workers=5):
-    """Processes a single account for the given action."""
-    logging.info(f"--- Processing account: {email} for action: {action} ---")
+    """Orchestrates account-level key management operations.
+
+    Args:
+        email (str): Google account email
+        creds (Credentials): Authenticated credentials
+        action (str): 'create' or 'delete' action
+        api_keys_data (dict): Database structure
+        dry_run (bool): Simulation mode flag
+        max_workers (int): Max concurrent operations
+    """
+    logging.info(f"Processing account: {email} ({action})")
     if dry_run:
-        logging.info("*** DRY RUN MODE ENABLED ***")
+        logging.info("*** DRY RUN ACTIVE ***")
 
     if not creds:
-        logging.warning(f"Could not get credentials for {email}. Skipping.")
+        logging.warning(f"Invalid credentials for {email}")
         return
 
     account_entry = next((acc for acc in api_keys_data["accounts"] if acc.get("account_details", {}).get("email") == email), None)
@@ -216,10 +293,9 @@ def process_account(email, creds, action, api_keys_data, dry_run=False, max_work
         existing_projects = list(resource_manager.search_projects())
 
         if not existing_projects and action == 'create':
-            logging.warning(f"No projects found for {email}. This could be due to several reasons:")
-            logging.warning(" 1. The account truly has no projects.")
-            logging.warning(" 2. The Cloud Resource Manager API Terms of Service have not been accepted.")
-            logging.warning(f"Please ensure the ToS are accepted by visiting: https://console.cloud.google.com/iam-admin/settings?user={email}")
+            logging.warning(f"No projects found for {email}")
+            logging.warning("Possible reasons: No projects or unaccepted ToS")
+            logging.warning(f"Verify ToS: https://console.cloud.google.com/iam-admin/settings?user={email}")
 
         projects_to_create_count = 0
         if action == 'create':
@@ -231,11 +307,9 @@ def process_account(email, creds, action, api_keys_data, dry_run=False, max_work
         with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
             futures = []
 
-            # Submit tasks for existing projects
             for project in existing_projects:
                 futures.append(executor.submit(process_project_for_action, project, creds, action, dry_run, db_lock, account_entry, tos_helper))
 
-            # Submit tasks for new projects
             if action == 'create' and projects_to_create_count > 0:
                 for i in range(len(existing_projects), 12):
                     project_number = str(i + 1).zfill(2)
@@ -245,10 +319,9 @@ def process_account(email, creds, action, api_keys_data, dry_run=False, max_work
                 try:
                     future.result()
                 except Exception as exc:
-                    logging.error(f"A task in the thread pool generated an exception: {exc}", exc_info=True)
+                    logging.error(f"Task error: {exc}", exc_info=True)
 
     except google_exceptions.PermissionDenied as err:
-        logging.error(f"Permission denied for account {email}. Check IAM roles.")
-        logging.error(f" Error: {err}")
+        logging.error(f"Permission denied for {email}: {err}")
     except google_exceptions.GoogleAPICallError as err:
-        logging.error(f"An API error occurred while processing account {email}: {err}")
+        logging.error(f"API error processing {email}: {err}")
diff --git a/gemini_key_manager/auth.py b/gemini_key_manager/auth.py
index 8b48444..88620e0 100644
--- a/gemini_key_manager/auth.py
+++ b/gemini_key_manager/auth.py
@@ -1,5 +1,9 @@
-"""
-Functions for handling Google Cloud authentication.
+"""Implements Google Cloud authentication workflows.
+
+Handles OAuth2 credential management including:
+- Token refresh with retry logic
+- Interactive authentication flows
+- Credential storage/retrieval
 """
 import os
 import json
@@ -16,7 +20,16 @@
 logger = logging.getLogger(__name__)
 
 def get_and_refresh_credentials(email, max_retries=3, retry_delay=5):
-    """Tries to load and refresh credentials for an email with retries. Does not start interactive flow."""
+    """Manages credential lifecycle with automated refresh and retry.
+
+    Args:
+        email (str): Google account email address
+        max_retries (int): Maximum authentication retry attempts
+        retry_delay (int): Seconds between retry attempts
+
+    Returns:
+        Credentials: Valid credentials or None if unrecoverable
+    """
     token_file = os.path.join(config.CREDENTIALS_DIR, f"{email}.json")
     creds = None
     if os.path.exists(token_file):
@@ -49,7 +62,16 @@
     return None
 
 def run_interactive_auth(email, max_retries=3, retry_delay=5):
-    """Runs the interactive OAuth2 flow for a given email with retries."""
+    """Executes interactive OAuth2 flow with error handling.
+
+    Args:
+        email (str): Google account email to authenticate
+        max_retries (int): Allowed authentication attempts
+        retry_delay (int): Pause between failed attempts
+
+    Returns:
+        Credentials: Valid credentials on successful authentication
+    """
     for attempt in range(max_retries):
         try:
             logging.info(f"Please authenticate with: {email} (attempt {attempt + 1}/{max_retries})")
diff --git a/gemini_key_manager/config.py b/gemini_key_manager/config.py
index 680b77e..1350801 100644
--- a/gemini_key_manager/config.py
+++ b/gemini_key_manager/config.py
@@ -1,5 +1,9 @@
-"""
-Configuration constants for the Gemini Key Management script.
+"""Centralized configuration for Gemini Key Management system.
+
+Contains:
+- Directory paths for credentials and logs
+- API endpoint configurations
+- Security scopes and schema locations
 """
 
 import os
diff --git a/gemini_key_manager/database.py b/gemini_key_manager/database.py
index 95323c4..c3c20e2 100644
--- a/gemini_key_manager/database.py
+++ b/gemini_key_manager/database.py
@@ -1,5 +1,10 @@
-"""
-Functions for managing the JSON database of API keys.
+"""Manages persistent storage of API key metadata.
+
+Implements:
+- JSON schema validation
+- Thread-safe database operations
+- Key lifecycle tracking
+- Data versioning and backup
 """
 import os
 import json
@@ -10,7 +15,17 @@ import jsonschema
 from . import config
 
 def load_schema(filename):
-    """Loads a JSON schema from a file."""
+    """Validates and loads JSON schema definition.
+
+    Args:
+        filename (str): Path to schema file
+
+    Returns:
+        dict: Parsed schema document
+
+    Raises:
+        SystemExit: If the schema file is missing or invalid
+    """
     if not os.path.exists(filename):
         logging.error(f"Schema file not found at '{filename}'")
         sys.exit(1)
diff --git a/gemini_key_manager/exceptions.py b/gemini_key_manager/exceptions.py
index 3bb45c2..55b796a 100644
--- a/gemini_key_manager/exceptions.py
+++ b/gemini_key_manager/exceptions.py
@@ -1,9 +1,18 @@
-"""
-Custom exceptions for the Gemini Key Management script.
+"""Exception hierarchy for Gemini Key Management system.
+
+Defines domain-specific exceptions for:
+- Terms of Service compliance failures
+- Permission-related errors
+- API operation constraints
 """
 
 class TermsOfServiceNotAcceptedError(Exception):
-    """Raised when the Terms of Service for a required API have not been accepted."""
+    """Indicates unaccepted Terms of Service for critical API operations.
+
+    Attributes:
+        message (str): Human-readable error description
+        url (str): URL for Terms of Service acceptance portal
+    """
     def __init__(self, message, url):
         self.message = message
         self.url = url
diff --git a/gemini_key_manager/gcp_api.py b/gemini_key_manager/gcp_api.py
index 0e08337..d6b7f81 100644
--- a/gemini_key_manager/gcp_api.py
+++ b/gemini_key_manager/gcp_api.py
@@ -10,7 +10,19 @@ from google.api_core import exceptions as google_exceptions
 from . import config, utils, exceptions
 
 def enable_api(project_id, credentials, dry_run=False):
-    """Enables the Generative Language API."""
+    """Manages Generative Language API enablement with error handling.
+
+    Args:
+        project_id (str): Target GCP project ID
+        credentials (Credentials): Authenticated credentials
+        dry_run (bool): Simulation mode flag
+
+    Returns:
+        bool: True if enabled successfully
+
+    Raises:
+        TermsOfServiceNotAcceptedError: When required ToS not accepted
+    """
     service_name = config.GENERATIVE_LANGUAGE_API
     service_path = f"projects/{project_id}/services/{service_name}"
     service_usage_client = service_usage_v1.ServiceUsageClient(credentials=credentials)
@@ -42,7 +54,19 @@
     return False
 
 def create_api_key(project_id, credentials, dry_run=False):
-    """Creates a new, restricted API key."""
+    """Generates restricted API key with security constraints.
+
+    Args:
+        project_id (str): Target GCP project ID
+        credentials (Credentials): Authenticated credentials
+        dry_run (bool): Simulation mode flag
+
+    Returns:
+        api_keys_v2.Key: Created key object or None on failure
+
+    Raises:
+        PermissionDenied: If the credentials lack sufficient permissions
+    """
     if dry_run:
         logging.info(f" [DRY RUN] Would create API key for project {project_id}")
         # Return a mock key object for dry run
diff --git a/gemini_key_manager/main.py b/gemini_key_manager/main.py
index 53a4110..7ebf1bb 100644
--- a/gemini_key_manager/main.py
+++ b/gemini_key_manager/main.py
@@ -9,7 +9,14 @@ import concurrent.futures
 from . import utils, config, auth, database, actions
 
 def main():
-    """Main function to orchestrate API key creation or deletion."""
+    """Orchestrates API key lifecycle management workflow.
+
+    Handles:
+    - Command line argument parsing
+    - Credential management
+    - Multi-account processing
+    - Thread pool execution
+    """
     parser = argparse.ArgumentParser(description="Manage Gemini API keys in Google Cloud projects.")
     parser.add_argument("action", choices=['create', 'delete'], help="The action to perform: 'create' or 'delete' API keys.")
     parser.add_argument("--email", help="Specify a single email address to process. Required for 'delete'. If not provided for 'create', emails will be read from emails.txt.")
diff --git a/gemini_key_manager/utils.py b/gemini_key_manager/utils.py
index a9062b6..49415cc 100644
--- a/gemini_key_manager/utils.py
+++ b/gemini_key_manager/utils.py
@@ -11,7 +11,11 @@ from colorama import Fore, Style, init
 from . import config
 
 class ColoredFormatter(logging.Formatter):
-    """A custom logging formatter that adds color to console output."""
+    """Adds ANSI color coding to log output based on severity.
+
+    Attributes:
+        LOG_COLORS (dict): Maps log levels to color codes
+    """
 
     LOG_COLORS = {
         logging.DEBUG: Fore.CYAN,
@@ -36,7 +40,13 @@
         return message
 
 def setup_logging():
-    """Sets up logging to both console and a file, with colors for the console."""
+    """Configures dual logging to file and colorized console output.
+
+    Creates:
+    - Rotating file handler with full debug details
+    - Stream handler with color-coded brief format
+    Ensures proper directory structure for log files
+    """
     init(autoreset=True) # Initialize Colorama
 
     if not os.path.exists(config.LOG_DIR):