concurrency

This commit is contained in:
2025-08-20 17:55:51 +05:30
parent b298f4a221
commit e917de2718
2 changed files with 47 additions and 19 deletions

View File

@@ -73,6 +73,12 @@ To create Gemini API keys for all users listed in `emails.txt`:
uv run main.py create uv run main.py create
``` ```
You can control the number of projects processed concurrently using the `--max-workers` flag (defaults to 5):
```bash
uv run main.py create --max-workers 10
```
The first time you run this for a particular user, a browser window will open, and you will be prompted to log in and grant permission. After successful authentication, an access token will be saved in the `credentials/` directory, and subsequent runs will not require manual intervention (unless the token expires or is revoked). The first time you run this for a particular user, a browser window will open, and you will be prompted to log in and grant permission. After successful authentication, an access token will be saved in the `credentials/` directory, and subsequent runs will not require manual intervention (unless the token expires or is revoked).
### Deleting API Keys ### Deleting API Keys

60
main.py
View File

@@ -3,6 +3,8 @@ import sys
import argparse import argparse
import json import json
import logging import logging
import threading
import concurrent.futures
from datetime import datetime, timezone from datetime import datetime, timezone
import jsonschema import jsonschema
import google.auth import google.auth
@@ -190,7 +192,30 @@ def main():
if not args.dry_run: if not args.dry_run:
save_keys_to_json(api_keys_data, API_KEYS_DATABASE_FILE, schema) save_keys_to_json(api_keys_data, API_KEYS_DATABASE_FILE, schema)
def process_account(email, action, api_keys_data, dry_run=False): def process_project_for_action(project, creds, action, dry_run, db_lock, account_entry):
"""Processes a single project for the given action in a thread-safe manner."""
project_id = project.project_id
logging.info(f"- Starting to process project: {project_id} ({project.display_name})")
if action == 'create':
if project_has_gemini_key(project_id, creds):
logging.info(f" 'Gemini API Key' already exists in project {project_id}. Skipping creation.")
return
if enable_api(project_id, creds, dry_run=dry_run):
key_object = create_api_key(project_id, creds, dry_run=dry_run)
if key_object:
with db_lock:
add_key_to_database(account_entry, project, key_object)
elif action == 'delete':
deleted_keys_uids = delete_api_keys(project_id, creds, dry_run=dry_run)
if deleted_keys_uids:
with db_lock:
remove_keys_from_database(account_entry, project_id, deleted_keys_uids)
logging.info(f"- Finished processing project: {project_id}")
def process_account(email, action, api_keys_data, dry_run=False, max_workers=5):
"""Processes a single account for the given action.""" """Processes a single account for the given action."""
logging.info(f"--- Processing account: {email} for action: {action} ---") logging.info(f"--- Processing account: {email} for action: {action} ---")
if dry_run: if dry_run:
@@ -223,24 +248,21 @@ def process_account(email, action, api_keys_data, dry_run=False):
logging.info(f"No projects found for {email}.") logging.info(f"No projects found for {email}.")
return return
logging.info(f"Found {len(projects)} projects. Processing...") logging.info(f"Found {len(projects)} projects. Processing with up to {max_workers} workers...")
for project in projects:
project_id = project.project_id db_lock = threading.Lock()
logging.info(f"- Project: {project_id} ({project.display_name})")
if action == 'create': with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
if project_has_gemini_key(project_id, creds): future_to_project = {
logging.info(" 'Gemini API Key' already exists in this project. Skipping creation.") executor.submit(process_project_for_action, project, creds, action, dry_run, db_lock, account_entry): project
continue for project in projects
}
if enable_api(project_id, creds, dry_run=dry_run): for future in concurrent.futures.as_completed(future_to_project):
key_object = create_api_key(project_id, creds, dry_run=dry_run) project = future_to_project[future]
if key_object: try:
add_key_to_database(account_entry, project, key_object) future.result()
elif action == 'delete': except Exception as exc:
deleted_keys_uids = delete_api_keys(project_id, creds, dry_run=dry_run) logging.error(f"Project {project.project_id} generated an exception: {exc}", exc_info=True)
if deleted_keys_uids:
remove_keys_from_database(account_entry, project_id, deleted_keys_uids)
except google_exceptions.PermissionDenied as err: except google_exceptions.PermissionDenied as err:
logging.error(f"Permission denied for account {email}. Check IAM roles.") logging.error(f"Permission denied for account {email}. Check IAM roles.")
@@ -315,7 +337,7 @@ def project_has_gemini_key(project_id, credentials):
parent = f"projects/{project_id}/locations/global" parent = f"projects/{project_id}/locations/global"
keys = api_keys_client.list_keys(parent=parent) keys = api_keys_client.list_keys(parent=parent)
for key in keys: for key in keys:
if key.display_name == "Gemini API Key": if key.display_name in ["Gemini API Key", "Generative Language API Key"]: # 2nd display name is used when the key is created via AI Studio
return True return True
return False return False
except google_exceptions.GoogleAPICallError as err: except google_exceptions.GoogleAPICallError as err: