#!/usr/bin/env python3

"""
Import CSV to Supabase

This script reads events.csv and syncs it to the Supabase database.
It will create/update events in the database based on the CSV.

Usage:
    python3 import-csv.py                  # Import to production
    STAGING=true python3 import-csv.py     # Import to staging
"""
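# Expected CSV columns (the keys of FIELD_MAPPING below): date, event_type,
# company, city, geometry_file, vehicles, platform, fares, direct_booking,
# service_model, supervision, access, fleet_partner, expected_launch,
# company_link, booking_platform_link, source_url, notes.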

import os
import re
import sys
import csv
from supabase import create_client, Client
from dotenv import load_dotenv

# Load .env file if not in GitHub Actions
if not os.getenv('GITHUB_ACTIONS'):
    load_dotenv()

supabase_url = os.getenv('SUPABASE_URL')
supabase_service_key = os.getenv('SUPABASE_SERVICE_KEY') or os.getenv('SUPABASE_ANON_KEY')

if not supabase_url or not supabase_service_key:
    print('❌ Missing required environment variables!')
    print('Please set SUPABASE_URL and SUPABASE_SERVICE_KEY (or SUPABASE_ANON_KEY) in .env')
    sys.exit(1)

is_staging = os.getenv('STAGING') == 'true'
environment = 'staging' if is_staging else 'production'
events_table = 'av_events_staging' if is_staging else 'av_events'

print('\n' + '=' * 70)
print(f'🌍 ENVIRONMENT: {environment.upper()}')
print('=' * 70)
print(f'📋 Events table: {events_table}\n')

# Production safety check
if not is_staging and not os.getenv('GITHUB_ACTIONS'):
    print('⚠️ WARNING: You are about to modify PRODUCTION data!')
    print('This will import events to the production database.')
    print('Make sure you have tested in STAGING first!\n')

    confirmation = input('Type "PRODUCTION" to confirm (or Ctrl+C to cancel): ')
    if confirmation.strip() != 'PRODUCTION':
        print('\n❌ Import cancelled - confirmation did not match')
        sys.exit(0)
    print('')

supabase: Client = create_client(supabase_url, supabase_service_key)

# Map CSV column names to database field names
FIELD_MAPPING = {
    'date': 'event_date',
    'event_type': 'event_type',
    'company': 'company',
    'city': 'city',
    'geometry_file': 'geometry_name',
    'vehicles': 'vehicle_types',
    'platform': 'platform',
    'fares': 'fares',
    'direct_booking': 'direct_booking',
    'service_model': 'service_model',
    'supervision': 'supervision',
    'access': 'access',
    'fleet_partner': 'fleet_partner',
    'expected_launch': 'expected_launch',
    'company_link': 'company_link',
    'booking_platform_link': 'booking_platform_link',
    'source_url': 'event_url',
    'notes': 'notes'
}
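# CSV columns that do not appear in FIELD_MAPPING are silently ignored
# by csv_row_to_event() below.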

def csv_row_to_event(row):
    company = row.get('company', '').strip()
    city = row.get('city', '').strip()
    date = row.get('date', '').strip()

    if not company or not city or not date:
        raise ValueError('Missing required fields: company, city, or date')

    # Create aggregate_id (unique identifier for the service)
    aggregate_id = f"{company.lower().replace(' ', '-')}-{city.lower().replace(' ', '-')}"
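    # e.g. a (hypothetical) company "Acme Mobility" in city "San Jose"
    # yields the aggregate_id "acme-mobility-san-jose"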

    # Build event_data object with all service attributes
    event_data = {
        'name': city,       # city becomes "name" in event_data
        'company': company  # company goes in event_data too
    }

    # Determine which fields to include in event_data based on event type
    event_type = row.get('event_type', '')
    is_update_event = event_type in [
        'fares_policy_changed',
        'access_policy_changed',
        'supervision_updated',
        'platform_updated',
        'vehicle_types_updated',
        'fleet_partner_changed',
        'service_model_updated',
        'geometry_updated',
        'direct_booking_updated'
    ]

    is_service_created = event_type == 'service_created'
    is_service_testing = event_type == 'service_testing'
    is_service_announced = event_type == 'service_announced'

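    # For any other event_type, only geometry_file, company, city, notes,
    # source_url, and expected_launch survive into event_data; the remaining
    # columns fall through both branches below and are dropped.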
    # Map all CSV fields to event_data
    for csv_key, value in row.items():
        value = value.strip() if value else ''
        if not value:
            continue  # Skip empty values

        db_key = FIELD_MAPPING.get(csv_key)
        if db_key and db_key not in ['event_date', 'event_type']:
            # Convert geometry_file to geometry_name format
            if csv_key == 'geometry_file':
                # Check if it's inline coordinates (lng,lat format, e.g. "-122.4,37.7")
                if re.match(r'^-?\d+\.?\d*,-?\d+\.?\d*$', value):
                    # Store inline coordinates directly as geometry_name
                    event_data['geometry_name'] = value
                else:
                    # Remove .geojson extension from filename
                    event_data['geometry_name'] = value.replace('.geojson', '')
            # For service_created, service_testing, service_announced include all fields
            # For update events, skip the field being updated (it is added as new_* below)
            # Always include company, city, notes, source_url, and expected_launch
            elif (is_service_created or
                  is_service_testing or
                  is_service_announced or
                  csv_key in ['company', 'city', 'notes', 'source_url', 'expected_launch']):
                event_data[db_key] = value
            # For update events, only include fields that aren't being updated
            elif is_update_event:
                is_field_being_updated = (
                    (event_type == 'fares_policy_changed' and csv_key == 'fares') or
                    (event_type == 'access_policy_changed' and csv_key == 'access') or
                    (event_type == 'supervision_updated' and csv_key == 'supervision') or
                    (event_type == 'platform_updated' and csv_key == 'platform') or
                    (event_type == 'vehicle_types_updated' and csv_key == 'vehicles') or
                    (event_type == 'fleet_partner_changed' and csv_key == 'fleet_partner') or
                    (event_type == 'service_model_updated' and csv_key == 'service_model') or
                    (event_type == 'direct_booking_updated' and csv_key == 'direct_booking')
                )

                if not is_field_being_updated:
                    event_data[db_key] = value

    # Add the new_* fields for update events
    if event_type == 'fares_policy_changed' and row.get('fares'):
        event_data['new_fares'] = row['fares'].strip()
    elif event_type == 'access_policy_changed' and row.get('access'):
        event_data['new_access'] = row['access'].strip()
    elif event_type == 'supervision_updated' and row.get('supervision'):
        event_data['new_supervision'] = row['supervision'].strip()
    elif event_type == 'platform_updated' and row.get('platform'):
        event_data['new_platform'] = row['platform'].strip()
    elif event_type == 'vehicle_types_updated' and row.get('vehicles'):
        event_data['new_vehicle_types'] = row['vehicles'].strip()
    elif event_type == 'fleet_partner_changed' and row.get('fleet_partner'):
        event_data['new_fleet_partner'] = row['fleet_partner'].strip()
    elif event_type == 'service_model_updated' and row.get('service_model'):
        event_data['new_service_model'] = row['service_model'].strip()
    elif event_type == 'direct_booking_updated' and row.get('direct_booking'):
        event_data['new_direct_booking'] = row['direct_booking'].strip()

    return {
        'aggregate_id': aggregate_id,
        'aggregate_type': 'service_area',
        'event_date': date,
        'event_type': event_type,
        'event_data': event_data
    }

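# A minimal sketch of the transformation above, with hypothetical values:
# a CSV row such as
#   date=2024-06-01, event_type=service_created, company=Acme, city=Austin
# becomes
#   {'aggregate_id': 'acme-austin', 'aggregate_type': 'service_area',
#    'event_date': '2024-06-01', 'event_type': 'service_created',
#    'event_data': {'name': 'Austin', 'company': 'Acme', 'city': 'Austin'}}
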
def import_csv():
    print('📖 Reading events.csv...')

    try:
        # Read CSV file
        with open('./events.csv', 'r', encoding='utf-8') as f:
            reader = csv.DictReader(f)
            records = list(reader)

        print(f'   Found {len(records)} events in CSV\n')

        # Convert CSV rows to event objects
        print('🔄 Converting CSV to event format...')
        events = []
        for index, row in enumerate(records):
            try:
                events.append(csv_row_to_event(row))
            except Exception as error:
                # +2 accounts for the header row and 1-based line numbering
                print(f'   ❌ Error on row {index + 2}: {error}')
                raise

        print(f'   ✅ Converted {len(events)} events\n')

        # Clear existing events in the table (full re-sync); the .neq() on an
        # impossible all-zeros UUID matches every real row, a conventional way
        # to delete all rows through the Supabase client.
        print(f'🗑️ Clearing {events_table} table...')
        supabase.table(events_table).delete().neq('id', '00000000-0000-0000-0000-000000000000').execute()
        print('   ✅ Table cleared\n')

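        # Note: clear-then-insert is simple but not atomic; if a batch insert
        # below fails, the table is left partially populated until a re-run.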
        # Insert events in batches
        print('📥 Importing events to database...')
        BATCH_SIZE = 50
        imported = 0

        for i in range(0, len(events), BATCH_SIZE):
            batch = events[i:i + BATCH_SIZE]
            supabase.table(events_table).insert(batch).execute()

            imported += len(batch)
            print(f'   Progress: {imported}/{len(events)} events')

        print(f'   ✅ Imported {imported} events\n')

        # Verify count
        print('🔍 Verifying import...')
        response = supabase.table(events_table).select('id', count='exact').execute()
        count = response.count

        print(f'   CSV events: {len(events)}')
        print(f'   Database events: {count}\n')

        if count == len(events):
            print('✅ Import complete! All events synced successfully.')
            print('\n📝 Next steps:')
            print(f'   Run: {"STAGING=true " if is_staging else ""}python3 rebuild-cache.py')
        else:
            print('❌ Count mismatch! Import may have failed.')
            sys.exit(1)

    except Exception as error:
        print(f'❌ Import failed: {error}')
        sys.exit(1)

if __name__ == '__main__':
    import_csv()