# ga4.py
"""
Collects First Garden Google Analytics 4 data via the GA4 Data API and stores
it in the database.  Uses a thread pool to speed up collection (internal
parallelism kept; error handling/reporting strengthened).
"""
import sys, os
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

import yaml
import pprint
import traceback
from datetime import date, datetime, timedelta
from dateutil.parser import parse
from google.analytics.data import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import DateRange, Dimension, Metric, RunReportRequest
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.exc import IntegrityError
from sqlalchemy import select, func
from concurrent.futures import ThreadPoolExecutor, as_completed

from conf import db, db_schema

CONFIG_PATH = os.path.join(os.path.dirname(__file__), "../conf/config.yaml")


# ------------------------
# Load configuration file
# ------------------------
def load_config():
    """Load config.yaml and resolve the GA4 service-account path to absolute.

    Returns:
        dict: parsed configuration (empty dict if the file is empty).
    """
    with open(CONFIG_PATH, encoding="utf-8") as f:
        # safe_load returns None for an empty file; normalize to a dict.
        cfg = yaml.safe_load(f) or {}
    sa_file = cfg.get('ga4', {}).get('service_account_file')
    if sa_file and not os.path.isabs(sa_file):
        # Relative paths are interpreted relative to the config file location.
        base_dir = os.path.dirname(CONFIG_PATH)
        cfg['ga4']['service_account_file'] = os.path.abspath(os.path.join(base_dir, sa_file))
    return cfg


# ------------------------
# Initialize GA4 client
# ------------------------
def init_ga_client(service_account_file):
    """Create a BetaAnalyticsDataClient authenticated via a service-account file.

    Args:
        service_account_file: absolute path to the GA4 service-account JSON key.

    Raises:
        Exception: re-raised after logging if client construction fails.
    """
    try:
        # The google-analytics-data client reads credentials from this env var.
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = service_account_file
        print(f"[INFO] GA4 클라이언트 초기화 - 인증파일: {service_account_file}")
        return BetaAnalyticsDataClient()
    except Exception as e:
        print(f"[ERROR] GA4 클라이언트 초기화 실패: {e}")
        traceback.print_exc()
        raise


# ------------------------
# Persist detected max rows into config.yaml
# ------------------------
def update_config_file_with_max_rows(max_rows):
    """Write ga4.max_rows_per_request back into config.yaml (best effort)."""
    try:
        with open(CONFIG_PATH, encoding="utf-8") as f:
            config = yaml.safe_load(f) or {}
        if "ga4" not in config:
            config["ga4"] = {}
        config["ga4"]["max_rows_per_request"] = int(max_rows)
        with open(CONFIG_PATH, "w", encoding="utf-8") as f:
            yaml.dump(config, f, allow_unicode=True)
        print(f"[INFO] config.yaml에 max_rows_per_request = {max_rows} 저장 완료")
    except Exception as e:
        # Non-fatal: the in-memory value is still used for this run.
        print(f"[WARN] config.yaml 업데이트 실패: {e}")


# ------------------------
# Detect maximum rows supported by the GA4 API
# ------------------------
def detect_max_rows_supported(client, property_id):
    """Probe the GA4 API with a large limit and return the row count received.

    Falls back to a safe default of 10000 if the probe request fails.
    """
    try:
        request = RunReportRequest(
            property=f"properties/{property_id}",
            dimensions=[Dimension(name="date")],
            metrics=[Metric(name="sessions")],
            date_ranges=[DateRange(start_date="2024-01-01", end_date="2024-12-31")],
            limit=100000
        )
        response = client.run_report(request)
        nrows = len(response.rows)
        print(f"[INFO] 최대 rows 감지: {nrows} rows 수신됨.")
        return nrows
    except Exception as e:
        print(f"[WARNING] 최대 rows 감지 실패: {e}")
        traceback.print_exc()
        # Safe default when detection fails.
        return 10000


# ------------------------
# Request GA4 report data
# ------------------------
def fetch_report(client, property_id, start_date, end_date, dimensions, metrics, limit=10000):
    """Run a GA4 report for the given date range, dimensions, and metrics.

    Returns:
        RunReportResponse on success, or None on failure so the caller can
        decide how to handle the missing data.
    """
    print(f"[INFO] fetch_report 호출 - 기간: {start_date} ~ {end_date}, dims={dimensions}, metrics={metrics}")
    try:
        request = RunReportRequest(
            property=f"properties/{property_id}",
            dimensions=[Dimension(name=d) for d in dimensions],
            metrics=[Metric(name=m) for m in metrics],
            date_ranges=[DateRange(start_date=start_date, end_date=end_date)],
            limit=limit,
        )
        response = client.run_report(request)
        print(f"[INFO] GA4 리포트 응답 받음: {len(response.rows)} rows")
        return response
    except Exception as e:
        print(f"[ERROR] GA4 fetch_report 실패: {e}")
        traceback.print_exc()
        # Return None instead of a fake empty response; caller checks for it.
        return None


# ------------------------
# Save response data to DB
# ------------------------
def save_report_to_db(engine, table, response, dimension_names, metric_names, debug=False):
    """Upsert each response row into `table` using MySQL ON DUPLICATE KEY UPDATE.

    Rows whose dimension values are missing (index out of range) are skipped
    entirely — inserting a partial row without its key column would either
    fail or corrupt the upsert key.
    """
    if response is None:
        print("[INFO] 저장할 응답 없음 (None)")
        return
    with engine.begin() as conn:
        for row in response.rows:
            dims = row.dimension_values
            mets = row.metric_values
            data = {}
            row_ok = True
            for i, dim_name in enumerate(dimension_names):
                try:
                    val = dims[i].value
                    if dim_name == "date":
                        # GA4 returns dates as YYYYMMDD; fall back to a
                        # general parser for any other format.
                        if len(val) == 8:
                            val = datetime.strptime(val, "%Y%m%d").date()
                        else:
                            val = parse(val).date()
                    data[dim_name] = val
                except IndexError:
                    print(f"[WARNING] dimension index {i} 초과: dims={dims}")
                    # BUGFIX: previously the row was still inserted without
                    # this dimension (possibly the primary key); skip it.
                    row_ok = False
                    break
            if not row_ok:
                continue
            for i, met_name in enumerate(metric_names):
                try:
                    data[met_name] = int(mets[i].value)
                except (IndexError, ValueError):
                    print(f"[WARNING] metric 처리 오류: {met_name}")
                    data[met_name] = None
            stmt = mysql_insert(table).values(**data)
            stmt = stmt.on_duplicate_key_update(**data)
            if debug:
                print(f"[DEBUG] 저장할 데이터:")
                pprint.pprint(data, indent=2, width=80)
                print(f"[DEBUG] 실행할 쿼리:")
                print(stmt)
                print("-" * 60)
            else:
                try:
                    conn.execute(stmt)
                except IntegrityError as e:
                    print(f"[DB ERROR] 중복 오류 또는 기타: {e}")
                except Exception as e:
                    print(f"[DB ERROR] 저장 실패: {e}")
                    traceback.print_exc()


# ------------------------
# Query the latest stored date from a table
# ------------------------
def get_latest_date_from_db(engine, table):
    """Return MAX(date) from `table`, or None if the table is empty."""
    with engine.connect() as conn:
        stmt = select(func.max(table.c.date))
        result = conn.execute(stmt).scalar()
    print(f"[INFO] DB 기준 마지막 저장 날짜: {result}")
    return result


# ------------------------
# Split a request range based on the row limit
# ------------------------
def date_range_chunks(start_date, end_date, max_rows_per_request, avg_rows_per_day=500):
    """Yield (chunk_start, chunk_end) date pairs sized so that each chunk is
    expected to stay under max_rows_per_request rows.

    Args:
        avg_rows_per_day: heuristic estimate of rows produced per day.
    """
    chunk_days = max(1, max_rows_per_request // avg_rows_per_day)
    current_start = start_date
    while current_start <= end_date:
        current_end = min(current_start + timedelta(days=chunk_days - 1), end_date)
        yield current_start, current_end
        current_start = current_end + timedelta(days=1)


# ------------------------
# Decide the date range to collect
# ------------------------
def determine_date_range(table, config_start, config_end, force_update, engine):
    """Compute the [start, end] collection window for a table.

    Incremental mode resumes from the day after the latest stored date;
    force_update re-collects from config_start.  The end is capped at
    yesterday (today's GA4 data is incomplete).

    Returns:
        (start, end) dates, or (None, None) when there is nothing to collect.
    """
    yesterday = datetime.now().date() - timedelta(days=1)
    actual_end = min(yesterday, config_end)
    latest_db_date = get_latest_date_from_db(engine, table)
    if force_update:
        actual_start = config_start
    else:
        if latest_db_date is not None:
            actual_start = latest_db_date + timedelta(days=1)
        else:
            actual_start = config_start
    if actual_start > actual_end:
        # BUGFIX: the previous code swapped the dates here, which silently
        # re-collected already-stored history whenever the DB was up to date.
        # An inverted range means there is nothing new to fetch; report that
        # via (None, None), which the caller already checks for.
        print(f"[INFO] 수집할 새로운 날짜가 없습니다. (start={actual_start}, end={actual_end})")
        return None, None
    print(f"[INFO] 수집 날짜 범위 결정: {actual_start} ~ {actual_end}")
    return actual_start, actual_end


def _as_date(value, fallback):
    """Coerce a config value to datetime.date.

    yaml.safe_load parses ISO dates into date/datetime objects directly, while
    quoted values arrive as strings — handle both (the old code called
    .strip() unconditionally and crashed on non-string values).
    """
    if value is None:
        return fallback
    if isinstance(value, datetime):
        return value.date()
    if isinstance(value, date):
        return value
    return parse(str(value).strip()).date()


# ------------------------
# Process one table's dimensions/metrics
# ------------------------
def process_dimension_metric(engine, client, property_id, config, table, dims, mets, max_rows,
                             debug=False, force_update=False):
    """Fetch and store GA4 data for a single table in date-range chunks.

    Args:
        config: the 'ga4' section of config.yaml (startDt/endDt bounds).
        table: SQLAlchemy Table the data is upserted into.
        dims / mets: GA4 dimension and metric names, in column order.
    """
    config_start = _as_date(config.get("startDt"), date(2023, 1, 1))
    config_end = _as_date(config.get("endDt"), datetime.now().date())
    actual_start, actual_end = determine_date_range(table, config_start, config_end,
                                                    force_update, engine)
    if actual_start is None or actual_end is None:
        print(f"[INFO] 모든 데이터가 이미 수집되었거나 수집 범위가 없습니다.")
        return
    for start_dt, end_dt in date_range_chunks(actual_start, actual_end, max_rows):
        start_str = start_dt.strftime('%Y-%m-%d')
        end_str = end_dt.strftime('%Y-%m-%d')
        print(f"[INFO] GA4 데이터 조회: {start_str} ~ {end_str}")
        response = fetch_report(client, property_id, start_str, end_str,
                                dimensions=dims, metrics=mets, limit=max_rows)
        if response and len(response.rows) > 0:
            save_report_to_db(engine, table, response,
                              dimension_names=dims, metric_names=mets, debug=debug)
        else:
            print(f"[INFO] 해당 기간 {start_str} ~ {end_str} 데이터 없음 또는 요청 실패")


# ------------------------
# Main entry point (with parallel processing)
# ------------------------
def main():
    """Load config, initialize the GA4 client, and collect all report tables
    in parallel via a thread pool."""
    print("[INFO] GA4 수집 프로그램 시작")
    config = load_config()
    ga4_cfg = config.get('ga4', {})
    service_account_file = ga4_cfg.get('service_account_file')
    property_id = ga4_cfg.get('property_id')
    debug = config.get('debug', False)
    force_update = config.get('force_update', False)
    max_workers = config.get('max_workers', 4)
    if not service_account_file or not property_id:
        print("[ERROR] config.yaml에 'ga4.service_account_file'과 'ga4.property_id'를 반드시 설정하세요.")
        return
    engine = db.engine
    try:
        client = init_ga_client(service_account_file)
    except Exception:
        print("[ERROR] GA4 클라이언트 초기화 실패로 종료합니다.")
        return
    max_rows = ga4_cfg.get("max_rows_per_request")
    if not max_rows:
        max_rows = detect_max_rows_supported(client, property_id)
        try:
            update_config_file_with_max_rows(max_rows)
        except Exception:
            pass  # persisting the detected value is best-effort only
    print(f"[INFO] 설정된 max_rows_per_request = {max_rows}")
    # (table, GA4 dimensions, GA4 metrics) for each report to collect.
    tasks = [
        (db_schema.ga4_by_date, ["date"], ["activeUsers", "screenPageViews", "sessions"]),
        (db_schema.ga4_by_source, ["date", "sessionSource"], ["sessions"]),
        (db_schema.ga4_by_medium, ["date", "sessionMedium"], ["sessions"]),
        (db_schema.ga4_by_device, ["date", "deviceCategory"], ["activeUsers"]),
        (db_schema.ga4_by_country, ["date", "country"], ["activeUsers"]),
        (db_schema.ga4_by_city, ["date", "city"], ["activeUsers"])
    ]
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # BUGFIX: enumerate(as_completed(...)) numbered tasks by completion
        # order, so error logs pointed at the wrong task.  Map each future to
        # its table name instead.
        future_to_name = {
            executor.submit(process_dimension_metric, engine, client, property_id,
                            ga4_cfg, table, dims, mets, max_rows, debug, force_update): table.name
            for table, dims, mets in tasks
        }
        for future in as_completed(future_to_name):
            task_name = future_to_name[future]
            try:
                future.result()
                print(f"[INFO] 태스크 {task_name} 완료")
            except Exception as e:
                print(f"[ERROR] 태스크 {task_name} 실패: {e}")
                traceback.print_exc()
    print("[INFO] GA4 데이터 수집 및 저장 완료")


if __name__ == '__main__':
    main()