load_ga_dims.py
#!/usr/bin/python
# Load Google Analytics dimensions based on TSV file
import lyf, logging
import argparse
import csv
import os
from lyf import psql
from datetime import date, timedelta, datetime # Date time
from dateutil.parser import parse # Date parser

parser = argparse.ArgumentParser(description="Extract Google Analytics Dimensions")
parser.add_argument("-f", "--full", action='store_true', default=False, help="Specifies full mode for extract as opposed to incremental.")
args = parser.parse_args()
FULL_MODE = args.full
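
# Usage (assuming the lyf package and its ETL configuration are available on this machine):
#   ./load_ga_dims.py          incremental extract of the GA dimension tables
#   ./load_ga_dims.py --full   full reload, which also rebuilds the d_country lookup table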

def main():
    file = os.path.join(lyf.SCRIPT_DIR, lyf.get_config('ETL', 'GA_Dims'))

    # Read TSV file, looping through dimensions
    i = 0

    if FULL_MODE:
        try:
            db = psql.DB()

            # Reload country table
            countries_file = os.path.join(lyf.SCRIPT_DIR, 'data', 'countries.csv')
            db.truncate('d_country')
            db.reset_seq('d_country', 'country_id')
            db.load_csv('d_country', countries_file)
            db.close()
            logging.info('Reloaded d_country table.')
        except Exception as err:
            logging.error(err)
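
    # psql.DB is assumed to be a thin wrapper around a PostgreSQL connection:
    # truncate/reset_seq/load_csv above empty d_country, reset its country_id
    # sequence, and bulk-load the replacement rows from countries.csv.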

    with open(file, 'r') as f:
        f = csv.reader(f, delimiter='\t')
        for row in f:
            if (i > 0):
                if (len(row) > 0):
                    table = row[0]
                    ga_dims = row[1].split(',')
                    columns = row[2].split(',')
                    keys = row[3].split(',')
                    psql.load_ga_dim(FULL_MODE, table, ga_dims, columns, keys)
            i += 1
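
    # Each non-header row of the TSV read above is expected to carry four
    # tab-separated fields: target table, comma-separated GA dimensions,
    # comma-separated target columns, and comma-separated key columns.
    # A hypothetical row (not taken from the real config) might look like:
    #   d_ga_page<TAB>ga:pageTitle,ga:pagePath<TAB>page_title,page_path<TAB>page_path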

    # Post load processing
    try:
        db = psql.DB()

        # Update the page table to apply any information about blog authors that can be found
        end_date = date.today().strftime('%Y-%m-%d')
        if FULL_MODE:
            start_date = lyf.get_config('ETL', 'Extract_Date')
        else:
            start_date = end_date

        service = lyf.google_api('analytics', 'v3', ['https://www.googleapis.com/auth/analytics.readonly'])
        metrics = 'ga:sessions'
        dims = 'ga:pageTitle,ga:contentGroup1,ga:contentGroup2'
        filters = 'ga:contentGroup1==Blog;ga:contentGroup2!=(not set)'
        results = lyf.ga_query(service, start_date, end_date, metrics, dims, filters)
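
        # Each result row is assumed to come back with values in the order of the
        # requested dimensions, i.e. [pageTitle, contentGroup1, contentGroup2, ...],
        # which is what the row[0..2] references below rely on.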

        updated_pages = 0
        for row in results:
            rec = { 'page_type' : row[1], 'author' : row[2] }
            filter_rec = { 'page_title' : row[0] }
            status = db.update('d_ga_page', rec, filter_rec)
            if status == 1:
                updated_pages += 1

        if updated_pages > 0:
            logging.info('Updated %s page entries with Blog and Author info.' % updated_pages)

        db.close()
    except Exception as err:
        logging.error(err)

if __name__ == '__main__':
    main()