Competitive analysis is central to any organization operating in a competitive market space. Depending on that space and on the goals of the organization or team, you'll look at different marketing signals communicated by the competition. For this competitive analysis, I focused on our competitors' search engine results page (SERP) positions and other SEO metrics as indicators of how well each company is doing. The script below collects those signals and monitors them over time through monthly comparisons and data collection.
This automated a standard practice for our team. By capping the number of API calls per month, I was also able to use SerpApi's Google search service without incurring costs, which gave us up-to-date, fine-grained data for free.
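The cap itself isn't enforced anywhere in the script below, so here is a minimal sketch of how a monthly call-count guard could work. The usage-file path, the cap value, and the helper names are all hypothetical, not part of the original workflow:

import datetime as dt
import json
import os

USAGE_FILE = "data/serp_api_usage.json"  # hypothetical bookkeeping file
MONTHLY_CAP = 100  # assumed free-tier allowance; adjust to your plan

def calls_this_month(usage_file=USAGE_FILE):
    """Return the recorded call count for the current month (0 if none)."""
    month = dt.date.today().strftime("%Y-%m")
    if not os.path.exists(usage_file):
        return 0
    with open(usage_file) as f:
        return json.load(f).get(month, 0)

def record_call(usage_file=USAGE_FILE):
    """Increment the current month's call count on disk."""
    month = dt.date.today().strftime("%Y-%m")
    usage = {}
    if os.path.exists(usage_file):
        with open(usage_file) as f:
            usage = json.load(f)
    usage[month] = usage.get(month, 0) + 1
    with open(usage_file, "w") as f:
        json.dump(usage, f)

Wrapping each scrape in `if calls_this_month() < MONTHLY_CAP:` and calling `record_call()` afterward keeps a month's usage under the free allowance.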
import datetime as dt
import json
import os
from serpapi import GoogleSearch
import pandas as pd
import gspread
from gspread_dataframe import set_with_dataframe
from oauth2client.service_account import ServiceAccountCredentials
# PLACES ----------------------------------------------------------------------
places = {
    "LOC1": "Google's_preferred_name_for_LOC1",
    "LOC2": "Google's_preferred_name_for_LOC2",
    "LOC3": "Google's_preferred_name_for_LOC3",
    "LOC4": "Google's_preferred_name_for_LOC4",
    "LOC5": "Google's_preferred_name_for_LOC5",
    "LOC6": "Google's_preferred_name_for_LOC6",
    "LOC7": "Google's_preferred_name_for_LOC7",
    "LOC8": "Google's_preferred_name_for_LOC8",
    "LOC9": "Google's_preferred_name_for_LOC9",
    "LOC10": "Google's_preferred_name_for_LOC10",
    "LOC11": "Google's_preferred_name_for_LOC11",
    "LOC12": "Google's_preferred_name_for_LOC12",
    "LOC13": "Google's_preferred_name_for_LOC13",
}
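# The values above are the canonical location strings Google expects. One way
# to look these up is SerpApi's Locations API; a minimal sketch, assuming the
# endpoint returns entries with a "canonical_name" field (the helper name is
# hypothetical, not part of the original script):
import requests

def lookup_canonical_location(query, limit=3):
    # hits https://serpapi.com/locations.json and returns candidate names
    resp = requests.get(
        "https://serpapi.com/locations.json",
        params={"q": query, "limit": limit},
        timeout=10,
    )
    resp.raise_for_status()
    return [loc.get("canonical_name") for loc in resp.json()]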
city = "austin_tx"
# SERP API --------------------------------------------------------------------
api_key = os.getenv("SERP_API_KEY")

def local_places_scrape(location, key=api_key):
    """Pull Google Local results for a location from SerpApi."""
    params = {
        "api_key": key,
        "engine": "google_local",
        "google_domain": "google.com",
        "q": "KEYWORD_PHRASE_OF_INTEREST",
        "hl": "en",
        "gl": "us",
        "location": location,
    }
    search = GoogleSearch(params)
    return search.get_dict()
def parse_lp_results(local_places):
    """Flatten the raw SerpApi response into a top-ten competitor DataFrame."""
    lp_list = []
    for entry in local_places["local_results"]:
        # keep only listings Google classifies as home inspectors
        if entry["type"] != "Home inspector":
            continue
        dict_lp = {}
        dict_lp["Company"] = entry.get("title", None)
        if entry.get("links", None):
            dict_lp["URL"] = entry["links"].get("website", None)
        else:
            dict_lp["URL"] = None
        dict_lp["National Franchise"] = None  # added externally
        dict_lp["Phone Number"] = entry.get("phone", None)
        dict_lp["Google Review Count"] = entry.get("reviews", None)
        dict_lp["SERP Rank"] = entry.get("position", None)
        dict_lp["Years in Business"] = entry.get("years_in_business", None)
        dict_lp["Google Review Rating"] = entry.get("rating", None)
        # add the dictionary to the list
        lp_list.append(dict_lp)
    df = pd.DataFrame(lp_list)
    # sort the values in df by descending review count and keep the top ten
    df_rank = df.sort_values(by=["Google Review Count"], ascending=False)
    df_top = df_rank.head(10).reset_index(drop=True)
    return df_top
lp_data = local_places_scrape(location=places[city])
df_lp = parse_lp_results(local_places=lp_data)
df_lp.to_csv(f"data/_clean/comp_analysis_{city}_{dt.date.today()}.csv")
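The script imports gspread, set_with_dataframe, and ServiceAccountCredentials, presumably to push the monthly snapshot into a Google Sheet for the team's comparisons. A minimal sketch of that upload step; the key-file path, spreadsheet title, and worksheet name are my assumptions, not the original's:

# GOOGLE SHEETS UPLOAD ----------------------------------------------------------
scope = [
    "https://spreadsheets.google.com/feeds",
    "https://www.googleapis.com/auth/drive",
]
creds = ServiceAccountCredentials.from_json_keyfile_name("service_account.json", scope)
client = gspread.authorize(creds)
worksheet = client.open("Competitive Analysis").worksheet(city)  # hypothetical names
set_with_dataframe(worksheet, df_lp)  # writes df_lp into the sheet starting at A1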