Google Places API: Local B2B Prospecting

Why Google Places API?

Google Maps indexes millions of businesses along with their contact details. The Google Places API lets you extract this data in a structured way to build a qualified B2B prospect database.

Typical use cases:

  • Find all restaurants in a city to sell them a POS system
  • List real estate agencies in a region to offer a photography service
  • Identify hair salons to offer them an online booking tool

API Setup

1. Create a Google Cloud project

  1. Go to Google Cloud Console
  2. Create a new project
  3. Enable the Places API (New)
  4. Create an API key in APIs & Services > Credentials

2. Secure your key

# Never commit your key in your code!
export GOOGLE_PLACES_API_KEY="your-key-here"

Recommended restrictions:

  • Restrict by API (Places API only)
  • Restrict by IP or referrer in production
  • Set a daily quota to avoid budget overruns

3. Pricing

Request Cost (per call)
Text Search $0.032
Nearby Search $0.032
Place Details $0.017
Autocomplete $0.00283

Google provides $200 in free monthly credits, which covers approximately 6,250 Text Search queries per month ($200 ÷ $0.032 per call).

Searching for businesses by type and location

Text Search (recommended)

import json
import os

import requests

# Read the key from the environment (see the "Secure your key" section above);
# the literal placeholder is kept only as a fallback for readers pasting snippets.
API_KEY = os.environ.get("GOOGLE_PLACES_API_KEY", "YOUR_API_KEY")
BASE_URL = "https://places.googleapis.com/v1/places:searchText"

def search_businesses(query: str, latitude: float, longitude: float, radius_m: int = 5000) -> list[dict]:
    """
    Search for businesses via Google Places Text Search (Places API New).

    Args:
        query: free-text search, e.g. "Italian restaurants"
        latitude: latitude of the search center
        longitude: longitude of the search center
        radius_m: search radius in meters (API maximum is 50000)

    Returns:
        List of place dicts containing only the fields requested in the
        field mask; empty list when the API returns no matches.

    Raises:
        requests.HTTPError: on a 4xx/5xx response (bad key, quota exceeded, ...).
        requests.Timeout: if the API does not answer within 30 seconds.
    """
    headers = {
        "Content-Type": "application/json",
        "X-Goog-Api-Key": API_KEY,
        # The field mask is mandatory on the new Places API; requesting only
        # the fields we need also keeps each call in the cheapest billing SKU.
        "X-Goog-FieldMask": "places.displayName,places.formattedAddress,places.nationalPhoneNumber,places.websiteUri,places.googleMapsUri,places.rating,places.userRatingCount,places.businessStatus"
    }

    payload = {
        "textQuery": query,
        "locationBias": {
            "circle": {
                "center": {"latitude": latitude, "longitude": longitude},
                "radius": radius_m
            }
        },
        "languageCode": "en"
    }

    # timeout= prevents the call from hanging forever on network issues;
    # raise_for_status() surfaces auth/quota errors instead of silently
    # yielding an empty result list.
    response = requests.post(BASE_URL, headers=headers, json=payload, timeout=30)
    response.raise_for_status()
    data = response.json()

    return data.get("places", [])

# Example: restaurants in Lyon (10 km radius around the city center)
results = search_businesses("restaurants", 45.7640, 4.8357, 10000)

for place in results:
    # Each field may be absent from the response; fall back to "N/A".
    display = place.get("displayName", {})
    name = display.get("text", "N/A")
    address = place.get("formattedAddress", "N/A")
    phone = place.get("nationalPhoneNumber", "N/A")
    website = place.get("websiteUri", "N/A")
    rating = place.get("rating", "N/A")

    print(f"{name} | {address} | {phone} | {website} | Rating: {rating}")

Pagination with nextPageToken

The API returns 20 results per page. To get more:

def search_all_businesses(query: str, lat: float, lng: float, radius_m: int = 5000, max_pages: int = 3) -> list[dict]:
    """
    Paginated Text Search: retrieve up to max_pages * 20 results.

    Args:
        query: free-text search, e.g. "restaurants"
        lat: latitude of the search center
        lng: longitude of the search center
        radius_m: search radius in meters (API maximum is 50000)
        max_pages: number of 20-result pages to fetch (API caps results at 60)

    Returns:
        Accumulated list of place dicts across all fetched pages.

    Raises:
        requests.HTTPError: on a 4xx/5xx response.
        requests.Timeout: if a page does not answer within 30 seconds.
    """
    # Headers and payload are identical on every page, so build them once;
    # the API requires follow-up requests to match the original except for
    # the added pageToken.
    headers = {
        "Content-Type": "application/json",
        "X-Goog-Api-Key": API_KEY,
        "X-Goog-FieldMask": "places.displayName,places.formattedAddress,places.nationalPhoneNumber,places.websiteUri,places.rating,nextPageToken"
    }

    payload = {
        "textQuery": query,
        "locationBias": {
            "circle": {
                "center": {"latitude": lat, "longitude": lng},
                "radius": radius_m
            }
        },
        "languageCode": "en",
        "pageSize": 20
    }

    all_results = []

    for _ in range(max_pages):
        response = requests.post(BASE_URL, headers=headers, json=payload, timeout=30)
        response.raise_for_status()
        data = response.json()

        all_results.extend(data.get("places", []))

        page_token = data.get("nextPageToken")
        if not page_token:
            break
        payload["pageToken"] = page_token

    return all_results

Data enrichment

Extract business details

def get_details(place_id: str) -> dict:
    """
    Fetch full details for a single place.

    Args:
        place_id: Places API place ID (without the "places/" resource prefix).

    Returns:
        Dict with the fields listed in the field mask below.

    Raises:
        requests.HTTPError: on a 4xx/5xx response (e.g. unknown place_id).
        requests.Timeout: if the API does not answer within 30 seconds.
    """
    url = f"https://places.googleapis.com/v1/places/{place_id}"

    headers = {
        "X-Goog-Api-Key": API_KEY,
        "X-Goog-FieldMask": "displayName,formattedAddress,nationalPhoneNumber,internationalPhoneNumber,websiteUri,googleMapsUri,rating,userRatingCount,businessStatus,types,regularOpeningHours"
    }

    # Timeout + status check: a bad place_id or revoked key should raise,
    # not return an opaque error payload as if it were place data.
    response = requests.get(url, headers=headers, timeout=30)
    response.raise_for_status()
    return response.json()

Finding emails from websites

Google Places does not provide email addresses. You need to extract them from the business website:

import re
from bs4 import BeautifulSoup

def extract_email_from_site(url: str) -> str | None:
    """
    Best-effort extraction of a contact email from a business website.

    Args:
        url: the site to scan (as returned in the place's websiteUri).

    Returns:
        The first plausible email found in the page HTML, or None when the
        site is unreachable or contains no usable address. Never raises:
        prospecting should continue even when individual sites are down.
    """
    try:
        response = requests.get(url, timeout=10, headers={
            "User-Agent": "Mozilla/5.0"
        })

        # Scan the raw HTML for email-shaped strings.
        emails = re.findall(
            r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}',
            response.text
        )

        # Filter common false positives: retina image filenames matched by
        # the pattern (e.g. "icon@2x.png") and placeholder/site-builder
        # domains. Compare case-insensitively so ".PNG" or "Example" are
        # caught too, and cover the webp/jpeg variants the original missed.
        valid_emails = [
            e for e in emails
            if not e.lower().endswith(('.png', '.jpg', '.jpeg', '.gif', '.svg', '.webp'))
            and 'example' not in e.lower()
            and 'wixpress' not in e.lower()
        ]

        return valid_emails[0] if valid_emails else None

    except Exception:
        # Deliberate best-effort: swallow DNS failures, timeouts, SSL errors.
        return None

Export to CSV

import csv

def export_csv(businesses: list[dict], filename: str = "prospects.csv"):
    """
    Export prospect data to a CSV file for import into a CRM or Lemlist.

    Args:
        businesses: place dicts as returned by the Places API searches.
        filename: destination CSV path (overwritten if it exists).
    """
    fields = ["name", "address", "phone", "website", "email", "rating", "review_count"]

    with open(filename, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()

        for b in businesses:
            website = b.get("websiteUri", "")
            # Places never returns emails, so scrape the site when there is
            # one (slow: one HTTP request per prospect with a website).
            email = extract_email_from_site(website) if website else ""

            writer.writerow({
                "name": b.get("displayName", {}).get("text", ""),
                "address": b.get("formattedAddress", ""),
                "phone": b.get("nationalPhoneNumber", ""),
                "website": website,
                "email": email,
                "rating": b.get("rating", ""),
                "review_count": b.get("userRatingCount", "")
            })

    # Bug fix: the summary previously printed a literal "(unknown)"
    # placeholder instead of the destination filename.
    print(f"✓ {len(businesses)} prospects exported to {filename}")

Multi-city scraping strategy

To cover a large geographic area, divide it into sub-zones:

# Search centers for the multi-city sweep: major French metros with their
# coordinates in decimal degrees (latitude, longitude).
CITIES_FRANCE = [
    {"name": "Paris", "lat": 48.8566, "lng": 2.3522},
    {"name": "Lyon", "lat": 45.7640, "lng": 4.8357},
    {"name": "Marseille", "lat": 43.2965, "lng": 5.3698},
    {"name": "Toulouse", "lat": 43.6047, "lng": 1.4442},
    {"name": "Bordeaux", "lat": 44.8378, "lng": -0.5792},
    {"name": "Lille", "lat": 50.6292, "lng": 3.0573},
    {"name": "Nantes", "lat": 47.2184, "lng": -1.5536},
    {"name": "Strasbourg", "lat": 48.5734, "lng": 7.7521},
]

import time

def scrape_multi_city(query: str, cities: list[dict]) -> list[dict]:
    """
    Run the paginated search over several cities and merge the results.

    Args:
        query: free-text search passed to each per-city search.
        cities: dicts with "name", "lat" and "lng" keys (see CITIES_FRANCE).

    Returns:
        De-duplicated prospects (first occurrence wins), keeping the order
        in which they were found.
    """
    collected = []

    for city in cities:
        print(f"Searching in {city['name']}...")
        collected.extend(
            search_all_businesses(query, city["lat"], city["lng"], radius_m=15000)
        )
        time.sleep(1)  # Respect rate limits

    # Deduplicate on (name, address); dicts preserve insertion order, so the
    # first occurrence of each business is the one kept.
    by_key: dict = {}
    for prospect in collected:
        key = (
            prospect.get("displayName", {}).get("text", ""),
            prospect.get("formattedAddress", "")
        )
        by_key.setdefault(key, prospect)

    unique = list(by_key.values())
    print(f"Total: {len(unique)} unique prospects across {len(cities)} cities")
    return unique

Best practices

  1. Respect quotas: monitor your usage in Google Cloud Console
  2. Cache results: store results in a database to avoid redundant API calls
  3. Respect the Terms of Service: Google prohibits mass storage of Places data without displaying a map
  4. Qualify your prospects: filter by rating, number of reviews, and whether they have a website
  5. Rate limiting: add delays between requests (1-2 seconds)