Domain Activity API v2

Python Command Line Tool

A simple ready-to-use Python script for querying the Domain Activity feed from your terminal. Useful for quick lookups, scheduled tasks, and testing your API credentials before building a full integration.

Requires Python 3 with the requests package. Install it with pip install requests. (The datetime module the script also uses ships with Python — no extra install needed.) Python 3 on Windows can be installed via the Microsoft Store.

Setup

Download dnfeed-app.py and open it in a text editor. Replace the two placeholder values with your API credentials:

apikey    = "paste_your_api_key_here"
apisecret = "paste_your_api_secret_here"

Save the file and open a terminal. Change to the folder where you saved it.

Usage Examples

Added domains containing keywords — last 10 days

python dnfeed-app.py -d 10 -kw "%hdmi%,%interface%"

Deleted domains with the same keywords

python dnfeed-app.py -s deleted -d 10 -kw "%hdmi%,%interface%"

Domain names only, no dates

python dnfeed-app.py -d 10 -kw "%hdmi%,%interface%" -od

Filter by TLD

python dnfeed-app.py -d 5 -kw "%apple%" -tlds com,net,org

Download previous day's added domains ZIP

python dnfeed-app.py -zip

Download previous day's deleted domains ZIP

python dnfeed-app.py -zip -s deleted

Download ZIP for a specific date

python dnfeed-app.py -zip -zd 20240101
python dnfeed-app.py -zip -zd 20240101 -s deleted -o deleted-jan1.zip

Download current day's ZIP (domains processed so far)

python dnfeed-app.py -zip -latest
python dnfeed-app.py -zip -latest -s deleted

Show all available options

python dnfeed-app.py -h

All Options

Domain query options

-d days

Number of preceding days to retrieve data for. Between 0 and 14. Default 1.

-kw keywords

Keyword filter. Separate multiple keywords with a comma. Use % as a wildcard. Enclose in double quotes when using %.

-tlds tlds

Filter by TLD. Separate multiple TLDs with a comma or |. Example: com,net,org

-s source

Data source. Use added or deleted. Default added. Applies to both query and ZIP download modes.

-od

Print domain names only, without dates.

-raw

Print raw JSON output. Overrides -od.

-v

Show additional information about the query being run.

ZIP download options

-zip

Switch to ZIP download mode. Downloads the full daily domain list as a ZIP file instead of querying. Use with -s to choose added or deleted.

-zd date

Date for the ZIP download in yyyymmdd format. Omit for the previous day's data. Cannot be used together with -latest.

-latest

Download a ZIP for the current day containing domains processed so far. Only valid without -zd.

-o filename

Output filename for the downloaded ZIP. Default is dnfeed-<source>-<date>.zip.

The Code

Download dnfeed-app.py

import requests
import sys
import json
import datetime

# Provide your keys: replace the two placeholder strings below with the
# API key and secret from your account before running the script.
apikey    = "paste_your_api_key_here"
apisecret = "paste_your_api_secret_here"

# Base URL of the Domain Activity API v2; all request URLs below are built
# by appending path segments (token, command) to this prefix.
apiurl = "https://api.codepunch.com/dnfeed/v2/"


def get_api_token():
    """Authenticate against the API and return a session token.

    Builds the auth URL from the module-level ``apiurl``, ``apikey`` and
    ``apisecret`` values and performs a GET request.

    Returns:
        str: the token to embed in subsequent request URLs.

    Raises:
        Exception: if the HTTP status is not 200, or if the API response
            reports ``status`` falsy (the API's ``error`` text is raised).
    """
    url = "{}auth/{}/{}".format(apiurl, apikey, apisecret)
    # timeout so a dead connection fails fast instead of hanging forever
    response = requests.get(url, timeout=30)
    if response.status_code != 200:
        raise Exception('Authentication: {}'.format(response.status_code))
    apiresponse = response.json()
    if not apiresponse['status']:
        raise Exception(apiresponse['error'])
    return apiresponse['token']


def get_api_data(token, command, parameters):
    """Run a query against the API and return the decoded JSON response.

    Args:
        token: session token obtained from :func:`get_api_token`.
        command: API command / data source (e.g. ``"added"`` or ``"deleted"``).
        parameters: dict of query-string parameters passed to ``requests``.

    Returns:
        dict: the full parsed JSON response (callers read its ``'data'`` key).

    Raises:
        Exception: on a non-200 HTTP status, or when the API response
            reports ``status`` falsy (the API's ``error`` text is raised).
    """
    url = "{}{}/{}/".format(apiurl, token, command)
    # timeout so a dead connection fails fast instead of hanging forever
    response = requests.get(url, params=parameters, timeout=30)
    if response.status_code != 200:
        raise Exception('Invalid response code: {}'.format(response.status_code))
    apiresponse = response.json()
    if not apiresponse['status']:
        raise Exception(apiresponse['error'])
    return apiresponse


def get_api_zip_file(token, command, parameters, filename):
    """Stream a ZIP download from the API to a local file.

    Args:
        token: session token obtained from :func:`get_api_token`.
        command: API command (``"dailyzip"`` in this script).
        parameters: dict of query-string parameters (source, date, latest).
        filename: local path the ZIP is written to.

    Raises:
        Exception: on a non-200 HTTP status.
    """
    url = "{}{}/{}/".format(apiurl, token, command)
    # stream=True keeps the response body off-memory; timeout covers connect/read
    response = requests.get(url, params=parameters, stream=True, timeout=30)
    if response.status_code != 200:
        raise Exception('Invalid response code: {}'.format(response.status_code))
    totalbytes = 0
    with open(filename, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                # count the actual chunk length — the final chunk is usually
                # shorter than 1024, so adding a fixed 1024 over-reports size
                totalbytes += len(chunk)
                f.write(chunk)
    print("Downloaded {} KB to {}".format(round(totalbytes / 1024), filename))


try:
    # ── Option defaults ────────────────────────────────────────────────────
    idx               = 0                     # position of the current argv token
    source_name       = 'added'               # 'added' or 'deleted'
    keywords          = "%apple%|%paypal%"    # demo default; overridden by -kw
    number_of_days    = 1
    only_domain_names = False
    show_information  = False                 # enabled by -v (was True, which made -v a no-op)
    tlds_to_get       = ''
    show_raw_data     = False
    download_zip      = False
    zip_date          = None
    zip_latest        = False
    zip_output_file   = None

    # Minimal hand-rolled argument parsing: idx tracks the index of the
    # current token, so value-taking flags read the token that follows them.
    # A flag given as the last argument with no value raises IndexError,
    # which the top-level except reports.
    for i in sys.argv:
        if   i == "-h":
            print("\ndnfeed-app.py [options]\n")
            print("Domain query options:")
            print("  -d days      Number of preceding days (0-14)")
            print("  -kw keywords Keywords. Separate with comma. Use % for wildcard.")
            print("  -tlds tlds   TLDs to filter. Separate with comma or |")
            print("  -s source    added or deleted. Default: added")
            print("  -od          Domain names only")
            print("  -raw         Raw JSON output")
            print("  -v           Show query information")
            print("\nZIP download options:")
            print("  -zip         Download daily ZIP instead of querying")
            print("  -zd date     Date in yyyymmdd format. Omit for previous day.")
            print("  -latest      Current day ZIP (processed so far). Cannot use with -zd.")
            print("  -o filename  Output filename. Default: dnfeed-<source>-<date>.zip")
            sys.exit()
        # clamp -d to the documented 0-14 range (min() alone allowed negatives)
        elif i == "-d":      idx += 1; number_of_days    = max(0, min(int(sys.argv[idx]), 14))
        elif i == "-kw":     idx += 1; keywords          = sys.argv[idx].replace(",", "|")
        elif i == "-tlds":   idx += 1; tlds_to_get       = sys.argv[idx]
        elif i == "-s":      idx += 1; source_name       = sys.argv[idx]
        elif i == "-od":     only_domain_names = True;  idx += 1
        elif i == "-raw":    show_raw_data     = True;  idx += 1
        elif i == "-v":      show_information  = True;  idx += 1
        elif i == "-zip":    download_zip      = True;  idx += 1
        elif i == "-zd":     idx += 1; zip_date          = sys.argv[idx]
        elif i == "-latest": zip_latest        = True;  idx += 1
        elif i == "-o":      idx += 1; zip_output_file   = sys.argv[idx]
        else:                idx += 1

    # silently fall back to the default source on an unrecognized value
    if source_name not in ('added', 'deleted'):
        source_name = 'added'

    token = get_api_token()

    # ── ZIP download mode ──────────────────────────────────────────────────
    if download_zip:
        if zip_latest and zip_date:
            raise Exception("Use either -zd or -latest, not both.")
        params = {"source": source_name}
        if zip_date:
            params["date"] = zip_date
            label = zip_date
        elif zip_latest:
            params["latest"] = ""   # presence-only flag parameter
            label = "latest"
        else:
            label = "prev"          # API default: previous day's list
        if zip_output_file is None:
            zip_output_file = "dnfeed-{}-{}.zip".format(source_name, label)
        print("Downloading {} {} ZIP to {}...".format(source_name, label, zip_output_file))
        get_api_zip_file(token, "dailyzip", params, zip_output_file)

    # ── Domain query mode ──────────────────────────────────────────────────
    else:
        today_date = datetime.datetime.now()
        olddate    = today_date - datetime.timedelta(days=number_of_days)
        datecode   = olddate.strftime("%Y%m%d")   # API expects yyyymmdd
        if show_information:
            print('\nGetting domain data after {} for keywords \'{}\' from {} list.\n'.format(
                olddate.strftime("%Y-%m-%d"), keywords, source_name))
        # dcm=gte: match dates greater-than-or-equal to datecode
        parameters = {"date": datecode, "limit": 5000, "kw": keywords,
                      "dcm": 'gte', "tlds": tlds_to_get, "format": "json"}
        thedata    = get_api_data(token, source_name, parameters)
        if show_raw_data:
            print(json.dumps(thedata['data'], indent=2))
        else:
            for d in thedata['data']:
                print(d['domain'] if only_domain_names else d['domain'] + ', ' + d['date'])

except Exception as e:
    print(repr(e))

Need help with the API? See the full API documentation or contact us with your subscription details.