data science

Reddit User Info – Python

This one is just for me. No explanation. As is.

This script will let you download all the posts submitted by any Reddit user. Just set the `str_un` variable near the top of the script to the user name.


"""Download all posts submitted by a Reddit user and save them to a CSV.

Set ``str_un`` to the target user name and run the script.  The script
pages through the user's ``/submitted.json`` listing using Reddit's
``after`` cursor until no further pages remain, then writes
``<user>.csv`` with one row per post.
"""
import json
import urllib.request
from datetime import datetime, timezone

import pandas as pd

# user name whose submissions are downloaded
str_un = 'Any_user_name'

# columns written to the output CSV (one tuple per post)
CSV_COLUMNS = ['id', 'created', 'title', 'subreddit']


def parse_page(js_d):
    """Extract post rows and the pagination cursor from one listing page.

    Parameters
    ----------
    js_d : dict
        Decoded JSON of one Reddit listing page.

    Returns
    -------
    (rows, after) : (list[tuple], str or None)
        ``rows`` holds ``(id, created, title, subreddit)`` tuples;
        ``after`` is the cursor for the next page, or ``None`` on the
        last page.
    """
    rows = []
    for js_post in js_d['data']['children']:
        data = js_post['data']
        # 'created' is a Unix timestamp; format it as YYYYMMDD (UTC)
        dt_uf = datetime.fromtimestamp(data['created'], tz=timezone.utc)
        rows.append((data['id'], dt_uf.strftime('%Y%m%d'),
                     data['title'], data['subreddit']))
    return rows, js_d['data']['after']


def fetch_page(str_url):
    """Fetch one listing page and return the decoded JSON.

    A browser-like User-agent is sent because Reddit throttles the
    default urllib agent; the response is closed via the context manager.
    """
    ul_req = urllib.request.Request(str_url,
                                    headers={'User-agent': 'Mozilla/5.0'})
    with urllib.request.urlopen(ul_req) as resp:
        return json.loads(resp.read())


def download_user(str_user):
    """Download every submission by ``str_user`` and write ``<user>.csv``.

    Returns the assembled :class:`pandas.DataFrame`.
    """
    # base listing URL for the user's submissions
    # (the original post had this string stripped; this is Reddit's
    # standard JSON listing endpoint — confirm against the API docs)
    str_base = 'https://www.reddit.com/user/%s/submitted.json' % str_user

    list_output = []   # accumulated (id, created, title, subreddit) rows
    str_after = None   # pagination cursor; None before the first page
    int_counter = 1    # page counter, for progress output only

    while True:
        # append the cursor for every page after the first
        if str_after is None:
            str_url = str_base
        else:
            str_url = '%s?after=%s' % (str_base, str_after)

        js_d = fetch_page(str_url)
        rows, str_after = parse_page(js_d)
        list_output.extend(rows)

        # tell me when page is done
        print('page %i done' % int_counter)

        # no cursor -> that was the last page, so stop
        if str_after is None:
            print('done')
            break
        int_counter += 1

    # turn output into a data frame and save as csv
    df_d = pd.DataFrame(list_output, columns=CSV_COLUMNS)
    df_d.to_csv('%s.csv' % str_user, index=None)
    return df_d


if __name__ == '__main__':
    download_user(str_un)

Leave a Reply

Please log in using one of these methods to post your comment:

WordPress.com Logo

You are commenting using your WordPress.com account. ( Log Out /  Change )

Google photo

You are commenting using your Google account. ( Log Out /  Change )

Twitter picture

You are commenting using your Twitter account. ( Log Out /  Change )

Facebook photo

You are commenting using your Facebook account. ( Log Out /  Change )

Connecting to %s