# Я работаю над Reddit Crawler и получил следующую ошибку. Я новичок в этой теме и ищу предложения по преодолению ошибки.
import praw
import json
# Authenticated Reddit client used by prawSubreddit below.
# NOTE(review): client_id, client_secret, user1, username and password are
# not defined anywhere in this file — assign them (e.g. load from a config
# file or environment variables) before this line runs, otherwise it raises
# NameError. Backslash continuations are unnecessary inside parentheses.
reddit = praw.Reddit(
    client_id=client_id,
    client_secret=client_secret,
    user_agent=user1,
    username=username,
    password=password,
)
def prawSubreddit(subName, lm):
    """Collect the top ``lm`` submissions from /r/``subName`` and write them
    to ``<subName>.txt`` as JSON via writeOutput.

    Parameters:
        subName: subreddit name, without the "/r/" prefix.
        lm: maximum number of top submissions to fetch.
    """
    print("Collecting from /r/{}...".format(subName))
    subreddit = reddit.subreddit(subName)
    redditData = []
    for submission in subreddit.top(limit=lm):
        # submission.author is a praw Redditor object (or None for deleted
        # accounts), not a plain string; json.dumps cannot serialize it and
        # raises TypeError — convert it to str() first.
        redditData.append({
            'Title': submission.title,
            'Txt': submission.selftext,
            'Author': str(submission.author),
        })
    print("Finished Collecting.")
    writeOutput("{}.txt".format(subName), redditData)
def writeOutput(fileName, data):
    """Serialize ``data`` as JSON (keys sorted) and write it to ``fileName``.

    The original had a stray backtick (a SyntaxError) and never closed the
    file handle, so output could be lost unflushed; a ``with`` block
    guarantees flush and close even on error.
    """
    with open(fileName, "w") as outputFile:
        outputFile.write(json.dumps(data, sort_keys=True))
# Script entry point: crawl the top 5 submissions of /r/opiates and
# write them to "opiates.txt" as JSON.
if __name__ == '__main__':
    prawSubreddit('opiates', 5)