Remove unnecessary decoding.

oddluck 2020-03-01 21:06:47 +00:00
parent fe6541cac0
commit f4cc96e610
20 changed files with 144 additions and 106 deletions
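
The pattern repeated across these 20 files: JSON responses from requests are parsed with json.loads() on the raw bytes, with no intermediate .decode() step (json.loads() accepts bytes as well as str on Python 3.6+), and several call sites swap response.json() for the same explicit call. A minimal sketch of the equivalent forms, using a placeholder URL rather than any of the plugin endpoints:

import json
import requests

# Placeholder endpoint, for illustration only.
resp = requests.get("https://api.example.com/data", timeout=10)

parsed_a = resp.json()                        # requests decodes and parses internally
parsed_b = json.loads(resp.content)           # json.loads() accepts the raw bytes directly
parsed_c = json.loads(resp.content.decode())  # the explicit decode this commit removes

assert parsed_a == parsed_b == parsed_c       # same object for a UTF-8 JSON body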

View File

@ -35,7 +35,7 @@ import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import supybot.conf as conf
import requests
import json
class Azure(callbacks.Plugin):
@ -68,8 +68,7 @@ class Azure(callbacks.Plugin):
'text' : text
}]
response = requests.post(url, headers=headers, json=body)
response.raise_for_status()
result = response.json()
result = json.loads(response.content)
if result[0].get('detectedLanguage'):
reply = "{0} [{1}~>{2}]".format(result[0]['translations'][0]['text'], result[0]['detectedLanguage']['language'], target)
else:

View File

@ -33,7 +33,7 @@ import supybot.utils as utils
import supybot.plugins as plugins
import supybot.callbacks as callbacks
import re
import json
import requests
try:
@ -76,7 +76,7 @@ class BotLibre(callbacks.Plugin):
}
try:
r = requests.post(self.url, json=payload)
j = r.json()
j = json.loads(r.content)
response = j['message']
self.conversation[channel] = j['conversation']
if response:

View File

@ -31,7 +31,7 @@
import pendulum
import requests
import collections
import json
from supybot import utils, plugins, ircutils, callbacks, conf, schedule, ircmsgs
from supybot.commands import *
try:
@ -170,7 +170,8 @@ class CBB(callbacks.Plugin):
data = collections.OrderedDict()
for date in dates:
tmp = requests.get(SCOREBOARD.format(date=date)).json()
tmp = requests.get(SCOREBOARD.format(date=date), timeout=10)
tmp = json.loads(tmp.content)
tmp_date = pendulum.parse(tmp['eventsDate']['date'],
strict=False).in_tz('US/Eastern').format('YYYYMMDD')
data[tmp_date] = tmp['events']

View File

@ -37,6 +37,8 @@ import re
import os
from supybot import utils, plugins, ircutils, callbacks, schedule
from supybot.commands import *
import json
try:
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization('CFB')
@ -69,7 +71,7 @@ class CFB(callbacks.Plugin):
if not self.abbrv:
self.abbrv = requests.get(
'https://raw.githubusercontent.com/diagonalfish/FootballBotX2/master/abbrv.json')
self.abbrv = self.abbrv.json()
self.abbrv = json.loads(self.abbrv.content)
@wrap
@ -118,7 +120,7 @@ class CFB(callbacks.Plugin):
try:
data = requests.get(url)
data = data.json()
data = json.loads(data.content)
except:
irc.reply('Error fetching rankings')
return
@ -224,7 +226,7 @@ class CFB(callbacks.Plugin):
url += '&week={}'.format(week)
games = requests.get(url)
games = games.json()
games = json.loads(games.content)
games = games['events']

View File

@ -36,6 +36,7 @@ import random
import datetime
import os
import requests
import json
try:
from supybot.i18n import PluginInternationalization
@ -59,7 +60,8 @@ class Cayenne(callbacks.Plugin):
"""
Get a random cat fact
"""
data = requests.get("https://catfact.ninja/fact").json()
data = requests.get("https://catfact.ninja/fact")
data = json.loads(data.content)
return data['fact']
def message_contains_trigger_word(self, message):

View File

@ -39,6 +39,7 @@ from bs4 import BeautifulSoup
import codecs
import os
import collections
import json
try:
from supybot.i18n import PluginInternationalization
@ -58,7 +59,8 @@ class Fun(callbacks.Plugin):
"""
channel = msg.args[0]
data = requests.get("https://api.adviceslip.com/advice").json()
data = requests.get("https://api.adviceslip.com/advice")
data = json.loads(data.content)
irc.reply(data['slip']['advice'])
advice = wrap(advice)
@ -72,7 +74,8 @@ class Fun(callbacks.Plugin):
headers = {
'Accept': 'application/json',
}
data = requests.get('https://icanhazdadjoke.com/', headers=headers).json()
data = requests.get('https://icanhazdadjoke.com/', headers=headers)
data = json.loads(data.content)
irc.reply(data['joke'].replace('\n', '').replace('\r', '').replace('\t', ''))
joke = wrap(joke)
@ -83,7 +86,8 @@ class Fun(callbacks.Plugin):
"""
channel = msg.args[0]
data = requests.get("https://catfact.ninja/fact").json()
data = requests.get("https://catfact.ninja/fact")
data = json.loads(data.content)
irc.reply(data['fact'])
catfact = wrap(catfact)
@ -94,7 +98,8 @@ class Fun(callbacks.Plugin):
"""
channel = msg.args[0]
data = requests.get("https://uselessfacts.jsph.pl/random.json?language=en").json()
data = requests.get("https://uselessfacts.jsph.pl/random.json?language=en")
data = json.loads(data.content)
irc.reply(data['text'])
useless = wrap(useless)
@ -104,7 +109,8 @@ class Fun(callbacks.Plugin):
Corporate buzzword generator
"""
channel = msg.args[0]
data = requests.get("https://corporatebs-generator.sameerkumar.website").json()
data = requests.get("https://corporatebs-generator.sameerkumar.website")
data = json.loads(data.content)
irc.reply(data['phrase'])
buzz = wrap(buzz)
@ -113,7 +119,8 @@ class Fun(callbacks.Plugin):
Startup generator
"""
channel = msg.args[0]
data = requests.get("http://itsthisforthat.com/api.php?json").json()
data = requests.get("http://itsthisforthat.com/api.php?json")
data = json.loads(data.content)
vowels = ('a','e','i','o','u','A','E','I','O','U')
if data['this'].startswith(vowels):
response = "So, Basically, It\'s Like An {0} for {1}".format(data['this'], data['that'])
@ -127,7 +134,8 @@ class Fun(callbacks.Plugin):
Insult generator. Optionally send insult to <nick> (<nick> must be in channel).
"""
channel = msg.args[0]
data = requests.get("https://insult.mattbas.org/api/en/insult.json").json()
data = requests.get("https://insult.mattbas.org/api/en/insult.json")
data = json.loads(data.content)
if nick:
response = "{0}: {1}".format(nick, data['insult'])
irc.reply(response, prefixNick=False)
@ -328,14 +336,14 @@ class Fun(callbacks.Plugin):
coins = []
coins.append(optcoin)
coins_str = ','.join(c.upper() for c in coins)
coin_data = requests.get(coin_url.format(coins=coins_str))
coin_data = coin_data.json()
if 'RAW' not in coin_data:
data = requests.get(coin_url.format(coins=coins_str))
data = json.loads(data.content)
if 'RAW' not in data:
irc.reply('ERROR: no coin found for {}'.format(optcoin))
return
output = []
tmp = {}
data = coin_data['RAW']
data = data['RAW']
data2 = collections.OrderedDict.fromkeys(sorted(data))
for k,v in data.items():
data2.update({k: v})
@ -350,18 +358,19 @@ class Fun(callbacks.Plugin):
"""
volm_url = 'https://min-api.cryptocompare.com/data/top/totalvol?limit=10&tsym=USD'
coin_url = 'https://min-api.cryptocompare.com/data/pricemultifull?fsyms={coins}&tsyms=USD'
volm_data = requests.get(volm_url).json()
data = requests.get(volm_url)
data = json.loads(data.content)
coins = []
for thing in volm_data['Data']:
for thing in data['Data']:
name = thing['CoinInfo']['Name']
coins.append(name)
coins.append('DOGE')
coins_str = ','.join(c for c in coins)
coin_data = requests.get(coin_url.format(coins=coins_str))
coin_data = coin_data.json()
data = requests.get(coin_url.format(coins=coins_str))
data = json.loads(data.content)
output = []
tmp = {}
data = coin_data['RAW']
data = data['RAW']
tmp['BTC'] = data.pop('BTC')
data2 = collections.OrderedDict.fromkeys(sorted(data))
for k,v in data.items():
@ -374,9 +383,7 @@ class Fun(callbacks.Plugin):
coins = wrap(coins, [optional('somethingWithoutSpaces')])
def _parseCoins(self, data, optmarket=None):
ticker = []
def _humifyCap(cap):
if not cap:
return cap
@ -394,7 +401,6 @@ class Fun(callbacks.Plugin):
cap = '${:.2f}'.format(cap)
return cap
return cap
for symbol in data:
name = symbol
name = ircutils.bold(name)

View File

@ -36,6 +36,7 @@ import supybot.ircmsgs as ircmsgs
import supybot.callbacks as callbacks
import supybot.log as log
import requests
import json
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse, quote_plus
@ -87,7 +88,7 @@ class IMDb(callbacks.Plugin):
try:
request = requests.get(omdb_url, timeout=10)
if request.status_code == requests.codes.ok:
response = request.json()
response = json.loads(request.content)
not_found = "Error" in response
unknown_error = response["Response"] != "True"
if not_found or unknown_error:

View File

@ -39,6 +39,7 @@ import os
import random
import re
import requests
import json
import string
import supybot.callbacks as callbacks
import supybot.conf as conf
@ -163,9 +164,11 @@ class Jeopardy(callbacks.Plugin):
break
try:
if self.jserviceUrl == 'http://jservice.io':
data = requests.get("{0}/api/random".format(self.jserviceUrl), timeout=5).json()
data = requests.get("{0}/api/random".format(self.jserviceUrl), timeout=5)
data = json.loads(data.content)
else:
data = requests.get("{0}/api/random?count={1}".format(self.jserviceUrl, self.num + 5), timeout=5).json()
data = requests.get("{0}/api/random?count={1}".format(self.jserviceUrl, self.num + 5), timeout=5)
data = json.loads(data.content)
for item in data:
if n == self.num:
break
@ -207,20 +210,21 @@ class Jeopardy(callbacks.Plugin):
break
try:
category = int(category)
data = requests.get("{0}/api/clues?category={1}".format(self.jserviceUrl, category)).json()
data = requests.get("{0}/api/clues?category={1}".format(self.jserviceUrl, category))
data = json.loads(data.content)
cluecount = data[0]['category']['clues_count']
if cluecount < self.num and len(self.categories) == 1:
self.num = cluecount
if cluecount > 100:
data.extend(requests.get("{0}/api/clues?&category={1}&offset=100".format(self.jserviceUrl, category), timeout=5).json())
data.extend(json.loads(requests.get("{0}/api/clues?&category={1}&offset=100".format(self.jserviceUrl, category), timeout=5).content))
if cluecount > 200:
data.extend(requests.get("{0}/api/clues?&category={1}&offset=200".format(self.jserviceUrl, category), timeout=5).json())
data.extend(json.loads(requests.get("{0}/api/clues?&category={1}&offset=200".format(self.jserviceUrl, category), timeout=5).content))
if cluecount > 300:
data.extend(requests.get("{0}/api/clues?&category={1}&offset=300".format(self.jserviceUrl, category), timeout=5).json())
data.extend(json.loads(requests.get("{0}/api/clues?&category={1}&offset=300".format(self.jserviceUrl, category), timeout=5).content))
if cluecount > 400:
data.extend(requests.get("(0}/api/clues?&category={1}&offset=400".format(self.jserviceUrl, category), timeout=5).json())
data.extend(json.loads(requests.get("(0}/api/clues?&category={1}&offset=400".format(self.jserviceUrl, category), timeout=5).content))
if cluecount > 500:
data.extend(requests.get("{0}/api/clues?&category={1}&offset=500".format(self.jserviceUrl, category), timeout=5).json())
data.extend(json.loads(requests.get("{0}/api/clues?&category={1}&offset=500".format(self.jserviceUrl, category), timeout=5).content))
j = 0
for item in data:
if n == self.num or k > len(self.categories):
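
The clue hunk above pages through a category in fixed 100-clue offsets, one if-block per page up to offset 500. A hedged sketch of the same pagination written as a loop; fetch_all_clues is an illustrative helper, not part of the plugin, and it assumes (as the repeated offsets above imply) that the jservice API returns at most 100 clues per request:

import json
import requests

def fetch_all_clues(base_url, category, timeout=5):
    # base_url plays the role of self.jserviceUrl in the plugin.
    first = requests.get("{0}/api/clues?category={1}".format(base_url, category),
                         timeout=timeout)
    data = json.loads(first.content)
    cluecount = data[0]['category']['clues_count']
    # Fetch further pages until the category's clues_count is covered.
    offset = 100
    while offset < cluecount:
        page = requests.get(
            "{0}/api/clues?category={1}&offset={2}".format(base_url, category, offset),
            timeout=timeout)
        data.extend(json.loads(page.content))
        offset += 100
    return data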
@ -601,7 +605,8 @@ class Jeopardy(callbacks.Plugin):
seed = random.randint(0,184) * 100
else:
seed = random.randint(0,250) * 100
data = requests.get("{0}/api/categories?count=100&offset={1}".format(self.jserviceUrl, int(seed)), timeout=5).json()
data = requests.get("{0}/api/categories?count=100&offset={1}".format(self.jserviceUrl, int(seed)), timeout=5)
data = json.loads(data.content)
random.shuffle(data)
results = []
for item in data:

View File

@ -29,7 +29,8 @@
###
import pendulum
import requests, json
import requests
import json
from roman_numerals import convert_to_numeral
from supybot import utils, plugins, ircutils, callbacks
@ -97,17 +98,18 @@ class NFL(callbacks.Plugin):
url = BASE_URL.format(f"/schedules/{int(date['year'])-1}")
else:
url = BASE_URL.format(f"/schedules/{date['year']}")
data = requests.get(url).json()
data = requests.get(url)
data = json.loads(data.content)
if not week:
url = BASE_URL.format('/currentWeek')
week = requests.get(url).json()['week']
week = json.loads(requests.get(url).content)['week']
if not season:
url = BASE_URL.format('/currentWeek')
season = requests.get(url).json()['seasonId']
season = json.loads(requests.get(url).content)['seasonId']
if not seasonType:
url = BASE_URL.format('/currentWeek')
tmp = requests.get(url).json()['seasonType']
tmp = json.loads(requests.get(url).content)['seasonType']
if tmp == "PRO":
if not options.get('pro'):
tmp = "POST"
@ -147,7 +149,7 @@ class NFL(callbacks.Plugin):
season, seasonType.upper(), week
))
try:
scores = requests.get(url).json()['gameScores']
scores = json.loads(requests.get(url).content)['gameScores']
except json.decoder.JSONDecodeError:
irc.error('invalid input', Raise=True)
except Exception as e:
@ -296,7 +298,7 @@ class NFL(callbacks.Plugin):
gameId = info['gameId']
url = BASE_URL.format('/playbyplay/{}/latest'.format(gameId))
try:
last_play = requests.get(url).json()
last_play = json.loads(requests.get(url).content)
last_play = last_play['plays'][-1]['playDescription']
except:
pass
@ -342,7 +344,7 @@ class NFL(callbacks.Plugin):
return
endpoint = '/playerGameStats/{}'.format(player_id)
data = requests.get(BASE_URL.format(endpoint)).json()
data = json.loads(requests.get(BASE_URL.format(endpoint)).content)
game_stats = data['playerGameStats']
player_info = data['teamPlayer']

View File

@ -154,7 +154,8 @@ class NHL(callbacks.Plugin):
url = 'https://statsapi.web.nhl.com/api/v1/teams'
try:
data = requests.get(url).json()
data = requests.get(url)
data = json.loads(data.content)
data = data['teams']
except:
return None
@ -316,7 +317,7 @@ class NHL(callbacks.Plugin):
return self._cachedData()
def _extractJSON(self, body):
return json.loads(body.decode())
return json.loads(body)
def _parseGames(self, json, team, tz='US/Eastern'):
"""Extract all relevant fields from NHL.com's json

View File

@ -30,7 +30,7 @@
import requests
import pendulum
import json
from supybot import utils, plugins, ircutils, callbacks
from supybot.commands import *
try:
@ -98,9 +98,11 @@ class Odds(callbacks.Plugin):
'origin': 'https://www.oddsshark.com/',
'referer': 'https://www.oddsshark.com/nhl/odds',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36'}
data.append(requests.get('https://io.oddsshark.com/ticker/nhl', headers=headers).json())
request = requests.get('https://io.oddsshark.com/ticker/nhl', headers=headers)
request = json.loads(request.content)
data.append(request)
elif league == 4:
#print('one')
headers = {'accept': 'application/json, text/plain, */*',
@ -109,14 +111,18 @@ class Odds(callbacks.Plugin):
'origin': 'https://www.oddsshark.com/',
'referer': 'https://www.oddsshark.com/nba/odds',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36'}
data.append(requests.get('https://io.oddsshark.com/ticker/nba', headers=headers).json())
request = requests.get('https://io.oddsshark.com/ticker/nba', headers=headers)
request = json.loads(request.content)
data.append(request)
else:
#print('two')
url = base_url.format(league=league, date=today)
print(url)
data.append(requests.get(url).json())
request = requests.get(url, timeout=10, headers=headers)
request = json.loads(request.content)
data.append(request)
if league == 2 or league == 1 or league == 3:
if league == 2 or league == 1:
dates = [tdate.add(days=1).format('YYYY-MM-DD'), tdate.add(days=2).format('YYYY-MM-DD'),
@ -130,8 +136,9 @@ class Odds(callbacks.Plugin):
for nflday in dates:
url = base_url.format(league=league, date=nflday)
#print(url)
#tdata = requests.get(url).json()
data[idx]['events'].extend(requests.get(url).json()['events'])
request = requests.get(url)
request = json.loads(request.content)
data[idx]['events'].extend(request['events'])
#print(data)
#
#try:

View File

@ -30,7 +30,7 @@
import pendulum
import requests
import json
from supybot import utils, plugins, ircutils, callbacks
from supybot.commands import *
try:
@ -51,7 +51,8 @@ class PGA(callbacks.Plugin):
def _fetchCurrent(self, type_='r'):
tmp = None
try:
jdata = requests.get(CURRENT_URL.format(trn_type=type_)).json()
jdata = requests.get(CURRENT_URL.format(trn_type=type_))
jdata = json.loads(jdata.content)
tmp = jdata['tid']
return [type_, tmp]
except:
@ -79,17 +80,18 @@ class PGA(callbacks.Plugin):
url = 'https://www.pgatour.com/bin/data/feeds/weather.json/{}{}'.format(
trn[0], trn[1])
print(url)
idata = requests.get(url).json()
idata = requests.get(url, timeout=10)
idata = json.loads(idata.content)
url2 = 'https://statdata.pgatour.com/r/current/schedule-v2.json' #.format(trn[0])
#print(url2)
sdata = requests.get(url2).json()
sdata = requests.get(url2, timeout=10)
sdata = json.loads(sdata.content)
#print(sdata)
# now get the leaderboard json
try:
jdata = requests.get(SCOREBOARD.format(trn_type=trn[0], trn_id=trn[1]))
print(jdata.url)
jdata = jdata.json()
jdata = requests.get(SCOREBOARD.format(trn_type=trn[0], trn_id=trn[1]), timeout=10)
jdata = json.loads(jdata.content)
except:
irc.reply('Something went wrong fetching the leaderboard')
return

View File

@ -42,7 +42,7 @@ except ImportError:
import requests
import pendulum
import pickle
import json
class Soccer(callbacks.Plugin):
"""Fetches soccer scores and other information"""
@ -195,8 +195,7 @@ class Soccer(callbacks.Plugin):
data.url))
return
print(data.url)
data = data.json()
data = json.loads(data.content)
if 'leagues' not in data:
irc.reply('ERROR: {} not found in valid leagues: {}'.format(

View File

@ -41,6 +41,7 @@ from urllib.parse import urlencode, urlparse, parse_qsl
from bs4 import BeautifulSoup
import random
import time
import json
from jinja2 import Template
import timeout_decorator
import unicodedata
@ -167,7 +168,7 @@ class SpiffyTitles(callbacks.Plugin):
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response is not None and "title" in response:
video = response
@ -222,7 +223,7 @@ class SpiffyTitles(callbacks.Plugin):
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response is not None and "title" in response[0]:
video = response[0]
@ -288,7 +289,7 @@ class SpiffyTitles(callbacks.Plugin):
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response:
video = response
@ -673,7 +674,7 @@ class SpiffyTitles(callbacks.Plugin):
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response:
try:
@ -895,7 +896,7 @@ class SpiffyTitles(callbacks.Plugin):
data = {}
extract = ""
if ok:
response = request.json()
response = json.loads(request.content)
if response:
self.log.debug("SpiffyTitles: twitch - got response:\n%s" % (response))
if 'error' in response:
@ -917,7 +918,7 @@ class SpiffyTitles(callbacks.Plugin):
self.log.error("SpiffyTitles: twitch HTTP %s: %s" %
(request.status_code, request.content.decode()[:200]))
else:
response = requests.json()
response = json.loads(request.content)
if not response:
self.log.error("SpiffyTitles: Error parsing Twitch JSON response")
else:
@ -953,7 +954,7 @@ class SpiffyTitles(callbacks.Plugin):
created_at = self._time_created_at(data['started_at'])
if game_id:
get_game = requests.get("https://api.twitch.tv/helix/games?id={}".format(game_id), timeout=10, headers=headers)
game_data = get_game.json()
game_data = json.loads(get_game.content)
game_name = game_data["data"][0]["name"]
template_vars = {
"display_name": display_name,
@ -976,7 +977,7 @@ class SpiffyTitles(callbacks.Plugin):
self.log.error("SpiffyTitles: twitch HTTP %s: %s" %
(request.status_code, request.content.decode()[:200]))
else:
response = request.json()
response = json.loads(request.content)
if not response:
self.log.error("SpiffyTitles: Error parsing Twitch JSON response")
else:
@ -995,7 +996,7 @@ class SpiffyTitles(callbacks.Plugin):
created_at = self._time_created_at(data['created_at'])
if game_id:
get_game = requests.get("https://api.twitch.tv/helix/games?id={}".format(game_id), timeout=10, headers=headers)
game_data = get_game.json()
game_data = json.loads(get_game.content)
game_name = game_data["data"][0]["name"]
template_vars = {
"display_name": display_name,
@ -1018,7 +1019,7 @@ class SpiffyTitles(callbacks.Plugin):
self.log.error("SpiffyTitles: twitch HTTP %s: %s" %
(request.status_code, request.content.decode()[:200]))
else:
response = request.json()
response = json.loads(request.content)
if not response:
self.log.error("SpiffyTitles: Error parsing Twitch JSON response")
else:
@ -1105,7 +1106,7 @@ class SpiffyTitles(callbacks.Plugin):
request = requests.get(omdb_url, timeout=10, headers=headers)
if request.status_code == requests.codes.ok:
response = request.json()
response = json.loads(request.content)
result = None
imdb_template = Template(self.registryValue("imdb.template"))
not_found = "Error" in response
@ -1224,7 +1225,7 @@ class SpiffyTitles(callbacks.Plugin):
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response:
try:
@ -1304,7 +1305,7 @@ class SpiffyTitles(callbacks.Plugin):
extract = ''
if ok:
response = request.json()
response = json.loads(request.content)
if response:
try:

View File

@ -31,7 +31,7 @@
import requests
import pendulum
import urllib.parse
import json
from . import accountsdb
from supybot import utils, plugins, ircutils, callbacks, world
@ -87,7 +87,8 @@ class TVMaze(callbacks.Plugin):
query = urllib.parse.quote_plus(query)
base_url += '/search/shows?q={}'.format(query)
try:
data = requests.get(base_url).json()
data = requests.get(base_url)
data = json.loads(data.content)
except:
data = None
elif mode == 'schedule':
@ -95,7 +96,8 @@ class TVMaze(callbacks.Plugin):
date = pendulum.now().format('YYYY-MM-DD')
base_url += '/schedule?country={}&date={}'.format(country, date)
try:
data = requests.get(base_url).json()
data = requests.get(base_url)
data = json.loads(data.content)
except:
data = None
elif mode == 'shows':
@ -103,7 +105,8 @@ class TVMaze(callbacks.Plugin):
return
base_url += '/shows/{}?embed[]=previousepisode&embed[]=nextepisode'.format(id_)
try:
data = requests.get(base_url).json()
data = requests.get(base_url)
data = json.loads(data.content)
except:
data = None
else:

View File

@ -47,6 +47,7 @@ import time
import random as random
import pyimgur
from bs4 import BeautifulSoup
import json
try:
from supybot.i18n import PluginInternationalization
@ -412,7 +413,7 @@ class TextArt(callbacks.Plugin):
payload = {'description':description,'sections':[{'contents':paste}]}
headers = {'X-Auth-Token':apikey}
post_response = requests.post(url='https://api.paste.ee/v1/pastes', json=payload, headers=headers)
response = post_response.json()
response = json.loads(post_response.content)
return response['link'].replace('/p/', '/r/')
except:
return "Error. Did you set a valid Paste.ee API Key? https://paste.ee/account/api"

View File

@ -51,8 +51,9 @@ class WebParser():
def getWebData(self, irc, url):
headers = {'User-Agent' : 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17'}
try:
content = requests.get(url, headers=headers)
return content.json()
content = requests.get(url, headers=headers, timeout=10)
content = json.loads(content.content)
return content
except:
irc.reply("Error: Couldn't connect to "+url)
return
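
Besides the parsing change, several hunks (CBB, PGA, Tweety, and this WebParser helper) add an explicit timeout= to their requests calls. requests has no default timeout, so a call without one can block indefinitely on an unresponsive host. A minimal sketch of the guarded call, with a placeholder URL:

import requests

try:
    # A single timeout value applies to both the connect and the read phase.
    resp = requests.get("https://api.example.com/data", timeout=10)
except requests.exceptions.Timeout:
    resp = None  # the caller decides how to report the failure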

View File

@ -139,13 +139,14 @@ class Tweety(callbacks.Plugin):
def _shortenUrl(self, url):
"""Shortens a long URL into a short one."""
api_key = self.registryValue('bitlyKey')
url_enc = urllib.parse.quote_plus(url)
api_url = 'https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}&format=json'
try:
data = requests.get(api_url.format(api_key, url_enc)).json()
data = requests.get(api_url.format(api_key, url_enc), timeout=10)
data = json.loads(data.content)
url2 = data['data'].get('url')
if url2.strip():
return url2.strip()
@ -177,7 +178,7 @@ class Tweety(callbacks.Plugin):
data = twitterApi.ApiCall('account/verify_credentials')
# check the response. if we can load json, it means we're authenticated. else, return response.
try: # if we pass, response is validated. set self.twitterApi w/object.
json.loads(data.read().decode())
json.loads(data.read())
self.log.info("I have successfully authorized and logged in to Twitter using your credentials.")
self.twitterApi = OAuthApi(self.registryValue('consumerKey'), self.registryValue('consumerSecret'), self.registryValue('accessKey'), self.registryValue('accessSecret'))
except: # response failed. Return what we got back.
@ -303,7 +304,8 @@ class Tweety(callbacks.Plugin):
api_url = 'https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}&format=json'
try:
data = requests.get(api_url.format(api_key, longurl)).json()
data = requests.get(api_url.format(api_key, longurl), timeout=10)
data = json.loads(data.content)
url2 = data['data'].get('url')
if url2.strip():
return url2.strip()
@ -354,7 +356,7 @@ class Tweety(callbacks.Plugin):
# make API call.
data = self.twitterApi.ApiCall('application/rate_limit_status', parameters={'resources':'trends,search,statuses,users'})
try:
data = json.loads(data.read().decode())
data = json.loads(data.read())
except:
irc.reply("ERROR: Failed to lookup ratelimit data: {0}".format(data))
return
@ -423,7 +425,7 @@ class Tweety(callbacks.Plugin):
# now build our API call
data = self.twitterApi.ApiCall('trends/place', parameters=args)
try:
data = json.loads(data.read().decode())
data = json.loads(data.read())
except:
irc.reply("ERROR: failed to lookup trends on Twitter: {0}".format(data))
return
@ -487,7 +489,7 @@ class Tweety(callbacks.Plugin):
# now build our API call.
data = self.twitterApi.ApiCall('search/tweets', parameters=tsearchArgs)
try:
data = json.loads(data.read().decode())
data = json.loads(data.read())
except:
irc.reply("ERROR: Something went wrong trying to search Twitter. ({0})".format(data))
return
@ -612,7 +614,7 @@ class Tweety(callbacks.Plugin):
# call the Twitter API with our data.
data = self.twitterApi.ApiCall(apiUrl, parameters=twitterArgs)
try:
data = json.loads(data.read().decode())
data = json.loads(data.read())
except:
irc.reply("ERROR: Failed to lookup Twitter for '{0}' ({1}) ".format(optnick, data))
return

View File

@ -62,7 +62,8 @@ class Weed(callbacks.Plugin):
url = "http://strainapi.evanbusse.com/{0}/strains/search/name/{1}".format(strain_api, strain)
data = requests.get(url).json()
data = requests.get(url)
data = json.loads(data.content)
for item in data:
if item['desc'] is not None and item['name'].casefold() == strain:
@ -71,7 +72,8 @@ class Weed(callbacks.Plugin):
type = ircutils.bold(item['race'])
desc = item['desc']
url2 = "http://strainapi.evanbusse.com/{0}/strains/data/flavors/{1}".format(strain_api, id)
data2 = requests.get(url2).json()
data2 = requests.get(url2)
data2 = json.loads(data2.content)
flavor1 = data2[0]
flavor2 = data2[1]
flavor3 = data2[2]
@ -84,7 +86,8 @@ class Weed(callbacks.Plugin):
type = ircutils.bold(item['race'])
desc = item['desc']
url2 = "http://strainapi.evanbusse.com/{0}/strains/data/flavors/{1}".format(strain_api, id)
data2 = requests.get(url2).json()
data2 = requests.get(url2)
data2 = json.loads(data2.content)
flavor1 = data2[0]
flavor2 = data2[1]
flavor3 = data2[2]

View File

@ -37,6 +37,7 @@ import supybot.callbacks as callbacks
import supybot.log as log
import requests
import pendulum
import json
from jinja2 import Template
from urllib.parse import urlencode
@ -68,8 +69,7 @@ class YouTube(callbacks.Plugin):
try:
log.debug("YouTube: requesting %s" % (api_url))
request = requests.get(api_url, timeout=10)
request.raise_for_status()
request = request.json()
request = json.loads(request.content)
video_id = request["items"][0]["id"]["videoId"]
except Exception:
log.error("YouTube: YouTube API HTTP %s: %s" % (request.status_code, request.content.decode()))
@ -131,7 +131,7 @@ class YouTube(callbacks.Plugin):
request = requests.get(api_url, timeout=10)
ok = request.status_code == requests.codes.ok
if ok:
response = request.json()
response = json.loads(request.content)
if response:
try:
if response["pageInfo"]["totalResults"] > 0:
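
The hunk counts suggest the Azure and YouTube changes above also drop a raise_for_status() call along with the .json() line. Several of the other call sites in this commit (SpiffyTitles, IMDb, the second YouTube hunk) keep a status check before parsing, which keeps HTML error pages out of json.loads(). A minimal sketch of that guard, with a placeholder URL:

import json
import requests

resp = requests.get("https://api.example.com/data", timeout=10)  # placeholder URL
if resp.status_code == requests.codes.ok:       # or: resp.raise_for_status()
    data = json.loads(resp.content)
else:
    data = None  # log or report the HTTP error instead of parsing the body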