nba_playoffs_game_updater.py

#!/usr/bin/env python3
import gspread
import json
import random
import sys
from oauth2client.service_account import ServiceAccountCredentials
import datetime
import time
from nba_api.stats.static import players
from nba_api.stats.endpoints import playergamelog
import timeout_decorator
import urllib.request
try:
    from nba_api.library.debug.debug import DEBUG_STORAGE
except ImportError:
    DEBUG_STORAGE = False
#spreadsheet_key = '1QBQvGSMesox1gwjpaoK-0-p3n-c4I_L73PWCggjdayM' # 2019 Official
#spreadsheet_key = '14pHOScaGXvN83iCca6_5p6QoViYvo223cIJD9nnl7TI' # 2019 Test
#spreadsheet_key = '1n2qAxDhy3B-a20cn92H340GoPeKQE8fpztPlzKpGw80' # 2020 Test
#spreadsheet_key = '1ajlHmH-dUzwkVfD-4NgpkK8ni3I3UuUFcdefF_DUOyg' # 2020 Official
#spreadsheet_key = '1FgoBfPw4Vhi89rcYgxnAxLTeJ84GLXD1r39K0DXqxq4' # 2021 Official
#spreadsheet_key = '1Gt4J1nNOv1E3-gikopSJQCD7nAqx7DEYxUqsZNac9_Y' # 2022 Official
#spreadsheet_key = '1taZ97_PKEGYdSnMAeraPAbFR2U_7-P1GamzIdhlVXjc' # 2023 Test
spreadsheet_key = '10qAZWyRhwo4Hb4_5X0Q4LFFSudGn2okJNLojwGAg0yI' # 2023 Official
json_keyfile = 'NBA Playoffs Game-1f9a46f0715c.json'
day = 'today'  # 'today' uses the current date; otherwise set a date manually below
#day = datetime.date(2022, 5, 7) # set date manually
nba_cooldown = random.gammavariate(alpha=9, beta=0.4)  # don't hammer the NBA API
stats = ['PTS', 'REB', 'AST', 'STL', 'BLK', 'TOV', 'WL']  # stats appear in this order
STATS_HEADERS = {
    'Host': 'stats.nba.com',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:72.0) Gecko/20100101 Firefox/72.0',
    'Accept': 'application/json, text/plain, */*',
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip, deflate, br',
    'x-nba-stats-origin': 'stats',
    'x-nba-stats-token': 'true',
    'Connection': 'keep-alive',
    'Referer': 'https://stats.nba.com/',
    'Pragma': 'no-cache',
    'Cache-Control': 'no-cache',
}
proxy_url = "https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt"
# proxies = [
# "206.127.88.18:80",
# "47.112.104.235:80",
# "60.169.201.13:53315",
# "14.207.10.155:8080",
# "113.100.209.10:3128",
# "113.100.209.145:3128",
# "69.162.65.42:5836",
# "46.18.210.88:5836",
# "203.150.160.95:8080",
# "117.252.68.173:8080",
# "113.100.209.140:3128",
# "218.66.253.146:8800",
# "186.159.5.161:8080",
# "181.224.161.132:999",
# "103.28.90.154:5836",
# "113.100.209.116:3128",
# "45.231.29.45:8080",
# "202.169.255.3:8181",
# "103.52.144.242:8080",
# "181.78.12.164:999",
# "117.67.77.188:4216",
# "183.89.63.159:8080",
# "88.150.220.130:3128",
# "168.195.204.168:8080",
# "116.0.2.162:52076",
# "58.52.115.99:4216",
# "182.23.52.114:6060",
# "116.0.3.238:8080",
# "175.6.66.48:3128",
# "180.183.246.110:8080",
# "220.249.149.69:9999",
# "103.221.254.102:48146",
# "110.77.242.14:8080",
# "198.98.59.87:8080",
# "209.91.216.168:8080",
# "177.8.170.62:8080",
# "218.66.253.144:10200",
# "113.161.58.255:8080",
# "180.244.73.12:8080",
# "190.2.210.98:8080",
# "82.114.71.98:8080",
# "190.103.28.161:999",
# "181.119.69.89:3128",
# "190.120.249.246:999",
# "82.114.115.194:1256",
# "218.66.253.146:10084",
# "103.146.68.255:8080",
# ]
proxies = []
###############################################################################
def buildProxyList(proxies=[], raw_text_url="https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt"):
    """
    Fetch a raw proxy list and prepend any hardcoded proxies; good_proxy_list starts empty
    """
    good_proxy_list = []
    proxy_list = []
    r = urllib.request.urlopen(raw_text_url)
    for line in r:
        line = line.decode("utf-8")
        line = line.strip()
        proxy_list.append(line)
    random.shuffle(proxy_list)
    proxy_list = proxies + proxy_list
    return proxy_list, good_proxy_list
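# Illustrative example (proxy address invented): buildProxyList(proxies=['1.2.3.4:8080'])
# returns (proxy_list, good_proxy_list), where proxy_list begins with the hardcoded
# proxies followed by a shuffled copy of the fetched list, and good_proxy_list is empty
# until getStats() starts marking proxies that actually work.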
  106. """
  107. Returns a worksheet instance
  108. """
  109. def getWorksheet(spreadsheet_key, json_keyfile):
  110. try:
  111. scope = ['https://spreadsheets.google.com/feeds',
  112. 'https://www.googleapis.com/auth/drive']
  113. credentials = ServiceAccountCredentials.from_json_keyfile_name(json_keyfile, scope)
  114. gc = gspread.authorize(credentials)
  115. spreadsheet = gc.open_by_key(spreadsheet_key)
  116. worksheet = spreadsheet.get_worksheet(0)
  117. except Exception as e:
  118. print(f"Exception: {str(e)}")
  119. print("Could not retrieve worksheet!")
  120. print("Check your API key, credentials, or network!")
  121. raise(e)
  122. return worksheet
  123. """
  124. Returns a list of lists containing the values of all cells in the worksheet by row
  125. """
  126. def getAllValues(worksheet):
  127. return worksheet.get_all_values()
  128. """
  129. Create various date variables based on "today's" day
  130. """
  131. def setDates(day):
  132. if day == 'today':
  133. # in case games go past midnight
  134. date = datetime.datetime.now() - datetime.timedelta(hours=3)
  135. date = date.date()
  136. else:
  137. date = day
  138. url_date = date.strftime('%m/%d/%Y')
  139. year = date.year
  140. season = f"{format(str(year - 1))}-{str(year)[2:]}"
  141. return url_date, season, date
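# Illustrative example: with day = datetime.date(2023, 5, 7), setDates(day) returns
# ('05/07/2023', '2022-23', datetime.date(2023, 5, 7)) -- the MM/DD/YYYY string the
# stats endpoint expects plus the season label for that playoff run.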
  142. """
  143. Determines the number of players in the pool
  144. """
  145. def getNumberOfParticipants(all_values):
  146. count=0
  147. for row_num, row in enumerate(all_values):
  148. if row[0] != "" and row_num >= 3 and count == 0:
  149. start=row_num
  150. count+=1
  151. elif row[0] != "" and row_num >= 3 and count == 1:
  152. end=row_num
  153. break
  154. num_participants = end - start
  155. return num_participants
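# Illustrative example (assumed sheet layout): if column A holds a date on sheet rows 4
# and 12 (the start of two consecutive game days), the rows in between belong to a single
# day's picks, so getNumberOfParticipants() returns 12 - 4 = 8.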
  156. """
  157. Determines the active day's first and last rows
  158. """
  159. def getFirstRowLastRow(all_values, num_participants, current_date):
  160. first_row = None
  161. last_row = None
  162. for row_num, row in enumerate(all_values, start=1):
  163. date=row[0]
  164. if date != "" and row_num >= 4:
  165. day = datetime.datetime.strptime('{} {}'.format(date,
  166. str(current_date.year)),
  167. '%A, %B %d %Y')
  168. if day.date() == current_date:
  169. first_row = row_num
  170. last_row = first_row + num_participants - 1
  171. break
  172. return first_row, last_row
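# Illustrative example (assumed cell format): a column-A value like 'Sunday, May 7' with
# current_date.year == 2023 parses via '%A, %B %d %Y' to 2023-05-07, so that sheet row
# becomes first_row for the day and last_row is first_row + num_participants - 1.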
  173. """
  174. Rudimentary way to reduce player name errors
  175. """
  176. def cleanFirstNameLastName(player):
  177. first_name_last_name = player.split()
  178. first_name = first_name_last_name[0]
  179. first_name = first_name.replace('.', '')
  180. # New nickname for T.J. Warren should be "The Outlier"
  181. if first_name == "TJ":
  182. first_name = "T.J."
  183. elif first_name == "Donavan":
  184. first_name = "Donovan"
  185. last_name = first_name_last_name[1]
  186. player_clean = first_name + ' ' + last_name
  187. return player_clean
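# Illustrative examples: 'TJ Warren' -> 'T.J. Warren', 'Donavan Mitchell' -> 'Donovan Mitchell'.
# Only the first two whitespace-separated tokens are kept, so a suffixed entry such as
# 'Gary Payton II' would come back truncated to 'Gary Payton'.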
  188. """
  189. Create a unique list of players that have been selected today
  190. Also, append misspelled players to batch_update_list to autofix on next push if we can
  191. """
  192. def cleanPlayers(all_values, first_row, last_row, batch_update_list):
  193. players_unique = []
  194. for row_num, row in enumerate(all_values, start=1):
  195. if first_row <= row_num <= last_row:
  196. player = row[2]
  197. if player[-7:] != "-FIX!!!" and player != "":
  198. if len(players.find_players_by_full_name(player)) > 0:
  199. players_unique.append(player)
  200. else:
  201. player_clean = cleanFirstNameLastName(player)
  202. if len(players.find_players_by_full_name(player_clean)) > 0:
  203. all_values[row_num - 1][2] = player_clean
  204. batch_update_list.append({'range': f'{indexToLetter(2)}{row_num}', 'values': [[player_clean]]})
  205. players_unique.append(player_clean)
  206. else:
  207. print(f"Player: {player} not found, please fix name!")
  208. players_unique = list(dict.fromkeys(players_unique))
  209. return players_unique, batch_update_list, all_values
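# Illustrative example (assuming the misspelling matches no real player): if C8 reads
# 'Donavan Mitchell', cleanPlayers() rewrites it in all_values to 'Donovan Mitchell',
# queues {'range': 'C8', 'values': [['Donovan Mitchell']]} for the next push, and adds
# the corrected name once to players_unique.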
  210. """
  211. Pull player's gamelog from stats.nba.com based on the url_date and player_id
  212. """
  213. #@timeout_decorator.timeout(30)
  214. def getStats(players_unique, url_date, season, proxy_list=[], good_proxy_list=[]):
  215. stats_dict = {}
  216. for player in players_unique:
  217. player_info = players.find_players_by_full_name(player)
  218. player_id = player_info[0].get('id')
  219. print(f'Retrieving stats for: {player}')
  220. while True:
  221. # Move working proxies to the front of the list
  222. if len(good_proxy_list) > 0:
  223. proxy_list = good_proxy_list + proxy_list
  224. # Remove duplicate proxies
  225. proxy_list = list(dict.fromkeys(proxy_list))
  226. # Use the first proxy in the list
  227. request_proxy = proxy_list[0]
  228. try:
  229. print(f'Proxy: http://{request_proxy}')
  230. player_game_log = playergamelog.PlayerGameLog( player_id=player_id,
  231. proxy='http://' + request_proxy,
  232. season=season,
  233. timeout=10,
  234. league_id_nullable='00',
  235. season_type_all_star='Playoffs',
  236. date_from_nullable=url_date,
  237. date_to_nullable=url_date,
  238. )
  239. print('Success!')
  240. if request_proxy not in good_proxy_list:
  241. good_proxy_list.append(request_proxy)
  242. player_game_log_dict = player_game_log.get_dict()
  243. if DEBUG_STORAGE is False:
  244. time.sleep(nba_cooldown)
  245. break
  246. except OSError as e:
  247. print(e)
  248. if request_proxy in good_proxy_list:
  249. good_proxy_list.remove(request_proxy)
  250. else:
  251. print(f'Proxy refused, removing {request_proxy}')
  252. proxy_list.remove(request_proxy)
  253. continue
  254. except Exception as e:
  255. print(e)
  256. print('Could not connect to the NBA API, sleeping for 30 seconds')
  257. time.sleep(30)
  258. player_game_log_results = player_game_log_dict.get('resultSets')[0]
  259. player_game_log_headers = player_game_log_results.get('headers')
  260. # if player has no stats for this day, list will be empty
  261. if len(player_game_log_results.get('rowSet')) < 1:
  262. player_stats_dict = None
  263. else:
  264. player_game_log_stats = player_game_log_results.get('rowSet')[0]
  265. player_stats_dict = dict(zip(player_game_log_headers, player_game_log_stats))
  266. stats_dict[player] = player_stats_dict
  267. return stats_dict, good_proxy_list
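# Illustrative example (shape only, numbers invented): getStats() returns a dict keyed by
# player name whose values are either None (no game on url_date) or a dict mapping the
# API's gamelog headers to values, e.g.
# {'Jimmy Butler': {'PTS': 25, 'REB': 7, 'AST': 4, 'STL': 1, 'BLK': 0, 'TOV': 2, 'WL': 'W', ...},
#  'Jrue Holiday': None},
# along with the updated good_proxy_list.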
  268. """
  269. Append stat cells that have changes to batch_update_list
  270. Also append player cells that need fixing to batch_update_list
  271. """
  272. def cellsToUpdate(all_values, first_row, last_row, stats_dict, stats, batch_update_list):
  273. for row_num, row in enumerate(all_values, start=1):
  274. if first_row <= row_num <= last_row:
  275. player_name = row[2]
  276. if player_name[-7:] != "-FIX!!!" and player_name in stats_dict.keys():
  277. if stats_dict[player_name] is not None:
  278. player_stats = stats_dict[player_name]
  279. if player_stats == "Fix!":
  280. batch_update_list.append({'range': f'{indexToLetter(2)}{row_num}', 'values': [[f'{player_name}-FIX!!!']]})
  281. continue
  282. for col_num, stat in enumerate(stats, start=3):
  283. pass
  284. #print(player_name, player_stats[stat])
  285. #print(player_name, f'{indexToLetter(col_num)}{row_num}', str(row[col_num]), f',', player_stats[stat])
  286. if str(player_stats[stat]) != str(row[col_num]) and player_stats[stat] is not None:
  287. #print('Update:', row_num, col_num, player_name, f'{indexToLetter(col_num)}{row_num}', str(row[col_num]), player_stats[stat])
  288. batch_update_list.append({'range': f'{indexToLetter(col_num)}{row_num}', 'values': [[f'{player_stats[stat]}']]}.copy())
  289. return batch_update_list
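# Illustrative example (cell addresses depend on the sheet): a 31-point night for the
# player on sheet row 12 would queue {'range': 'D12', 'values': [['31']]}, since PTS is
# the first stat column (index 3 -> 'D'); this is the request format that gspread's
# Worksheet.batch_update() accepts.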
  290. """
  291. Convert zero-indexed column number to the appropriate column letter (A=0, B=1, C=2...)
  292. """
  293. def indexToLetter(index):
  294. return chr(ord('@')+int(index)+1)
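# Illustrative examples: indexToLetter(0) -> 'A', indexToLetter(3) -> 'D'. Note this only
# covers single-letter columns (indices 0-25); 'AA' and beyond are not handled.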
  295. """
  296. Push changes to Google Sheet
  297. """
  298. def batchUpdate(batch_update_list):
  299. if len(batch_update_list) > 1:
  300. worksheet.batch_update(batch_update_list, value_input_option="USER_ENTERED")
  301. else:
  302. print('No update needed, sleeping for 1 minute')
  303. time.sleep(60)
if __name__ == "__main__":
    # Use a combination of our good proxies with some fetched from the internet for variation
    proxy_list, good_proxy_list = buildProxyList(proxies=proxies, raw_text_url=proxy_url)
    while True:
        try:
            batch_update_list = []
            worksheet = getWorksheet(spreadsheet_key, json_keyfile)
            url_date, season, date = setDates(day)
            print("Date: " + str(date))
            all_values = getAllValues(worksheet)
            num_participants = getNumberOfParticipants(all_values)
            first_row, last_row = getFirstRowLastRow(all_values, num_participants, date)
            if first_row is None:
                print("No games today! Pausing for 1000 seconds...")
                time.sleep(1000)
                continue
            players_unique, batch_update_list, all_values = cleanPlayers(all_values, first_row, last_row, batch_update_list)
            stats_dict, good_proxy_list = getStats(players_unique, url_date, season, proxy_list=proxy_list, good_proxy_list=good_proxy_list)
            batch_update_list = cellsToUpdate(all_values, first_row, last_row, stats_dict, stats, batch_update_list)
            if len(batch_update_list) > 1:
                print(batch_update_list)
            batchUpdate(batch_update_list)
        except Exception as e:
            print(e)
            print('Sleeping for 10 seconds')
            time.sleep(10)
            continue