# main.py — PYrxoer: interactive command-line wrapper for proxer.me
  1. import auth as auth
  2. from tqdm import tqdm
  3. import pickle
  4. import os
  5. import configparser
  6. from getpass import getpass
  7. import requests
  8. from subprocess import call
  9. import subprocess
  10. from parsers.episode import episodeParser
  11. from parsers.lesezeichen import lesezeichenParser
  12. from parsers.chapter import chapterParser
  13. from parsers.player import playerParser
  14. from util.chromecast import ccast
  15. baseurl = ''
  16. if os.path.isfile('cookies'):
  17. f = open('cookies','rb')
  18. sess = requests.session()
  19. sess.cookies.update(pickle.load(f))
  20. def main():
  21. ReadConfig()
  22. print(config['paths']['anime_path'])
  23. while True:
  24. print('PYrxoer Python wrapper for proxer.me')
  25. print('0 - Load')
  26. print('1 - Login')
  27. print('2 - Lesezeichen - All')
  28. print('3 - Lesezeichen - Anime')
  29. print('4 - Lesezeichen - Manga')
  30. print('5 - Browse local')
  31. print('99 - exit')
  32. uin = input('$>: ')
  33. # auth.checkLogin()
  34. if uin == '0':
  35. if os.path.isfile(config['paths']['cookie_jar']):
  36. f = open(config['paths']['cookie_jar'],'rb')
  37. sess = requests.session()
  38. sess.cookies.update(pickle.load(f))
  39. if uin == '1':
  40. user = input('username: ')
  41. pw = getpass('password:' )
  42. sess = auth.login(user,pw,config)
  43. print(str(auth.token))
  44. f = open(config['paths']['cookie_jar'],'wb')
  45. pickle.dump(sess.cookies,f)
  46. if uin == '2':
  47. LesezeichenAll(sess)
  48. if uin == '3':
  49. LesezeichenSingle(sess,1)
  50. if uin == '4':
  51. LesezeichenSingle(sess,2)
  52. if uin =='99':
  53. exit()
  54. def chapter(sess,ep):
  55. url = baseurl + ep['link'].replace('chapter','read').replace('#top','')+'/1'
  56. response = sess.get(url)
  57. content = response.content
  58. chapPars = chapterParser()
  59. chapPars.images = []
  60. chapPars.feed(str(content))
  61. path = config['paths']['manga_path']+'/'+ep['name']+'/'+ep['num']+'/'
  62. if not os.path.exists(path):
  63. os.makedirs(path)
  64. print(str(ep))
  65. print(chapPars.serverurl)
  66. count = 0
  67. while True:
  68. print(ep['name'] + ' - ' + ep['num'])
  69. print('1 - Download')
  70. print('2 - mark as read')
  71. print('3 - go to next')
  72. print('4 - go to previous')
  73. print('5 - mark to read')
  74. print('6 - read local')
  75. print('7 - Details')
  76. print('99 - Back')
  77. uin = input('$>: ')
  78. if uin == '1':
  79. print(path)
  80. for p in tqdm(chapPars.images):
  81. #put https in front because changes from proxer?
  82. call(['curl','https://'+chapPars.serverurl + p['file'],'-o',path+str(count).zfill(2)+p['file'][p['file'].rindex('.'):]])
  83. #call(['curl',chapPars.serverurl + p['file'],'-o',path+str(count).zfill(2)+p['file'][p['file'].rindex('.'):]])
  84. count +=1
  85. if uin == '2':
  86. url = baseurl + ep['link'].replace('#top','?format=json&type=reminder&'+ chapPars.token +'=1&title=reminder_next')
  87. response = sess.post(url,data = {"check":1})
  88. print(str(response.content))
  89. if uin == '5':
  90. url = baseurl + ep['link'].replace('#top','?format=json&type=reminder&'+ chapPars.token +'=1&title=reminder_this')
  91. response = sess.post(url,data = {"check":1})
  92. print(str(response.content))
  93. if uin =='4':
  94. prev = ep
  95. linkParts = ep['link'].split('/')
  96. linkParts[3] = str(int(linkParts[3]) - 1)
  97. prev['link'] = '/'.join(linkParts)
  98. prev['num'] = str(int(prev['num'])-1)
  99. chapter(sess,prev)
  100. if uin =='3':
  101. prev = ep
  102. linkParts = ep['link'].split('/')
  103. linkParts[3] = str(int(linkParts[3]) +1)
  104. prev['link'] = '/'.join(linkParts)
  105. prev['num'] = str(int(prev['num'])+1)
  106. chapter(sess,prev)
  107. if uin =='6':
  108. call(['feh','-z','-S','name',path])
  109. if uin == '99':
  110. break
  111. def episode(sess,ep):
  112. response = sess.get(baseurl+ep['link'])
  113. content = response.content
  114. epars = episodeParser()
  115. epars.feed(str(content))
  116. code = epars.code
  117. #link = 'https:'+epars.url.replace('#',code)
  118. link = epars.url.replace('#',code)
  119. response = sess.get(link)
  120. ppars = playerParser()
  121. ppars.feed(str(response.content))
  122. link = ppars.url
  123. while True:
  124. print(ep['name'] + ' - ' + ep['num'])
  125. print('1 - open with vlc')
  126. print('2 - download')
  127. print('3 - mark as watched')
  128. print('4 - go to next')
  129. print('5 - go to previous')
  130. print('6 - Details')
  131. print('7 - Play on Chromecast')
  132. print('99 - Exit')
  133. path = 'anime/'+ep['name']+'/'
  134. if not os.path.exists(path):
  135. os.makedirs(path)
  136. uin = input('$>: ')
  137. if uin == '1':
  138. call(["vlc",'-f', link])
  139. if uin == '2':
  140. call(['curl',link,'-o',path+ep['num']+'.mp4'])
  141. if uin == '3':
  142. url = baseurl + ep['link'].replace('#top','?format=json&type=reminder&'+ epars.token +'=1&title=reminder_next')
  143. response = sess.post(url,data = {"check":1})
  144. print(str(response.content))
  145. if uin =='4':
  146. prev = ep
  147. linkParts = ep['link'].split('/')
  148. linkParts[3] = str(int(linkParts[3]) +1)
  149. prev['link'] = '/'.join(linkParts)
  150. prev['num'] = str(int(prev['num'])+1)
  151. episode(sess,prev)
  152. if uin == '7':
  153. #TODO improve discovery and multiple favorites
  154. cast = ccast(config['devices']['chromecast'])
  155. if len(cast.getCasts()) <= 0:
  156. print('no chromecast found')
  157. cast.play(link,cast.getCasts()[0])
  158. if uin == '99':
  159. break
  160. def LesezeichenAll(sess):
  161. watchlist = []
  162. readlist = []
  163. response = sess.get(config['urls']['base_url']+'/ucp?s=reminder&utm_source=nav#top')
  164. lpars = lesezeichenParser()
  165. lpars.watchlist = []
  166. lpars.readlist = []
  167. content = response.content
  168. lpars.feed(str(content))
  169. watchlist = lpars.watchlist
  170. readlist = lpars.readlist
  171. print('Watchlist')
  172. for i in range (0,len(watchlist) - 1):
  173. print('w' + str(i) + ' - ' + watchlist[i]['name'] + ' - ' + str(watchlist[i]['new']))
  174. print('Readlist')
  175. for i in range (0,len(readlist) - 1):
  176. print('r' + str(i) + ' - ' + readlist[i]['name'] + ' - ' + str(readlist[i]['new']))
  177. uin = input('$>: ')
  178. link = ""
  179. if uin[0] == 'w':
  180. ep = watchlist[int(uin.replace('w',''))]
  181. episode(sess,ep)
  182. else:
  183. ep = readlist[int(uin.replace('r',''))]
  184. chapter(sess,ep)
  185. print(link)
  186. def LesezeichenSingle(sess,mode):
  187. response = sess.get(config['urls']['base_url']+'/ucp?s=reminder&utm_source=nav#top')
  188. lpars = lesezeichenParser()
  189. content = response.content
  190. lpars.feed(str(content))
  191. watchlist = lpars.watchlist
  192. readlist = lpars.readlist
  193. if mode == 1:
  194. print('Watchlist')
  195. plist = watchlist
  196. else:
  197. print('Readlist')
  198. plist = readlist
  199. for i in range (0,len(plist) - 1):
  200. print(str(i) + ' - ' + plist[i]['name'] + ' - ' + str(plist[i]['new']))
  201. uin = input('$>: ')
  202. link = plist[int(uin)]['link']
  203. url = config['urls']['base_url']+link
  204. print(link)
  205. print(url)
  206. f = open('episode.html','w')
  207. response = sess.get(url)
  208. f.write(str(response.content))
  209. f.close
  210. print('written')
  211. def ReadConfig():
  212. global config
  213. global baseurl
  214. config = configparser.ConfigParser()
  215. config.read_file(open('settings.cfg'))
  216. baseurl = config['urls']['base_url']
  217. def ListLocal():
  218. anime = []
  219. manga = []
  220. main()