浏览代码

modification for proxer workaround

Phil 4 年之前
父节点
当前提交
9a3995aef5
共有 3 个文件被更改,包括 82 次插入,7 次删除
  1. 26 3
      auth.py
  2. 51 4
      main.py
  3. 5 0
      settings.cfg

+ 26 - 3
auth.py

@@ -1,13 +1,25 @@
 import requests
+import pickle
 from html.parser import HTMLParser
 from html.entities import name2codepoint
 
 token = 'empty'
 
-def run(user,passw):
+def checkLogin():
+    #f = open('cookies','rb')
+    sess = requests.session()
+    #sess.cookies.update(pickle.load(f))
+    response = sess.get('https://proxer.net/ucp')
+    data = response.content
+    dump = open('dummys/ucp_loggedout.html','w')
+    dump.write(str(data))
+    dump.close()
+
+
+def login(user,passw):
     sess = requests.Session()
     #obtain token
-    response = requests.get('https://proxer.me/')
+    response = requests.get('https://proxer.net/')
     data = response.content
     tparse = tokenParser()
     tparse.feed(str(data))
@@ -16,7 +28,7 @@ def run(user,passw):
     cookies = response.cookies
 
     #make auth
-    response = sess.post('https://proxer.me/login?' + token + '=1',
+    response = sess.post('https://proxer.net/login?' + token + '=1',
         data={'username':user,'password':passw,'remember':'1','submit':'login'},
         cookies=cookies)
     cookies = response.cookies
@@ -29,6 +41,9 @@ def run(user,passw):
     #response = requests.get('https://proxer.me/ucp?s=reminder&utm_source=nav#top',cookies=cookies)
     #print(response.content)
 
+
+
+
 class tokenParser(HTMLParser):
     token = ''
 
@@ -37,6 +52,14 @@ class tokenParser(HTMLParser):
             self.token = attrs[1][1]
             print('Token: ',self.token)
 
+class userParser(HTMLParser):
+    loggedIn = False
+    username = ''
+    def handle_starttag(self, tag, attrs):
+        if tag == 'b' and attrs[0][1] == 'username':
+            self.loggedIn = True
+
+            
 
 #response = requests.get('https://proxer.me/ucp?s=reminder&utm_source=nav#top',cookies=cookies)
 #print(response.content)

+ 51 - 4
main.py

@@ -1,6 +1,8 @@
 import auth as auth
 from tqdm import tqdm
+import pickle
 import os
+import configparser
 from getpass import getpass
 import requests
 from subprocess import call
@@ -10,21 +12,43 @@ from parsers.lesezeichen import lesezeichenParser
 from parsers.chapter import chapterParser
 from parsers.player import playerParser
 
+if os.path.isfile('cookies'):
+    f = open('cookies','rb') 
+    sess = requests.session()
+    sess.cookies.update(pickle.load(f))
+
 def main():
+    ReadConfig()
+    print(config['paths']['anime_path'])
     while True:
         print('PYrxoer Python wrapper for proxer.me')
+        print('0 - Load')
         print('1 - Login')
         print('2 - Lesezeichen - All')
         print('3 - Lesezeichen - Anime')
         print('4 - Lesezeichen - Manga')
+        print('5 - Browse local')
         print('99 - exit')
 
         uin = input('$>: ')
+        
+       # auth.checkLogin()
+
+        if uin == '0':
+            if os.path.isfile(config['paths']['cookie_jar']):
+                f = open(config['paths']['cookie_jar'],'rb') 
+                sess = requests.session()
+                sess.cookies.update(pickle.load(f))
+
+
+
         if uin == '1':
             user = input('username: ')
             pw = getpass('password:' )
-            sess = auth.run(user,pw)
+            sess = auth.login(user,pw)
             print(str(auth.token))
+            f = open(config['paths']['cookie_jar'],'wb')
+            pickle.dump(sess.cookies,f)
 
         if uin == '2':
             LesezeichenAll(sess)
@@ -45,7 +69,7 @@ def chapter(sess,ep):
     chapPars = chapterParser()
     chapPars.images = []
     chapPars.feed(str(content))
-    path = 'manga/'+ep['name']+'/'+ep['num']+'/'
+    path = config['paths']['manga_path']+'/'+ep['name']+'/'+ep['num']+'/'
     if not os.path.exists(path):
         os.makedirs(path)
     print(str(ep))
@@ -103,11 +127,11 @@ def chapter(sess,ep):
 def episode(sess,ep):
     response = sess.get('https://proxer.me'+ep['link'])
     content = response.content
-    print(str(content))
     epars = episodeParser()
     epars.feed(str(content))
     code = epars.code
-    link = 'https:'+epars.url.replace('#',code)
+    #link = 'https:'+epars.url.replace('#',code)
+    link = epars.url.replace('#',code)
     response = sess.get(link)
     ppars = playerParser()
     ppars.feed(str(response.content))
@@ -120,6 +144,7 @@ def episode(sess,ep):
         print('4 - go to next')
         print('5 - go to previous')
         print('6 - Details')
+        print('99 - Exit')
         path = 'anime/'+ep['name']+'/'
         if not os.path.exists(path):
             os.makedirs(path)
@@ -132,6 +157,18 @@ def episode(sess,ep):
             url = 'https://proxer.me' + ep['link'].replace('#top','?format=json&type=reminder&'+ epars.token +'=1&title=reminder_next')
             response = sess.post(url,data = {"check":1})
             print(str(response.content))
+
+        if uin =='4':
+            prev = ep
+            linkParts = ep['link'].split('/')
+            linkParts[3] = str(int(linkParts[3]) +1)
+            prev['link'] = '/'.join(linkParts)
+            prev['num'] = str(int(prev['num'])+1)
+            episode(sess,prev)
+
+        if uin == '99':
+            break
+
     
 def LesezeichenAll(sess):
     watchlist = []
@@ -186,5 +223,15 @@ def LesezeichenSingle(sess,mode):
     f.write(str(response.content))
     f.close
     print('written')
+
+def ReadConfig():
+    global config 
+    config = configparser.ConfigParser()
+    config.read_file(open('settings.cfg'))
+    
+def ListLocal():
+    anime = []
+    manga = []
+
     
 main()

+ 5 - 0
settings.cfg

@@ -0,0 +1,5 @@
+[paths]
+base_url:https://proxer.net
+anime_path:./anime
+manga_path:./manga
+cookie_jar:./cookies