From b692dc0ba8e8940844eb647a1f15e435a55ce4eb Mon Sep 17 00:00:00 2001
From: RaNaN
Date: Fri, 11 Mar 2011 23:41:40 +0100
Subject: closed #259, #250

---
 module/gui/PackageDock.py                |   18 ++++++++-
 module/network/Browser.py                |    8 ++++
 module/network/HTTPChunk.py              |    7 ++++
 module/network/HTTPDownload.py           |   12 +-----
 module/network/HTTPRequest.py            |   13 ++++++-
 module/plugins/PluginManager.py          |   19 ++++------
 module/plugins/hoster/DepositfilesCom.py |    5 ++-
 module/plugins/hoster/FileserveCom.py    |   44 +++++++++++++++++----
 module/plugins/hoster/ShareonlineBiz.py  |   14 +++----
 module/web/media/default/js/funktions.js |   65 ++++++++++++++++----------------
 10 files changed, 131 insertions(+), 74 deletions(-)

diff --git a/module/gui/PackageDock.py b/module/gui/PackageDock.py
index 077abb44c..73db8f177 100644
--- a/module/gui/PackageDock.py
+++ b/module/gui/PackageDock.py
@@ -15,6 +15,7 @@
     @author: mkaay
 """
 
+import re
 from PyQt4.QtCore import *
 from PyQt4.QtGui import *
 
@@ -45,6 +46,16 @@ class NewPackageDock(QDockWidget):
         self.widget.box.clear()
         self.hide()
 
+    def parseUri(self):
+
+        text=str(self.widget.box.toPlainText())
+        self.widget.box.setText("")
+        result = re.findall(r"(?:ht|f)tps?:\/\/[a-zA-Z0-9\-\.\/\?=_&%#]+[<| |\"|\'|\r|\n|\t]{1}", text)
+        for url in result:
+            if "\n" or "\t" or "\r" or "\"" or "<" or "'" in url:
+                url = url[:-1]
+            self.widget.box.append("%s " % url)
+
 class NewPackageWindow(QWidget):
     def __init__(self, dock):
         QWidget.__init__(self)
@@ -64,6 +75,7 @@ class NewPackageWindow(QWidget):
         self.passwordInput = passwordInput
 
         save = QPushButton(_("Create"))
+        parseUri = QPushButton(_("Filter URLs"))
 
         layout.addWidget(nameLabel, 0, 0)
         layout.addWidget(nameInput, 0, 1)
@@ -71,6 +83,8 @@ class NewPackageWindow(QWidget):
         layout.addWidget(passwordInput, 1, 1)
         layout.addWidget(linksLabel, 2, 0, 1, 2)
         layout.addWidget(self.box, 3, 0, 1, 2)
-        layout.addWidget(save, 4, 0, 1, 2)
-
+        layout.addWidget(parseUri, 4, 0, 1, 2)
+        layout.addWidget(save, 5, 0, 1, 2)
+
         self.connect(save, SIGNAL("clicked()"), self.dock.slotDone)
+        self.connect(parseUri, SIGNAL("clicked()"), self.dock.parseUri)
\ No newline at end of file
diff --git a/module/network/Browser.py b/module/network/Browser.py
index 6cc907491..adb2cb5d9 100644
--- a/module/network/Browser.py
+++ b/module/network/Browser.py
@@ -95,6 +95,14 @@ class Browser(object):
         """ retrieves page """
         return self.http.load(url, get, post, ref, cookies, just_header)
 
+
+    def putHeader(self, name, value):
+        """ add a header to the request """
+        self.http.putHeader(name, value)
+
+    def clearHeaders(self):
+        self.http.clearHeaders()
+
     def close(self):
         """ cleanup """
         if hasattr(self, "http"):
diff --git a/module/network/HTTPChunk.py b/module/network/HTTPChunk.py
index 9ca1be909..2fc48a588 100644
--- a/module/network/HTTPChunk.py
+++ b/module/network/HTTPChunk.py
@@ -36,6 +36,13 @@ class ChunkInfo():
         self.resume = False
         self.chunks = []
 
+    def __repr__(self):
+        ret = "ChunkInfo: %s, %s\n" % (self.name, self.size)
+        for i, c in enumerate(self.chunks):
+            ret += "%s# %s\n" % (i, c[1])
+
+        return ret
+
     def setSize(self, size):
         self.size = int(size)
 
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index 4c9d0705d..50b33cd97 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -48,8 +48,6 @@ class HTTPDownload():
 
         self.chunks = []
 
-        self.infoSaved = False # needed for 1 chunk resume
-
         try:
             self.info = ChunkInfo.load(filename)
             self.info.resume = True #resume is only possible with valid info file
@@ -123,6 +121,7 @@ class HTTPDownload():
 
     def _download(self, chunks, resume):
         if not resume:
+            self.info.clear()
             self.info.addChunk("%s.chunk0" % self.filename, (0, 0)) #create an initial entry
 
         init = HTTPChunk(0, self, None, resume) #initial chunk that will load complete file (if needed)
@@ -134,15 +133,8 @@ class HTTPDownload():
 
         chunksCreated = False
         while 1:
-            if (chunks == 1) and self.chunkSupport and self.size and not self.infoSaved:
-                # if chunk size is one, save info file here to achieve resume support
-                self.info.setSize(self.size)
-                self.info.createChunks(1)
-                self.info.save()
-                self.infoSaved = True
-
             #need to create chunks
-            if not chunksCreated and self.chunkSupport and self.size: #will be set later by first chunk
+            if not chunksCreated and self.chunkSupport and self.size: #will be setted later by first chunk
                 if not resume:
                     self.info.setSize(self.size)
diff --git a/module/network/HTTPRequest.py b/module/network/HTTPRequest.py
index 42b7aaf51..cd3635bcf 100644
--- a/module/network/HTTPRequest.py
+++ b/module/network/HTTPRequest.py
@@ -47,6 +47,8 @@ class HTTPRequest():
 
         self.header = ""
 
+        self.headers = [] #temporary request header
+
         self.initHandle()
         self.setInterface(interface, proxies)
 
@@ -150,7 +152,8 @@ class HTTPRequest():
 
         self.header = ""
 
-        #@TODO raw_cookies and some things in old backend, which are apperently not needed
+        if self.headers:
+            self.c.setopt(pycurl.HTTPHEADER, self.headers)
 
         if just_header:
             self.c.setopt(pycurl.NOBODY, 1)
@@ -165,6 +168,8 @@ class HTTPRequest():
         self.lastEffectiveURL = self.c.getinfo(pycurl.EFFECTIVE_URL)
         self.addCookies()
 
+        self.headers = []
+
         return rep
 
     def verifyHeader(self):
@@ -198,6 +203,12 @@ class HTTPRequest():
         """ writes header """
         self.header += buf
 
+    def putHeader(self, name, value):
+        self.headers.append("%s: %s" % (name, value))
+
+    def clearHeaders(self):
+        self.headers = []
+
     def close(self):
         """ cleanup, unusable after this """
         self.rep.close()
diff --git a/module/plugins/PluginManager.py b/module/plugins/PluginManager.py
index a911cdd1e..0848d520e 100644
--- a/module/plugins/PluginManager.py
+++ b/module/plugins/PluginManager.py
@@ -20,16 +20,11 @@
 import re
 import sys
 
-from os import listdir
-from os import makedirs
-
-from os.path import isfile
-from os.path import join
-from os.path import exists
-from os.path import abspath
-
+from os import listdir, makedirs
+from os.path import isfile, join, exists, abspath
 from sys import version_info
 from itertools import chain
+from traceback import print_exc
 
 try:
     from ast import literal_eval
@@ -292,13 +287,15 @@ class PluginManager():
 
             try:
                 module = __import__(value["path"], globals(), locals(), [value["name"]] , -1)
+                pluginClass = getattr(module, name)
             except Exception, e:
                 self.log.error(_("Error importing %(name)s: %(msg)s") % {"name": name, "msg": str(e) })
                 self.log.error(_("You should fix dependicies or deactivate load on startup."))
+                if self.core.debug:
+                    print_exc()
+
                 continue
-
-            pluginClass = getattr(module, name)
-
+
             value["class"] = pluginClass
             classes.append(pluginClass)
 
diff --git a/module/plugins/hoster/DepositfilesCom.py b/module/plugins/hoster/DepositfilesCom.py
index 3c1124709..b2cab30de 100644
--- a/module/plugins/hoster/DepositfilesCom.py
+++ b/module/plugins/hoster/DepositfilesCom.py
@@ -48,8 +48,9 @@ class DepositfilesCom(Hoster):
             wait_time = int(wait.group(1))
             self.log.info( "%s: Traffic used up. Waiting %d seconds." % (self.__name__, wait_time) )
             self.setWait(wait_time)
-            if wait_time > 300:
-                self.wantReconnect = True
+            self.wantReconnect = True
+            self.wait()
+            self.retry()
 
         wait = re.search(r'>Try in (\d+) minutes or use GOLD account', self.html)
         if wait:
diff --git a/module/plugins/hoster/FileserveCom.py b/module/plugins/hoster/FileserveCom.py
index 2e1bb1a59..9e14bfe7d 100644
--- a/module/plugins/hoster/FileserveCom.py
+++ b/module/plugins/hoster/FileserveCom.py
@@ -38,7 +38,7 @@ def getInfo(urls):
 class FileserveCom(Hoster):
     __name__ = "FileserveCom"
     __type__ = "hoster"
-    __pattern__ = r"http://(www\.)?fileserve\.com/file/.*?(/.*)?"
+    __pattern__ = r"http://(www\.)?fileserve\.com/file/[a-zA-Z0-9]+"
     __version__ = "0.3"
     __description__ = """Fileserve.Com File Download Hoster"""
     __author_name__ = ("jeix", "mkaay")
@@ -54,6 +54,8 @@ class FileserveCom(Hoster):
         else:
             self.multiDL = False
 
+        self.file_id = re.search(r"fileserve\.com/file/([a-zA-Z0-9]+)(http:.*)?", self.pyfile.url).group(1)
+
     def process(self, pyfile):
         self.html = self.load(self.pyfile.url, ref=False, cookies=False if self.account else True, utf8=True)
 
@@ -76,7 +78,26 @@ class FileserveCom(Hoster):
             self.download(self.pyfile.url, post={"download":"premium"}, cookies=True)
 
     def handleFree(self):
-
+
+        self.html = self.load(self.pyfile.url)
+        jsPage = re.search(r"\"(/landing/.*?/download_captcha\.js)\"", self.html)
+        self.req.putHeader("X-Requested-With", "XMLHttpRequest")
+
+        jsPage = self.load("http://fileserve.com" + jsPage.group(1))
+        action = self.load(self.pyfile.url, post={"checkDownload" : "check"})
+
+        if "timeLimit" in action:
+            html = self.load(self.pyfile.url, post={"checkDownload" : "showError", "errorType" : "timeLimit"})
+            wait = re.search(r"You need to wait (\d+) seconds to start another download", html)
+            if wait:
+                wait = int(wait.group(1))
+            else:
+                wait = 720
+
+            self.setWait(wait, True)
+            self.wait()
+            self.retry()
+
+        if r'